Example usage for org.apache.lucene.search IndexSearcher getIndexReader

Introduction

On this page you can find example usages of org.apache.lucene.search.IndexSearcher.getIndexReader().

Prototype

public IndexReader getIndexReader() 

Document

Return the IndexReader this searches.
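
All of the examples below follow the same pattern: borrow an IndexSearcher from a maven-indexer IndexingContext, work with the IndexReader returned by getIndexReader(), and hand the searcher back in a finally block. Here is a minimal sketch of that pattern; the class and method names are illustrative only, and the IndexingContext is assumed to be already configured.

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.maven.index.context.IndexingContext;

public class GetIndexReaderSketch {

    // Counts the live documents in the context's index via the searcher's IndexReader.
    public static int countDocuments(IndexingContext context) throws IOException {
        final IndexSearcher indexSearcher = context.acquireIndexSearcher();
        try {
            // getIndexReader() returns the IndexReader this searcher searches
            final IndexReader reader = indexSearcher.getIndexReader();
            return reader.numDocs();
        } finally {
            // always release the searcher back to the context that lent it
            context.releaseIndexSearcher(indexSearcher);
        }
    }
}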

Usage

From source file:org.apache.maven.index.cli.NexusIndexerCli.java

License:Apache License

private void index(final CommandLine cli, PlexusContainer plexus)
        throws ComponentLookupException, IOException, UnsupportedExistingLuceneIndexException {
    String indexDirectoryName = cli.getOptionValue(INDEX);

    File indexFolder = new File(indexDirectoryName);

    String outputDirectoryName = cli.getOptionValue(TARGET_DIR, ".");

    File outputFolder = new File(outputDirectoryName);

    File repositoryFolder = new File(cli.getOptionValue(REPO));

    String repositoryName = cli.getOptionValue(NAME, indexFolder.getName());

    List<IndexCreator> indexers = getIndexers(cli, plexus);

    boolean createChecksums = cli.hasOption(CREATE_FILE_CHECKSUMS);

    boolean createIncrementalChunks = cli.hasOption(CREATE_INCREMENTAL_CHUNKS);

    boolean debug = cli.hasOption(DEBUG);

    boolean quiet = cli.hasOption(QUIET);

    Integer chunkCount = cli.hasOption(INCREMENTAL_CHUNK_KEEP_COUNT)
            ? Integer.parseInt(cli.getOptionValue(INCREMENTAL_CHUNK_KEEP_COUNT))
            : null;

    if (!quiet) {
        System.err.printf("Repository Folder: %s\n", repositoryFolder.getAbsolutePath());
        System.err.printf("Index Folder:      %s\n", indexFolder.getAbsolutePath());
        System.err.printf("Output Folder:     %s\n", outputFolder.getAbsolutePath());
        System.err.printf("Repository name:   %s\n", repositoryName);
        System.err.printf("Indexers: %s\n", indexers.toString());

        if (createChecksums) {
            System.err.printf("Will create checksum files for all published files (sha1, md5).\n");
        } else {
            System.err.printf("Will not create checksum files.\n");
        }

        if (createIncrementalChunks) {
            System.err.printf("Will create incremental chunks for changes, along with baseline file.\n");
        } else {
            System.err.printf("Will create baseline file.\n");
        }
    }

    NexusIndexer indexer = plexus.lookup(NexusIndexer.class);

    long tstart = System.currentTimeMillis();

    IndexingContext context = indexer.addIndexingContext( //
            repositoryName, // context id
            repositoryName, // repository id
            repositoryFolder, // repository folder
            indexFolder, // index folder
            null, // repositoryUrl
            null, // index update url
            indexers);

    try {
        IndexPacker packer = plexus.lookup(IndexPacker.class);

        ArtifactScanningListener listener = new IndexerListener(context, debug, quiet);

        indexer.scan(context, listener, true);

        IndexSearcher indexSearcher = context.acquireIndexSearcher();

        try {
            IndexPackingRequest request = new IndexPackingRequest(context, indexSearcher.getIndexReader(),
                    outputFolder);

            request.setCreateChecksumFiles(createChecksums);

            request.setCreateIncrementalChunks(createIncrementalChunks);

            request.setFormats(Arrays.asList(IndexFormat.FORMAT_V1));

            if (chunkCount != null) {
                request.setMaxIndexChunks(chunkCount.intValue());
            }

            packIndex(packer, request, debug, quiet);
        } finally {
            context.releaseIndexSearcher(indexSearcher);
        }

        if (!quiet) {
            printStats(tstart);
        }
    } finally {
        indexer.removeIndexingContext(context, false);
    }
}

From source file:org.apache.maven.index.ConcurrentUseWithMergedContextPublishingTest.java

License:Apache License

@Override
protected int readIndex(final NexusIndexer nexusIndexer, final IndexingContext indexingContext)
        throws IOException {
    // note: concurrent Index publishing into SAME directory is not supported and should be avoided.
    // This test had multiple threads doing it, and since it was not checking actual results of publish (that was
    // not the goal of the test, but simultaneous publishing of merged context that has member changes happening),
    // it was probably publishing rubbish anyway.
    final File publish = new File(repoPublish, "publish-" + counter.getAndIncrement());

    final IndexSearcher indexSearcher = context.acquireIndexSearcher();
    try {
        final IndexPackingRequest request = new IndexPackingRequest(context, indexSearcher.getIndexReader(),
                publish);
        request.setCreateIncrementalChunks(false);
        packer.packIndex(request);
    } finally {
        context.releaseIndexSearcher(indexSearcher);
    }

    return 1;
}

From source file:org.apache.maven.index.context.DefaultIndexingContext.java

License:Apache License

public int getSize() throws IOException {
    final IndexSearcher is = acquireIndexSearcher();
    try {
        return is.getIndexReader().numDocs();
    } finally {
        releaseIndexSearcher(is);
    }
}

From source file:org.apache.maven.index.context.DefaultIndexingContext.java

License:Apache License

public synchronized void rebuildGroups() throws IOException {
    final IndexSearcher is = acquireIndexSearcher();
    try {
        final IndexReader r = is.getIndexReader();

        Set<String> rootGroups = new LinkedHashSet<String>();
        Set<String> allGroups = new LinkedHashSet<String>();

        int numDocs = r.maxDoc();
        Bits liveDocs = MultiFields.getLiveDocs(r);

        for (int i = 0; i < numDocs; i++) {
            if (liveDocs != null && !liveDocs.get(i)) {
                continue;
            }

            Document d = r.document(i);

            String uinfo = d.get(ArtifactInfo.UINFO);

            if (uinfo != null) {
                ArtifactInfo info = IndexUtils.constructArtifactInfo(d, this);
                rootGroups.add(info.getRootGroup());
                allGroups.add(info.getGroupId());
            }
        }

        setRootGroups(rootGroups);
        setAllGroups(allGroups);

        optimize();
    } finally {
        releaseIndexSearcher(is);
    }
}

From source file:org.apache.maven.index.context.NexusIndexMultiReader.java

License:Apache License

public synchronized IndexReader acquire() throws IOException {
    if (searchers != null) {
        release();
        throw new IllegalStateException("acquire() called 2nd time without release() in between!");
    }
    this.searchers = new ArrayList<IndexSearcher>();
    final ArrayList<IndexReader> contextReaders = new ArrayList<IndexReader>(contexts.size());
    for (IndexingContext ctx : contexts) {
        final IndexSearcher indexSearcher = ctx.acquireIndexSearcher();
        searchers.add(indexSearcher);
        contextReaders.add(indexSearcher.getIndexReader());
    }
    return new MultiReader(contextReaders.toArray(new IndexReader[contextReaders.size()]));
}

From source file:org.apache.maven.index.DefaultIndexNexusIndexerTest.java

License:Apache License

public void testIndexTimestamp() throws Exception {
    final File targetDir = Files.createTempDirectory("testIndexTimestamp").toFile();
    targetDir.deleteOnExit();

    final IndexPacker indexPacker = lookup(IndexPacker.class);
    final IndexSearcher indexSearcher = context.acquireIndexSearcher();
    try {
        final IndexPackingRequest request = new IndexPackingRequest(context, indexSearcher.getIndexReader(),
                targetDir);
        indexPacker.packIndex(request);
    } finally {
        context.releaseIndexSearcher(indexSearcher);
    }

    Thread.sleep(1000L);

    File newIndex = new File(getBasedir(), "target/test-new");

    Directory newIndexDir = FSDirectory.open(newIndex.toPath());

    IndexingContext newContext = nexusIndexer.addIndexingContext("test-new", "test", null, newIndexDir, null,
            null, DEFAULT_CREATORS);

    final IndexUpdater indexUpdater = lookup(IndexUpdater.class);
    indexUpdater.fetchAndUpdateIndex(
            new IndexUpdateRequest(newContext, new DefaultIndexUpdater.FileFetcher(targetDir)));

    assertEquals(context.getTimestamp().getTime(), newContext.getTimestamp().getTime());

    assertEquals(context.getTimestamp(), newContext.getTimestamp());

    // make sure context has the same artifacts

    Query query = nexusIndexer.constructQuery(MAVEN.GROUP_ID, "qdox", SearchType.SCORED);

    FlatSearchRequest request = new FlatSearchRequest(query, newContext);
    FlatSearchResponse response = nexusIndexer.searchFlat(request);
    Collection<ArtifactInfo> r = response.getResults();

    System.out.println(r);

    assertEquals(2, r.size());

    List<ArtifactInfo> list = new ArrayList<ArtifactInfo>(r);

    assertEquals(2, list.size());

    ArtifactInfo ai = list.get(0);

    assertEquals("1.6.1", ai.getVersion());

    ai = list.get(1);

    assertEquals("1.5", ai.getVersion());

    assertEquals("test", ai.getRepository());

    Date timestamp = newContext.getTimestamp();

    newContext.close(false);

    newIndexDir = FSDirectory.open(newIndex.toPath());

    newContext = nexusIndexer.addIndexingContext("test-new", "test", null, newIndexDir, null, null,
            DEFAULT_CREATORS);

    indexUpdater.fetchAndUpdateIndex(
            new IndexUpdateRequest(newContext, new DefaultIndexUpdater.FileFetcher(targetDir)));

    assertEquals(timestamp, newContext.getTimestamp());

    newContext.close(true);

    assertFalse(new File(newIndex, "timestamp").exists());
}

From source file:org.apache.maven.index.DefaultScannerListener.java

License:Apache License

private void initialize(IndexingContext ctx) throws IOException, CorruptIndexException {
    final IndexSearcher indexSearcher = ctx.acquireIndexSearcher();
    try {
        final IndexReader r = indexSearcher.getIndexReader();
        Bits liveDocs = MultiFields.getLiveDocs(r);

        for (int i = 0; i < r.maxDoc(); i++) {
            if (liveDocs == null || liveDocs.get(i)) {
                Document d = r.document(i);

                String uinfo = d.get(ArtifactInfo.UINFO);

                if (uinfo != null) {
                    // if ctx is receiving updates (in other words, is a proxy),
                    // there is no need to build a huge Set of strings with all uinfo's
                    // as deletion detection in those cases has no effect. Also, the
                    // removeDeletedArtifacts() method, that uses info gathered in this set
                    // is invoked with same condition. As indexes of Central are getting huge,
                    // the set grows enormously too, but is actually not used
                    if (!ctx.isReceivingUpdates()) {
                        uinfos.add(uinfo);
                    }

                    // add all existing groupIds to the lists, as they will
                    // not be "discovered" and would be missing from the new list..
                    String groupId = uinfo.substring(0, uinfo.indexOf('|'));
                    int n = groupId.indexOf('.');
                    groups.add(n == -1 ? groupId : groupId.substring(0, n));
                    allGroups.add(groupId);
                }
            }
        }
    } finally {
        ctx.releaseIndexSearcher(indexSearcher);
    }
}

From source file:org.apache.maven.index.FSDirectoryDeleteTest.java

License:Apache License

public void testIndexAndDelete() throws Exception {
    final IndexSearcher indexSearcher = context.acquireIndexSearcher();
    final IndexSearcher otherIndexSearcher = otherContext.acquireIndexSearcher();

    indexSearcher.getIndexReader().maxDoc();
    otherIndexSearcher.getIndexReader().maxDoc();

    context.releaseIndexSearcher(indexSearcher);
    otherContext.releaseIndexSearcher(otherIndexSearcher);

    context.replace(otherIndexDir);

    context.merge(otherIndexDir);
}

From source file:org.apache.maven.index.FullIndexNexusIndexerTest.java

License:Apache License

public void testIndexTimestamp() throws Exception {
    final File targetDir = Files.createTempDirectory("testIndexTimestamp").toFile();
    targetDir.deleteOnExit();

    final IndexPacker indexPacker = lookup(IndexPacker.class);
    final IndexSearcher indexSearcher = context.acquireIndexSearcher();
    try {
        final IndexPackingRequest request = new IndexPackingRequest(context, indexSearcher.getIndexReader(),
                targetDir);
        indexPacker.packIndex(request);
    } finally {
        context.releaseIndexSearcher(indexSearcher);
    }

    Thread.sleep(1000L);

    File newIndex = new File(getBasedir(), "target/test-new");

    Directory newIndexDir = FSDirectory.open(newIndex.toPath());

    IndexingContext newContext = nexusIndexer.addIndexingContext("test-new", "test", null, newIndexDir, null,
            null, DEFAULT_CREATORS);

    final IndexUpdater indexUpdater = lookup(IndexUpdater.class);
    indexUpdater.fetchAndUpdateIndex(
            new IndexUpdateRequest(newContext, new DefaultIndexUpdater.FileFetcher(targetDir)));

    assertEquals(context.getTimestamp().getTime(), newContext.getTimestamp().getTime());

    assertEquals(context.getTimestamp(), newContext.getTimestamp());

    // make sure context has the same artifacts

    Query query = nexusIndexer.constructQuery(MAVEN.GROUP_ID, "qdox", SearchType.SCORED);

    FlatSearchRequest request = new FlatSearchRequest(query, newContext);
    FlatSearchResponse response = nexusIndexer.searchFlat(request);
    Collection<ArtifactInfo> r = response.getResults();

    assertEquals(2, r.size());

    List<ArtifactInfo> list = new ArrayList<>(r);

    assertEquals(2, list.size());

    ArtifactInfo ai = list.get(0);

    assertEquals("1.6.1", ai.getVersion());

    ai = list.get(1);

    assertEquals("1.5", ai.getVersion());

    assertEquals("test", ai.getRepository());

    Date timestamp = newContext.getTimestamp();

    newContext.close(false);

    newIndexDir = FSDirectory.open(newIndex.toPath());

    newContext = nexusIndexer.addIndexingContext("test-new", "test", null, newIndexDir, null, null,
            DEFAULT_CREATORS);

    indexUpdater.fetchAndUpdateIndex(
            new IndexUpdateRequest(newContext, new DefaultIndexUpdater.FileFetcher(targetDir)));

    assertEquals(timestamp, newContext.getTimestamp());

    newContext.close(true);

    assertFalse(new File(newIndex, "timestamp").exists());
}

From source file:org.apache.maven.index.incremental.DefaultIncrementalHandlerTest.java

License:Apache License

public void testUpdateInvalidProperties() throws Exception {
    final IndexSearcher indexSearcher = context.acquireIndexSearcher();
    try {
        Properties properties = new Properties();

        IndexPackingRequest request = new IndexPackingRequest(context, indexSearcher.getIndexReader(),
                indexDir);

        // No properties: definite fail
        assertNull(handler.getIncrementalUpdates(request, properties));

        properties.setProperty(IndexingContext.INDEX_TIMESTAMP, "junk");

        // property set, but invalid
        assertNull(handler.getIncrementalUpdates(request, properties));

        properties.setProperty(IndexingContext.INDEX_TIMESTAMP, "19991112182432.432 -0600");

        List<Integer> updates = handler.getIncrementalUpdates(request, properties);

        assertEquals(updates.size(), 0);
    } finally {
        context.releaseIndexSearcher(indexSearcher);
    }
}