Example usage for org.apache.lucene.store RAMDirectory listAll

List of usage examples for org.apache.lucene.store RAMDirectory listAll

Introduction

On this page you can find example usages of org.apache.lucene.store RAMDirectory.listAll.

Prototype

@Override
    public final String[] listAll() 

Source Link

Usage

From source file:DocIndexer.java

License:Apache License

/**
 * Packs every file of the given RAM directory into an in-memory ZIP archive.
 *
 * @param dir the directory whose files are archived
 * @return the raw bytes of the resulting ZIP stream
 * @throws IOException if a directory file cannot be read
 */
private byte[] zip(RAMDirectory dir) throws IOException {
    ByteArrayOutputStream archive = new ByteArrayOutputStream();
    try (ZipOutputStream zipOut = new ZipOutputStream(archive)) {
        for (String fileName : dir.listAll()) {
            // NOTE(review): a null IOContext relies on RAMDirectory ignoring it — confirm.
            try (IndexInput input = dir.openInput(fileName, null)) {
                int length = (int) input.length();
                byte[] contents = new byte[length];
                input.readBytes(contents, 0, length);
                ZipEntry entry = new ZipEntry(fileName);
                entry.setSize(length);
                zipOut.putNextEntry(entry);
                zipOut.write(contents, 0, length);
                zipOut.closeEntry();
            }
        }
    }

    return archive.toByteArray();
}

From source file:com.eclipsesource.connect.search.IndexerTest.java

License:Open Source License

/** Running the indexer against an empty RAMDirectory must leave index files behind. */
@Test
public void testFillsIndex() {
    Map<String, String> searchData = new HashMap<>();
    searchData.put("bar", "bar1");
    RAMDirectory directory = new RAMDirectory();
    DirectoryStatusHolder statusHolder = new DirectoryStatusHolder();
    Indexer indexer = new Indexer(createParticipant("foo", searchData, "foo1", "foo2"), statusHolder,
            directory);

    indexer.run();

    assertThat(directory.listAll()).isNotEmpty();
}

From source file:com.xiaomi.linden.hadoop.indexing.reduce.RAMDirectoryUtil.java

License:Apache License

/**
 * Write a number of files from a ram directory to a data output.
 * @param out  the data output/*  www.  j a  v  a2s. c om*/
 * @param dir  the ram directory
 * @throws IOException
 */
public static void writeRAMFiles(DataOutput out, RAMDirectory dir) throws IOException {
    String[] names = dir.listAll();
    out.writeInt(names.length);
    for (int i = 0; i < names.length; i++) {
        Text.writeString(out, names[i]);
        long length = dir.fileLength(names[i]);
        out.writeLong(length);

        if (length > 0) {
            // can we avoid the extra copy?
            IndexInput input = null;
            try {
                IOContext context = new IOContext();
                input = dir.openInput(names[i], context);

                int position = 0;
                byte[] buffer = new byte[BUFFER_SIZE];

                while (position < length) {
                    int len = position + BUFFER_SIZE <= length ? BUFFER_SIZE : (int) (length - position);
                    input.readBytes(buffer, 0, len);
                    out.write(buffer, 0, len);
                    position += len;
                }
            } finally {
                if (input != null) {
                    input.close();
                }
            }
        }
    }
}

From source file:org.apache.blur.filter.FilterCacheTest.java

License:Apache License

/** Repeated identical searches must be served from the filter cache, not re-run. */
@Test
public void test2() throws IOException {
    Filter filter = new QueryWrapperFilter(new TermQuery(new Term("f1", "t1")));
    FilterCache cache = new FilterCache("filter1", filter);
    RAMDirectory dir = new RAMDirectory();
    writeDocs(cache, dir);
    DirectoryReader indexReader = DirectoryReader.open(dir);

    IndexSearcher indexSearcher = new IndexSearcher(indexReader);
    Query termQuery = new TermQuery(new Term("f2", "t2"));

    // First search hits a cold cache: exactly one miss, no hits.
    TopDocs firstResult = indexSearcher.search(termQuery, cache, 10);
    assertEquals(1, cache.getMisses());
    assertEquals(0, cache.getHits());
    assertEquals(1, firstResult.totalHits);

    // Second search is answered from the cache.
    TopDocs secondResult = indexSearcher.search(termQuery, cache, 10);
    assertEquals(1, cache.getMisses());
    assertEquals(1, cache.getHits());
    assertEquals(1, secondResult.totalHits);

    // Third search increments only the hit counter again.
    TopDocs thirdResult = indexSearcher.search(termQuery, cache, 10);
    assertEquals(1, cache.getMisses());
    assertEquals(2, cache.getHits());
    assertEquals(1, thirdResult.totalHits);

    // Dump directory contents before and after another write pass.
    System.out.println("===============");
    for (String file : new TreeSet<String>(Arrays.asList(dir.listAll()))) {
        System.out.println(file);
    }

    writeDocs(cache, dir);

    System.out.println("===============");
    for (String file : new TreeSet<String>(Arrays.asList(dir.listAll()))) {
        System.out.println(file);
    }
}

From source file:org.apache.blur.filter.IndexFileBitSetTest.java

License:Apache License

/** Round-trips a randomly populated FixedBitSet through IndexFileBitSet and compares iterators. */
@Test
public void test() throws IOException {
    Random rnd = new Random(_seed);
    int bitCount = rnd.nextInt(10000000);
    FixedBitSet expected = new FixedBitSet(bitCount);
    populate(rnd, bitCount, expected);
    RAMDirectory dir = new RAMDirectory();
    String id = "id";
    String segmentName = "seg1";
    IndexFileBitSet persisted = new IndexFileBitSet(bitCount, id, segmentName, dir);
    assertFalse(persisted.exists());
    persisted.create(expected.iterator());
    persisted.load();
    checkEquals(expected.iterator(), persisted.iterator(), bitCount);
    persisted.close();

    // List the files the bit set wrote, with their sizes, for manual inspection.
    for (String file : dir.listAll()) {
        System.out.println(file + " " + dir.fileLength(file));
    }
}

From source file:org.apache.blur.lucene.codec.Blur022CodecTest.java

License:Apache License

/**
 * Indexes one large document with the default Blur022Codec and again with
 * HIGH_COMPRESSION stored fields, then compares retrieval timings and reports
 * the resulting .fdt file sizes.
 */
@Test
public void testLargeDocs() throws IOException {
    Random random = new Random();
    Iterable<? extends IndexableField> doc = getLargeDoc(random);
    RAMDirectory dir = new RAMDirectory();

    // First segment: default codec settings.
    IndexWriterConfig defaultConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    defaultConf.setCodec(new Blur022Codec());
    IndexWriter defaultWriter = new IndexWriter(dir, defaultConf);
    defaultWriter.addDocument(doc);
    defaultWriter.close();

    DirectoryReader defaultReader = DirectoryReader.open(dir);
    assertEquals(1, defaultReader.numDocs());

    // Second segment: same document, high-compression stored fields.
    IndexWriterConfig compressedConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    compressedConf.setCodec(new Blur022Codec(1 << 16, CompressionMode.HIGH_COMPRESSION));
    IndexWriter compressedWriter = new IndexWriter(dir, compressedConf);
    compressedWriter.addDocument(doc);
    compressedWriter.close();

    DirectoryReader compressedReader = DirectoryReader.open(dir);
    assertEquals(2, compressedReader.numDocs());

    // Time document retrieval from each reader, twice to observe warm-up effects.
    for (int round = 0; round < 2; round++) {

        long start = System.nanoTime();
        Document fromDefault = defaultReader.document(0);
        long mid = System.nanoTime();
        Document fromCompressed = compressedReader.document(1);
        long end = System.nanoTime();

        System.out.println((end - mid) / 1000000.0);
        System.out.println((mid - start) / 1000000.0);

        System.out.println("doc1 " + fromDefault.hashCode());
        System.out.println("doc2 " + fromCompressed.hashCode());
    }

    // Report the size of the stored-fields (.fdt) files produced by both codecs.
    for (String file : dir.listAll()) {
        if (file.endsWith(".fdt")) {
            System.out.println(file);
            System.out.println(dir.fileLength(file));
        }
    }

}

From source file:org.apache.blur.lucene.codec.Blur022CodecTest.java

License:Apache License

/**
 * Indexes 1000 identical small documents with the default Blur022Codec and
 * again with HIGH_COMPRESSION, then compares bulk-retrieval timings and the
 * hash sums of both result sets, and reports .fdt file sizes.
 */
@Test
public void testSmallDocs() throws IOException {

    RAMDirectory dir = new RAMDirectory();

    // First segment: default codec, seeded random so both passes index the same docs.
    IndexWriterConfig defaultConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    defaultConf.setCodec(new Blur022Codec());
    Random defaultRandom = new Random(1);
    IndexWriter defaultWriter = new IndexWriter(dir, defaultConf);
    for (int i = 0; i < 1000; i++) {
        defaultWriter.addDocument(getSmallDoc(defaultRandom));
    }
    defaultWriter.close();

    DirectoryReader defaultReader = DirectoryReader.open(dir);
    assertEquals(1000, defaultReader.numDocs());

    // Second segment: same docs, high-compression stored fields.
    IndexWriterConfig compressedConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    compressedConf.setCodec(new Blur022Codec(1 << 16, CompressionMode.HIGH_COMPRESSION));
    Random compressedRandom = new Random(1);
    IndexWriter compressedWriter = new IndexWriter(dir, compressedConf);
    for (int i = 0; i < 1000; i++) {
        compressedWriter.addDocument(getSmallDoc(compressedRandom));
    }
    compressedWriter.close();

    DirectoryReader compressedReader = DirectoryReader.open(dir);
    assertEquals(2000, compressedReader.numDocs());

    // Time bulk retrieval from each reader, twice to observe warm-up effects.
    for (int round = 0; round < 2; round++) {

        long start = System.nanoTime();
        long defaultHashSum = 0;
        long compressedHashSum = 0;
        for (int d = 0; d < 1000; d++) {
            Document fromDefault = defaultReader.document(d);
            defaultHashSum += fromDefault.hashCode();
        }
        long mid = System.nanoTime();
        for (int d = 0; d < 1000; d++) {
            // Docs 1000..1999 belong to the compressed segment.
            Document fromCompressed = compressedReader.document(d + 1000);
            compressedHashSum += fromCompressed.hashCode();
        }
        long end = System.nanoTime();

        System.out.println((end - mid) / 1000000.0);
        System.out.println((mid - start) / 1000000.0);

        System.out.println("doc1 " + defaultHashSum);
        System.out.println("doc2 " + compressedHashSum);
    }

    // Report the size of the stored-fields (.fdt) files produced by both codecs.
    for (String file : dir.listAll()) {
        if (file.endsWith(".fdt")) {
            System.out.println(file);
            System.out.println(dir.fileLength(file));
        }
    }
}

From source file:org.apache.blur.lucene.codec.Blur024CodecTest.java

License:Apache License

/**
 * Indexes one large document with the default Blur024Codec and again with
 * HIGH_COMPRESSION stored fields, then compares retrieval timings and reports
 * the resulting .fdt file sizes.
 */
@Test
public void testLargeDocs() throws IOException {
    Random random = new Random();
    Iterable<? extends IndexableField> doc = getLargeDoc(random);
    RAMDirectory dir = new RAMDirectory();

    // First segment: default codec settings.
    IndexWriterConfig defaultConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    defaultConf.setCodec(new Blur024Codec());
    IndexWriter defaultWriter = new IndexWriter(dir, defaultConf);
    defaultWriter.addDocument(doc);
    defaultWriter.close();

    DirectoryReader defaultReader = DirectoryReader.open(dir);
    assertEquals(1, defaultReader.numDocs());

    // Second segment: same document, high-compression stored fields.
    IndexWriterConfig compressedConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    compressedConf.setCodec(new Blur024Codec(1 << 16, CompressionMode.HIGH_COMPRESSION));
    IndexWriter compressedWriter = new IndexWriter(dir, compressedConf);
    compressedWriter.addDocument(doc);
    compressedWriter.close();

    DirectoryReader compressedReader = DirectoryReader.open(dir);
    assertEquals(2, compressedReader.numDocs());

    // Time document retrieval from each reader, twice to observe warm-up effects.
    for (int round = 0; round < 2; round++) {

        long start = System.nanoTime();
        Document fromDefault = defaultReader.document(0);
        long mid = System.nanoTime();
        Document fromCompressed = compressedReader.document(1);
        long end = System.nanoTime();

        System.out.println((end - mid) / 1000000.0);
        System.out.println((mid - start) / 1000000.0);

        System.out.println("doc1 " + fromDefault.hashCode());
        System.out.println("doc2 " + fromCompressed.hashCode());
    }

    // Report the size of the stored-fields (.fdt) files produced by both codecs.
    for (String file : dir.listAll()) {
        if (file.endsWith(".fdt")) {
            System.out.println(file);
            System.out.println(dir.fileLength(file));
        }
    }

}

From source file:org.apache.blur.lucene.codec.Blur024CodecTest.java

License:Apache License

/**
 * Indexes 1000 identical small documents with the default Blur024Codec and
 * again with HIGH_COMPRESSION, then compares bulk-retrieval timings and the
 * hash sums of both result sets, and reports .fdt file sizes.
 */
@Test
public void testSmallDocs() throws IOException {

    RAMDirectory dir = new RAMDirectory();

    // First segment: default codec, seeded random so both passes index the same docs.
    IndexWriterConfig defaultConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    defaultConf.setCodec(new Blur024Codec());
    Random defaultRandom = new Random(1);
    IndexWriter defaultWriter = new IndexWriter(dir, defaultConf);
    for (int i = 0; i < 1000; i++) {
        defaultWriter.addDocument(getSmallDoc(defaultRandom));
    }
    defaultWriter.close();

    DirectoryReader defaultReader = DirectoryReader.open(dir);
    assertEquals(1000, defaultReader.numDocs());

    // Second segment: same docs, high-compression stored fields.
    IndexWriterConfig compressedConf = new IndexWriterConfig(Version.LUCENE_43,
            new WhitespaceAnalyzer(Version.LUCENE_43));
    compressedConf.setCodec(new Blur024Codec(1 << 16, CompressionMode.HIGH_COMPRESSION));
    Random compressedRandom = new Random(1);
    IndexWriter compressedWriter = new IndexWriter(dir, compressedConf);
    for (int i = 0; i < 1000; i++) {
        compressedWriter.addDocument(getSmallDoc(compressedRandom));
    }
    compressedWriter.close();

    DirectoryReader compressedReader = DirectoryReader.open(dir);
    assertEquals(2000, compressedReader.numDocs());

    // Time bulk retrieval from each reader, twice to observe warm-up effects.
    for (int round = 0; round < 2; round++) {

        long start = System.nanoTime();
        long defaultHashSum = 0;
        long compressedHashSum = 0;
        for (int d = 0; d < 1000; d++) {
            Document fromDefault = defaultReader.document(d);
            defaultHashSum += fromDefault.hashCode();
        }
        long mid = System.nanoTime();
        for (int d = 0; d < 1000; d++) {
            // Docs 1000..1999 belong to the compressed segment.
            Document fromCompressed = compressedReader.document(d + 1000);
            compressedHashSum += fromCompressed.hashCode();
        }
        long end = System.nanoTime();

        System.out.println((end - mid) / 1000000.0);
        System.out.println((mid - start) / 1000000.0);

        System.out.println("doc1 " + defaultHashSum);
        System.out.println("doc2 " + compressedHashSum);
    }

    // Report the size of the stored-fields (.fdt) files produced by both codecs.
    for (String file : dir.listAll()) {
        if (file.endsWith(".fdt")) {
            System.out.println(file);
            System.out.println(dir.fileLength(file));
        }
    }
}