Example usage for org.apache.lucene.index IndexWriterConfig setMergePolicy

Introduction

On this page you can find usage examples for org.apache.lucene.index.IndexWriterConfig.setMergePolicy.

Prototype

@Override
public IndexWriterConfig setMergePolicy(MergePolicy mergePolicy)

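A minimal, self-contained sketch of the typical call pattern (assuming the Lucene 5.x-style API where IndexWriterConfig takes only an Analyzer; imports such as org.apache.lucene.analysis.standard.*, org.apache.lucene.index.*, org.apache.lucene.store.* and java.nio.file.Paths are elided, matching the style of the examples below):

    IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
    TieredMergePolicy mergePolicy = new TieredMergePolicy();
    mergePolicy.setMaxMergedSegmentMB(5 * 1024);  // cap merged segments at ~5 GB
    iwc.setMergePolicy(mergePolicy);              // returns the config itself, so calls can be chained
    IndexWriter writer = new IndexWriter(FSDirectory.open(Paths.get("index")), iwc);

Note that the method returns the IndexWriterConfig it was called on, which is why the prototype's return type is IndexWriterConfig rather than void.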
Usage

From source file: org.sonatype.nexus.index.NexusIndexingContext.java

License: Open Source License

@Override
protected IndexWriterConfig getWriterConfig() {
    final IndexWriterConfig writerConfig = super.getWriterConfig();

    // NEXUS-5380 force use of compound lucene index file to postpone "Too many open files"

    final TieredMergePolicy mergePolicy = new TieredMergePolicy();
    mergePolicy.setUseCompoundFile(true);
    mergePolicy.setNoCFSRatio(1.0);

    writerConfig.setMergePolicy(mergePolicy);

    return writerConfig;
}
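A note on the two compound-file calls above: setUseCompoundFile(true) enables the compound (.cfs) format on the merge policy, and setNoCFSRatio(1.0) removes the size threshold above which merged segments would otherwise fall back to individual files, so every merged segment stays compound and the writer holds far fewer open file handles, at some indexing cost. Both setters exist on TieredMergePolicy in the Lucene 3.x line this example appears to target; in later versions the flush-time setting moves to IndexWriterConfig.setUseCompoundFile (as in the perf.Indexer example further down).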

From source file: org.wikimedia.revdiffsearch.IndexMerger.java

License: Open Source License

/** Merge the given existing indexes into a single index. */
public static void main(String[] args) {

    if (args.length < 2) {
        System.out.println("Usage: java -jar IndexMerger.jar "
                + "merged_index_dir existing_index_dir1 existing_index_dir2 ...");
        System.out.println(" merged_index_dir: A directory where the merged " + "index will be stored");
        System.out.println("   e.g. merged_indexes");
        System.out.println(" existing_indexes_dir: A directory where the "
                + "indexes that have to be merged exist");
        System.out.println("   e.g. indexes/");
        System.out.println("   e.g.         index1");
        System.out.println("   e.g.         index2");
        System.out.println("   e.g.         index3");
        System.exit(1);
    }

    int ramBufferSizeMB = 1024;
    int ngram = 3;
    File INDEX_DIR = new File(args[0]);
    {
        String s;
        if ((s = System.getProperty("ngram")) != null) {
            ngram = Integer.parseInt(s);
        }
        if ((s = System.getProperty("ramBufferSize")) != null) {
            ramBufferSizeMB = Integer.parseInt(s);
        }
    }

    INDEX_DIR.mkdir();

    Date start = new Date();

    try {
        IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_44, new SimpleNGramAnalyzer(ngram));
        LogDocMergePolicy lmp = new LogDocMergePolicy();
        lmp.setMergeFactor(1000);
        cfg.setRAMBufferSizeMB(ramBufferSizeMB);
        cfg.setMergePolicy(lmp);

        IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR), cfg);

        // IndexWriter writer = new IndexWriter(INDEX_DIR,
        // new StandardAnalyzer(Version.LUCENE_44),
        // true);
        // writer.setMergeFactor(1000);
        // writer.setRAMBufferSizeMB(50);

        List<Directory> indexes = new ArrayList<Directory>();

        for (String indexdir : Arrays.asList(args).subList(1, args.length)) {
            System.out.println("Adding: " + indexdir);
            indexes.add(FSDirectory.open(new File(indexdir)));
        }

        System.out.print("Merging added indexes...");
        writer.addIndexes(indexes.toArray(new Directory[indexes.size()]));
        System.out.println("done");

        writer.close();
        System.out.println("index closed");

        Date end = new Date();
        System.out.println("It took: " + ((end.getTime() - start.getTime()) / 1000) + "\"");

    } catch (IOException e) {
        e.printStackTrace();
    }
}
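A hypothetical invocation (the paths are illustrative), showing how the ngram and ramBufferSize system properties read above override the defaults:

    java -Dngram=3 -DramBufferSize=512 -jar IndexMerger.jar merged_indexes indexes/index1 indexes/index2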

From source file: perf.AutoPrefixPerf.java

License: Apache License

public static void main(String[] args) throws Exception {
    String numbersFile = args[0];
    String queriesFile = args[1];
    Path indexPath = Paths.get(args[2]);

    int precStep = Integer.parseInt(args[3]);
    boolean useNumericField = (precStep != 0);
    int maxTermsInPrefix;
    int minTermsInPrefix;
    if (useNumericField == false) {
        minTermsInPrefix = Integer.parseInt(args[4]);
        maxTermsInPrefix = Integer.parseInt(args[5]);
    } else {
        minTermsInPrefix = 0;
        maxTermsInPrefix = 0;
    }

    BytesRefBuilder binaryToken = new BytesRefBuilder();
    binaryToken.grow(8);
    binaryToken.setLength(8);

    Directory dir = FSDirectory.open(indexPath);
    if (Files.notExists(indexPath) == false) {
        IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
        iwc.setMaxBufferedDocs(30000);
        iwc.setRAMBufferSizeMB(-1);
        iwc.setMergePolicy(new LogDocMergePolicy());

        final PostingsFormat pf;

        if (useNumericField) {
            // Disable auto-prefix when testing NumericField!
            if (minTermsInPrefix != 0) {
                throw new IllegalArgumentException("only precStep or minTermsInPrefix should be non-zero");
            }
            pf = new Lucene50PostingsFormat(25, 48, 0, 0);
        } else {
            /*
            if (minTermsInPrefix == 0) {
              throw new IllegalArgumentException("one of precStep or minTermsInPrefix must be non-zero");
            }
            */
            pf = new Lucene50PostingsFormat(25, 48, minTermsInPrefix, maxTermsInPrefix);
            //pf = new Lucene50PostingsFormat(25, 48, minTermsInPrefix, Integer.MAX_VALUE);
        }

        iwc.setCodec(new Lucene53Codec() {
            @Override
            public PostingsFormat getPostingsFormatForField(String field) {
                return pf;
            }
        });

        iwc.setInfoStream(new PrintStreamInfoStream(System.out));
        iwc.setMergeScheduler(new SerialMergeScheduler());

        //TieredMergePolicy tmp = (TieredMergePolicy) iwc.getMergePolicy();
        //tmp.setFloorSegmentMB(.1);
        //ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) iwc.getMergeScheduler();
        // More concurrency (for SSD)
        //cms.setMaxMergesAndThreads(5, 3);
        final IndexWriter w = new IndexWriter(dir, iwc);

        Document doc = new Document();
        Field field;
        if (useNumericField) {
            FieldType longFieldType = new FieldType(LongField.TYPE_NOT_STORED);
            longFieldType.setNumericPrecisionStep(precStep);
            longFieldType.freeze();
            field = new LongField("number", 0L, longFieldType);
            doc.add(field);
        } else {
            FieldType longFieldType = new FieldType(TextField.TYPE_NOT_STORED);
            longFieldType.setIndexOptions(IndexOptions.DOCS_ONLY);
            longFieldType.setOmitNorms(true);
            longFieldType.setIndexRanges(true);
            longFieldType.freeze();
            field = new Field("number", new BinaryTokenStream(binaryToken.get()), longFieldType);
            doc.add(field);
        }

        long startMS = System.currentTimeMillis();

        // 64K buffer:
        InputStream is = new FileInputStream(numbersFile);
        BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"), 1 << 16);

        int count = 0;
        while (true) {
            String line = reader.readLine();
            if (line == null) {
                break;
            }
            long v = Long.parseLong(line.trim());
            if (useNumericField) {
                field.setLongValue(v);
            } else {
                //NumericUtils.longToPrefixCoded(v, 0, binaryToken);
                longToBytes(v, binaryToken);
                //if (bytesToLong(binaryToken.get()) != v) {
                //  throw new RuntimeException("wrong long: v=" + v + " vs " + bytesToLong(binaryToken.get()));
                //}
            }
            w.addDocument(doc);
            count++;
            if (count % 200000 == 0) {
                long ms = System.currentTimeMillis();
                System.out.println("Indexed " + count + ": " + ((ms - startMS) / 1000.0) + " sec");
            }
        }
        reader.close();

        System.out.println(
                "Final Indexed " + count + ": " + ((System.currentTimeMillis() - startMS) / 1000.0) + " sec");

        // nocommit just to make debugging easier:
        //System.out.println("Optimize...");
        //w.forceMerge(1);

        System.out.println("Close...");
        w.close();
        System.out.println("After close: " + ((System.currentTimeMillis() - startMS) / 1000.0) + " sec");

        // Print CheckIndex:
        ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
        CheckIndex checker = new CheckIndex(dir);
        checker.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8), true);
        CheckIndex.Status status = checker.checkIndex();
        System.out.println("Done CheckIndex:");
        System.out.println(bos.toString(IOUtils.UTF_8));
        if (status.clean == false) {
            throw new IllegalStateException("CheckIndex failed");
        }

        SegmentInfos infos = new SegmentInfos();
        infos.read(dir);

        long totBytes = 0;
        for (SegmentCommitInfo info : infos) {
            totBytes += info.sizeInBytes();
        }
        System.out.println("\nTotal index size: " + totBytes + " bytes");
    } else {
        System.out.println("Skip indexing: index already exists");
    }

    List<Query> queries = new ArrayList<>();
    InputStream is = new FileInputStream(queriesFile);
    BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"), 1 << 16);
    while (true) {
        String line = reader.readLine();
        if (line == null) {
            break;
        }
        String[] numbers = line.trim().split(" ");
        if (numbers.length != 2) {
            throw new IllegalArgumentException("could not parse query line: " + line);
        }
        long minValue = Long.parseLong(numbers[0]);
        long maxValue = Long.parseLong(numbers[1]);
        if (useNumericField) {
            queries.add(NumericRangeQuery.newLongRange("number", precStep, minValue, maxValue, true, true));
        } else {
            longToBytes(minValue, binaryToken);
            BytesRef minTerm = binaryToken.toBytesRef();
            longToBytes(maxValue, binaryToken);
            BytesRef maxTerm = binaryToken.toBytesRef();
            queries.add(new TermRangeQuery("number", minTerm, maxTerm, true, true));
        }

        if (queries.size() == 200) {
            break;
        }
    }

    DirectoryReader r = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(r);
    s.setQueryCache(null); // don't bench the cache

    printQueryTerms((MultiTermQuery) queries.get(0), s);

    long bestMS = Long.MAX_VALUE;
    for (int iter = 0; iter < 10; iter++) {
        long startMS = System.currentTimeMillis();
        long totalHits = 0;
        long hash = 0;
        for (Query query : queries) {
            TopDocs hits = s.search(query, 10);
            totalHits += hits.totalHits;
            hash = hash * 31 + hits.totalHits;
        }
        long ms = System.currentTimeMillis() - startMS;
        System.out.println("iter " + iter + ": " + ms + " msec; totalHits=" + totalHits + " hash=" + hash);
        if (ms < bestMS) {
            System.out.println("  **");
            bestMS = ms;
        }
    }

    /*
    long t0 = System.currentTimeMillis();
    long bytesUsed = 0;
    for(int i=0;i<1000;i++) {
      for(AtomicReaderContext ctx : r.leaves()) {
        bytesUsed += ((SegmentReader) ctx.reader()).ramBytesUsed();
      }
    }
    System.out.println((System.currentTimeMillis() - t0) + " msec for 1000 ramBytesUsed: " + (bytesUsed / 1000));
    */

    r.close();
    dir.close();
}
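The merge-policy configuration is the part of this benchmark relevant to this page; a condensed sketch of why those writer settings travel together (same assumed Lucene 5.x API as above):

    IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
    iwc.setMaxBufferedDocs(30000);                                // flush segments by doc count...
    iwc.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH); // ...never by RAM usage (-1)
    iwc.setMergePolicy(new LogDocMergePolicy());                  // merge by doc count, matching the flush trigger
    iwc.setMergeScheduler(new SerialMergeScheduler());            // run merges one at a time, in-thread

    // Segment geometry now depends only on the document stream, so repeated
    // benchmark runs produce identical indexes and comparable timings.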

From source file: perf.IndexAndSearchOpenStreetMaps.java

License: Apache License

private static void createIndex(boolean fast, boolean doForceMerge, boolean doDistanceSort)
        throws IOException, InterruptedException {

    CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder().onMalformedInput(CodingErrorAction.REPORT)
            .onUnmappableCharacter(CodingErrorAction.REPORT);

    int BUFFER_SIZE = 1 << 16; // 64K
    InputStream is;
    if (SMALL) {
        is = Files.newInputStream(Paths.get(DATA_LOCATION, "latlon.subsetPlusAllLondon.txt"));
    } else {
        is = Files.newInputStream(Paths.get(DATA_LOCATION, "latlon.txt"));
    }
    BufferedReader reader = new BufferedReader(new InputStreamReader(is, decoder), BUFFER_SIZE);

    int NUM_THREADS;
    if (fast) {
        NUM_THREADS = 4;
    } else {
        NUM_THREADS = 1;
    }

    int CHUNK = 10000;

    long t0 = System.nanoTime();
    AtomicLong totalCount = new AtomicLong();

    for (int part = 0; part < NUM_PARTS; part++) {
        Directory dir = FSDirectory.open(Paths.get(getName(part, doDistanceSort)));

        IndexWriterConfig iwc = new IndexWriterConfig(null);
        iwc.setCodec(getCodec(fast));
        iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
        if (fast) {
            ((TieredMergePolicy) iwc.getMergePolicy()).setMaxMergedSegmentMB(Double.POSITIVE_INFINITY);
            iwc.setRAMBufferSizeMB(1024);
        } else {
            iwc.setMaxBufferedDocs(109630);
            iwc.setMergePolicy(new LogDocMergePolicy());
            iwc.setMergeScheduler(new SerialMergeScheduler());
        }
        iwc.setInfoStream(new PrintStreamInfoStream(System.out));
        IndexWriter w = new IndexWriter(dir, iwc);

        Thread[] threads = new Thread[NUM_THREADS];
        AtomicBoolean finished = new AtomicBoolean();
        Object lock = new Object();

        final int finalPart = part;

        for (int t = 0; t < NUM_THREADS; t++) {
            threads[t] = new Thread() {
                @Override
                public void run() {
                    String[] lines = new String[CHUNK];
                    int chunkCount = 0;
                    while (finished.get() == false) {
                        try {
                            int count = CHUNK;
                            synchronized (lock) {
                                for (int i = 0; i < CHUNK; i++) {
                                    String line = reader.readLine();
                                    if (line == null) {
                                        count = i;
                                        finished.set(true);
                                        break;
                                    }
                                    lines[i] = line;
                                }
                                if (finalPart == 0 && totalCount.get() + count >= 2000000000) {
                                    finished.set(true);
                                }
                            }

                            for (int i = 0; i < count; i++) {
                                String[] parts = lines[i].split(",");
                                //long id = Long.parseLong(parts[0]);
                                double lat = Double.parseDouble(parts[1]);
                                double lon = Double.parseDouble(parts[2]);
                                Document doc = new Document();
                                if (useGeoPoint) {
                                    doc.add(new GeoPointField("point", lat, lon, Field.Store.NO));
                                } else if (useGeo3D || useGeo3DLarge) {
                                    doc.add(new Geo3DPoint("point", lat, lon));
                                } else {
                                    doc.add(new LatLonPoint("point", lat, lon));
                                    if (doDistanceSort) {
                                        doc.add(new LatLonDocValuesField("point", lat, lon));
                                    }
                                }
                                w.addDocument(doc);
                                long x = totalCount.incrementAndGet();
                                if (x % 1000000 == 0) {
                                    System.out.println(x + "...");
                                }
                            }
                            chunkCount++;
                            if (false && SMALL == false && chunkCount == 20000) {
                                System.out.println("NOW BREAK EARLY");
                                break;
                            }
                        } catch (IOException ioe) {
                            throw new RuntimeException(ioe);
                        }
                    }
                }
            };
            threads[t].start();
        }

        for (Thread thread : threads) {
            thread.join();
        }

        System.out.println("Part " + part + " is done: w.maxDoc()=" + w.maxDoc());
        w.commit();
        System.out.println("done commit");
        long t1 = System.nanoTime();
        System.out.println(((t1 - t0) / 1000000000.0) + " sec to index part " + part);
        if (doForceMerge) {
            w.forceMerge(1);
            long t2 = System.nanoTime();
            System.out.println(((t2 - t1) / 1000000000.0) + " sec to force merge part " + part);
        }
        w.close();
    }

    //System.out.println(totalCount.get() + " total docs");
    //System.out.println("Force merge...");
    //w.forceMerge(1);
    //long t2 = System.nanoTime();
    //System.out.println(((t2-t1)/1000000000.0) + " sec to force merge");

    //w.close();
    //long t3 = System.nanoTime();
    //System.out.println(((t3-t2)/1000000000.0) + " sec to close");
    //System.out.println(((t3-t2)/1000000000.0) + " sec to close");
}
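Design note on the two branches above: the fast path keeps the default TieredMergePolicy but uncaps its merged-segment size and enlarges the RAM buffer for raw throughput, while the slow path pins segment geometry (fixed maxBufferedDocs, LogDocMergePolicy, SerialMergeScheduler) so that repeated runs build indexes with identical segment structure.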

From source file: perf.Indexer.java

License: Apache License

private static void _main(String[] clArgs) throws Exception {

    Args args = new Args(clArgs);

    // EG: -facets Date -facets characterCount ...
    FacetsConfig facetsConfig = new FacetsConfig();
    facetsConfig.setHierarchical("Date", true);
    final Set<String> facetFields = new HashSet<String>();
    if (args.hasArg("-facets")) {
        for (String arg : args.getStrings("-facets")) {
            facetFields.add(arg);
        }
    }

    final String dirImpl = args.getString("-dirImpl");
    final String dirPath = args.getString("-indexPath") + "/index";

    final Directory dir;
    OpenDirectory od = OpenDirectory.get(dirImpl);

    dir = od.open(Paths.get(dirPath));

    final String analyzer = args.getString("-analyzer");
    final Analyzer a;
    if (analyzer.equals("EnglishAnalyzer")) {
        a = new EnglishAnalyzer();
    } else if (analyzer.equals("StandardAnalyzer")) {
        a = new StandardAnalyzer();
    } else if (analyzer.equals("StandardAnalyzerNoStopWords")) {
        a = new StandardAnalyzer(CharArraySet.EMPTY_SET);
    } else if (analyzer.equals("ShingleStandardAnalyzer")) {
        a = new ShingleAnalyzerWrapper(new StandardAnalyzer(), 2, 2);
    } else if (analyzer.equals("ShingleStandardAnalyzerNoStopWords")) {
        a = new ShingleAnalyzerWrapper(new StandardAnalyzer(CharArraySet.EMPTY_SET), 2, 2);
    } else {
        throw new RuntimeException("unknown analyzer " + analyzer);
    }

    final String lineFile = args.getString("-lineDocsFile");

    // -1 means all docs in the line file:
    final int docCountLimit = args.getInt("-docCountLimit");
    final int numThreads = args.getInt("-threadCount");

    final boolean doForceMerge = args.getFlag("-forceMerge");
    final boolean verbose = args.getFlag("-verbose");

    String indexSortField = null;
    SortField.Type indexSortType = null;

    if (args.hasArg("-indexSort")) {
        indexSortField = args.getString("-indexSort");

        int i = indexSortField.indexOf(':');
        if (i == -1) {
            throw new IllegalArgumentException(
                    "-indexSort should have form field:type; got: " + indexSortField);
        }
        String typeString = indexSortField.substring(i + 1);
        if (typeString.equals("long")) {
            indexSortType = SortField.Type.LONG;
        } else if (typeString.equals("string")) {
            indexSortType = SortField.Type.STRING;
        } else {
            throw new IllegalArgumentException("-indexSort can only handle 'long' sort; got: " + typeString);
        }
        indexSortField = indexSortField.substring(0, i);
    } else {
        indexSortType = null;
    }

    final double ramBufferSizeMB = args.getDouble("-ramBufferMB");
    final int maxBufferedDocs = args.getInt("-maxBufferedDocs");

    final String defaultPostingsFormat = args.getString("-postingsFormat");
    final boolean doDeletions = args.getFlag("-deletions");
    final boolean printDPS = args.getFlag("-printDPS");
    final boolean waitForMerges = args.getFlag("-waitForMerges");
    final boolean waitForCommit = args.getFlag("-waitForCommit");
    final String mergePolicy = args.getString("-mergePolicy");
    final Mode mode;
    final boolean doUpdate = args.getFlag("-update");
    if (doUpdate) {
        mode = Mode.UPDATE;
    } else {
        mode = Mode.valueOf(args.getString("-mode", "add").toUpperCase(Locale.ROOT));
    }
    int randomDocIDMax;
    if (mode == Mode.UPDATE) {
        randomDocIDMax = args.getInt("-randomDocIDMax");
    } else {
        randomDocIDMax = -1;
    }
    final String idFieldPostingsFormat = args.getString("-idFieldPostingsFormat");
    final boolean addGroupingFields = args.getFlag("-grouping");
    final boolean useCFS = args.getFlag("-cfs");
    final boolean storeBody = args.getFlag("-store");
    final boolean tvsBody = args.getFlag("-tvs");
    final boolean bodyPostingsOffsets = args.getFlag("-bodyPostingsOffsets");
    final int maxConcurrentMerges = args.getInt("-maxConcurrentMerges");
    final boolean addDVFields = args.getFlag("-dvfields");
    final boolean doRandomCommit = args.getFlag("-randomCommit");
    final boolean useCMS = args.getFlag("-useCMS");
    final boolean disableIOThrottle = args.getFlag("-disableIOThrottle");

    if (waitForCommit == false && waitForMerges) {
        throw new RuntimeException("pass -waitForCommit if you pass -waitForMerges");
    }

    if (waitForCommit == false && doForceMerge) {
        throw new RuntimeException("pass -waitForCommit if you pass -forceMerge");
    }

    if (waitForCommit == false && doDeletions) {
        throw new RuntimeException("pass -waitForCommit if you pass -deletions");
    }

    if (useCMS == false && disableIOThrottle) {
        throw new RuntimeException("-disableIOThrottle only makes sense with -useCMS");
    }

    final double nrtEverySec;
    if (args.hasArg("-nrtEverySec")) {
        nrtEverySec = args.getDouble("-nrtEverySec");
    } else {
        nrtEverySec = -1.0;
    }

    // True to start back at the beginning if we run out of
    // docs from the line file source:
    final boolean repeatDocs = args.getFlag("-repeatDocs");

    final String facetDVFormatName;
    if (facetFields.isEmpty()) {
        facetDVFormatName = "Lucene54";
    } else {
        facetDVFormatName = args.getString("-facetDVFormat");
    }

    if (addGroupingFields && docCountLimit == -1) {
        a.close();
        throw new RuntimeException("cannot add grouping fields unless docCount is set");
    }

    args.check();

    System.out.println("Dir: " + dirImpl);
    System.out.println("Index path: " + dirPath);
    System.out.println("Analyzer: " + analyzer);
    System.out.println("Line file: " + lineFile);
    System.out.println("Doc count limit: " + (docCountLimit == -1 ? "all docs" : "" + docCountLimit));
    System.out.println("Threads: " + numThreads);
    System.out.println("Force merge: " + (doForceMerge ? "yes" : "no"));
    System.out.println("Verbose: " + (verbose ? "yes" : "no"));
    System.out.println("RAM Buffer MB: " + ramBufferSizeMB);
    System.out.println("Max buffered docs: " + maxBufferedDocs);
    System.out.println("Default postings format: " + defaultPostingsFormat);
    System.out.println("Do deletions: " + (doDeletions ? "yes" : "no"));
    System.out.println("Wait for merges: " + (waitForMerges ? "yes" : "no"));
    System.out.println("Wait for commit: " + (waitForCommit ? "yes" : "no"));
    System.out.println("IO throttle: " + (disableIOThrottle ? "no" : "yes"));
    System.out.println("Merge policy: " + mergePolicy);
    System.out.println("Mode: " + mode);
    if (mode == Mode.UPDATE) {
        System.out.println("DocIDMax: " + randomDocIDMax);
    }
    System.out.println("ID field postings format: " + idFieldPostingsFormat);
    System.out.println("Add grouping fields: " + (addGroupingFields ? "yes" : "no"));
    System.out.println("Compound file format: " + (useCFS ? "yes" : "no"));
    System.out.println("Store body field: " + (storeBody ? "yes" : "no"));
    System.out.println("Term vectors for body field: " + (tvsBody ? "yes" : "no"));
    System.out.println("Facet DV Format: " + facetDVFormatName);
    System.out.println("Facet fields: " + facetFields);
    System.out.println("Body postings offsets: " + (bodyPostingsOffsets ? "yes" : "no"));
    System.out.println("Max concurrent merges: " + maxConcurrentMerges);
    System.out.println("Add DocValues fields: " + addDVFields);
    System.out.println("Use ConcurrentMergeScheduler: " + useCMS);
    if (nrtEverySec > 0.0) {
        System.out.println("Open & close NRT reader every: " + nrtEverySec + " sec");
    } else {
        System.out.println("Open & close NRT reader every: never");
    }
    System.out.println("Repeat docs: " + repeatDocs);

    if (verbose) {
        InfoStream.setDefault(new PrintStreamInfoStream(System.out));
    }

    final IndexWriterConfig iwc = new IndexWriterConfig(a);

    if (indexSortField != null) {
        iwc.setIndexSort(new Sort(new SortField(indexSortField, indexSortType)));
    }

    if (mode == Mode.UPDATE) {
        iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    } else {
        iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    }

    iwc.setMaxBufferedDocs(maxBufferedDocs);
    iwc.setRAMBufferSizeMB(ramBufferSizeMB);

    // So flushed segments do/don't use CFS:
    iwc.setUseCompoundFile(useCFS);

    final AtomicBoolean indexingFailed = new AtomicBoolean();

    iwc.setMergeScheduler(getMergeScheduler(indexingFailed, useCMS, maxConcurrentMerges, disableIOThrottle));
    iwc.setMergePolicy(getMergePolicy(mergePolicy, useCFS));

    // Keep all commit points:
    if (doDeletions || doForceMerge) {
        iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
    }

    final Codec codec = new Lucene62Codec() {
        @Override
        public PostingsFormat getPostingsFormatForField(String field) {
            return PostingsFormat.forName(field.equals("id") ? idFieldPostingsFormat : defaultPostingsFormat);
        }

        private final DocValuesFormat facetsDVFormat = DocValuesFormat.forName(facetDVFormatName);
        //private final DocValuesFormat lucene42DVFormat = DocValuesFormat.forName("Lucene42");
        //private final DocValuesFormat diskDVFormat = DocValuesFormat.forName("Disk");
        //        private final DocValuesFormat lucene45DVFormat = DocValuesFormat.forName("Lucene45");
        private final DocValuesFormat directDVFormat = DocValuesFormat.forName("Direct");

        @Override
        public DocValuesFormat getDocValuesFormatForField(String field) {
            if (facetFields.contains(field) || field.equals("$facets")) {
                return facetsDVFormat;
                //} else if (field.equals("$facets_sorted_doc_values")) {
                //return diskDVFormat;
            } else {
                // Use default DVFormat for all else:
                // System.out.println("DV: field=" + field + " format=" + super.getDocValuesFormatForField(field));
                return super.getDocValuesFormatForField(field);
            }
        }
    };

    iwc.setCodec(codec);

    System.out.println("IW config=" + iwc);

    IndexWriter w = new IndexWriter(dir, iwc);

    System.out.println("Index has " + w.maxDoc() + " docs");

    final TaxonomyWriter taxoWriter;
    if (facetFields.isEmpty() == false) {
        taxoWriter = new DirectoryTaxonomyWriter(od.open(Paths.get(args.getString("-indexPath"), "facets")),
                IndexWriterConfig.OpenMode.CREATE);
    } else {
        taxoWriter = null;
    }

    // Fixed seed so group field values are always consistent:
    final Random random = new Random(17);

    LineFileDocs lineFileDocs = new LineFileDocs(lineFile, repeatDocs, storeBody, tvsBody, bodyPostingsOffsets,
            false, taxoWriter, facetFields, facetsConfig, addDVFields);

    float docsPerSecPerThread = -1f;
    //float docsPerSecPerThread = 100f;

    IndexThreads threads = new IndexThreads(random, w, indexingFailed, lineFileDocs, numThreads, docCountLimit,
            addGroupingFields, printDPS, mode, docsPerSecPerThread, null, nrtEverySec, randomDocIDMax);

    System.out.println("\nIndexer: start");
    final long t0 = System.currentTimeMillis();

    threads.start();

    while (!threads.done() && indexingFailed.get() == false) {
        Thread.sleep(100);

        // Commits once per minute on average:
        if (doRandomCommit && random.nextInt(600) == 17) {
            System.out.println("Indexer: now commit");
            long commitStartNS = System.nanoTime();
            w.commit();
            System.out.println(String.format(Locale.ROOT, "Indexer: commit took %.1f msec",
                    (System.nanoTime() - commitStartNS) / 1000000.));
        }
    }

    threads.stop();

    final long t1 = System.currentTimeMillis();
    System.out.println("\nIndexer: indexing done (" + (t1 - t0) + " msec); total " + w.maxDoc() + " docs");
    // In update mode we cannot tell the final doc count, so skip the check below
    if (threads.failed.get()) {
        throw new RuntimeException("exceptions during indexing");
    }
    if (mode != Mode.UPDATE && docCountLimit != -1 && w.maxDoc() != docCountLimit) {
        throw new RuntimeException("w.maxDoc()=" + w.maxDoc() + " but expected " + docCountLimit);
    }

    final Map<String, String> commitData = new HashMap<String, String>();

    if (waitForMerges) {
        w.close();
        IndexWriterConfig iwc2 = new IndexWriterConfig(a);
        iwc2.setMergeScheduler(
                getMergeScheduler(indexingFailed, useCMS, maxConcurrentMerges, disableIOThrottle));
        iwc2.setMergePolicy(getMergePolicy(mergePolicy, useCFS));
        iwc2.setCodec(codec);
        iwc2.setUseCompoundFile(useCFS);
        iwc2.setMaxBufferedDocs(maxBufferedDocs);
        iwc2.setRAMBufferSizeMB(ramBufferSizeMB);
        if (indexSortField != null) {
            iwc2.setIndexSort(new Sort(new SortField(indexSortField, indexSortType)));
        }

        w = new IndexWriter(dir, iwc2);
        long t2 = System.currentTimeMillis();
        System.out.println("\nIndexer: waitForMerges done (" + (t2 - t1) + " msec)");
    }

    if (waitForCommit) {
        commitData.put("userData", "multi");
        w.setLiveCommitData(commitData.entrySet());
        long t2 = System.currentTimeMillis();
        w.commit();
        long t3 = System.currentTimeMillis();
        System.out.println("\nIndexer: commit multi (took " + (t3 - t2) + " msec)");
    } else {
        w.rollback();
        w = null;
    }

    if (doForceMerge) {
        long forceMergeStartMSec = System.currentTimeMillis();
        w.forceMerge(1);
        long forceMergeEndMSec = System.currentTimeMillis();
        System.out.println(
                "\nIndexer: force merge done (took " + (forceMergeEndMSec - forceMergeStartMSec) + " msec)");

        commitData.put("userData", "single");
        w.setLiveCommitData(commitData.entrySet());
        w.commit();
        final long t5 = System.currentTimeMillis();
        System.out.println("\nIndexer: commit single done (took " + (t5 - forceMergeEndMSec) + " msec)");
    }

    if (doDeletions) {
        final long t5 = System.currentTimeMillis();
        // Randomly delete 5% of the docs
        final Set<Integer> deleted = new HashSet<Integer>();
        final int maxDoc = w.maxDoc();
        final int toDeleteCount = (int) (maxDoc * 0.05);
        System.out.println("\nIndexer: delete " + toDeleteCount + " docs");
        while (deleted.size() < toDeleteCount) {
            final int id = random.nextInt(maxDoc);
            if (!deleted.contains(id)) {
                deleted.add(id);
                w.deleteDocuments(new Term("id", LineFileDocs.intToID(id)));
            }
        }
        final long t6 = System.currentTimeMillis();
        System.out.println("\nIndexer: deletes done (took " + (t6 - t5) + " msec)");

        commitData.put("userData", doForceMerge ? "delsingle" : "delmulti");
        w.setLiveCommitData(commitData.entrySet());
        w.commit();
        final long t7 = System.currentTimeMillis();
        System.out.println("\nIndexer: commit delmulti done (took " + (t7 - t6) + " msec)");

        if (doUpdate || w.numDocs() != maxDoc - toDeleteCount) {
            throw new RuntimeException(
                    "count mismatch: w.numDocs()=" + w.numDocs() + " but expected " + (maxDoc - toDeleteCount));
        }
    }

    if (taxoWriter != null) {
        System.out.println("Taxonomy has " + taxoWriter.getSize() + " ords");
        taxoWriter.commit();
        taxoWriter.close();
    }

    final long tCloseStart = System.currentTimeMillis();
    if (w != null) {
        w.close();
        w = null;
    }
    if (waitForCommit) {
        System.out.println("\nIndexer: at close: " + SegmentInfos.readLatestCommit(dir));
        System.out.println("\nIndexer: close took " + (System.currentTimeMillis() - tCloseStart) + " msec");
    }

    dir.close();
    final long tFinal = System.currentTimeMillis();
    System.out.println("\nIndexer: net bytes indexed " + threads.getBytesIndexed());

    final long indexingTime;
    if (waitForCommit) {
        indexingTime = tFinal - t0;
        System.out.println("\nIndexer: finished (" + indexingTime + " msec)");
    } else {
        indexingTime = t1 - t0;
        System.out.println("\nIndexer: finished (" + indexingTime + " msec), excluding commit");
    }
    System.out.println(
            "\nIndexer: " + (threads.getBytesIndexed() / 1024. / 1024. / 1024. / (indexingTime / 3600000.))
                    + " GB/hour plain text");
}
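getMergePolicy(...) is a helper defined elsewhere in this source and not shown here; a hypothetical sketch of what such a name-to-policy mapping might look like (this body is an assumption, not the original code):

    // Hypothetical sketch only; the real helper lives elsewhere in perf.Indexer's sources.
    private static MergePolicy getMergePolicy(String name, boolean useCFS) {
        MergePolicy mp;
        if (name.equals("TieredMergePolicy")) {
            mp = new TieredMergePolicy();
        } else if (name.equals("LogDocMergePolicy")) {
            mp = new LogDocMergePolicy();
        } else if (name.equals("LogByteSizeMergePolicy")) {
            mp = new LogByteSizeMergePolicy();
        } else {
            throw new IllegalArgumentException("unknown merge policy: " + name);
        }
        mp.setNoCFSRatio(useCFS ? 1.0 : 0.0); // honor -cfs for merged segments
        return mp;
    }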

From source file: perf.IndexGeoNames.java

License: Apache License

public static void main(String[] args) throws Exception {
    String geoNamesFile = args[0];
    File indexPath = new File(args[1]);
    int numThreads = Integer.parseInt(args[2]);
    int precStep = Integer.parseInt(args[3]);
    if (indexPath.exists()) {
        throw new IllegalArgumentException("please remove indexPath \"" + indexPath + "\" before running");
    }

    Directory dir = FSDirectory.open(indexPath);
    //IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_48, new StandardAnalyzer(Version.LUCENE_48));
    IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
    //iwc.setRAMBufferSizeMB(350);
    iwc.setInfoStream(new PrintStreamInfoStream(System.out));
    if (normal == false) {
        iwc.setRAMBufferSizeMB(1024);
        iwc.setMergePolicy(NoMergePolicy.INSTANCE);
        //iwc.setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
    } else {
        // 5/5 segments:
        iwc.setMaxBufferedDocs(157234);
        iwc.setRAMBufferSizeMB(-1);
    }
    //((ConcurrentMergeScheduler) iwc.getMergeScheduler()).setMaxMergesAndThreads(3, 1);
    final IndexWriter w = new IndexWriter(dir, iwc);

    final Field.Store store = Field.Store.NO;

    final FieldType doubleFieldType = new FieldType(
            store == Field.Store.NO ? DoubleField.TYPE_NOT_STORED : DoubleField.TYPE_STORED);
    doubleFieldType.setNumericPrecisionStep(precStep);
    doubleFieldType.freeze();

    final FieldType longFieldType = new FieldType(
            store == Field.Store.NO ? LongField.TYPE_NOT_STORED : LongField.TYPE_STORED);
    longFieldType.setNumericPrecisionStep(precStep);
    longFieldType.freeze();

    final FieldType intFieldType = new FieldType(
            store == Field.Store.NO ? IntField.TYPE_NOT_STORED : IntField.TYPE_STORED);
    intFieldType.setNumericPrecisionStep(precStep);
    intFieldType.freeze();

    // 64K buffer:
    InputStream is = new FileInputStream(geoNamesFile);
    final BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"), 1 << 16);
    final AtomicInteger docsIndexed = new AtomicInteger();

    final long startMS = System.currentTimeMillis();
    Thread[] threads = new Thread[numThreads];

    // With reuse it's ~ 38% faster (41.8 sec vs 67.0 sec):
    final boolean reuseDocAndFields = false;

    for (int i = 0; i < numThreads; i++) {
        threads[i] = new Thread() {
            @Override
            public void run() {
                ParsePosition datePos = new ParsePosition(0);
                SimpleDateFormat dateParser = new SimpleDateFormat("yyyy-MM-dd", Locale.US);

                if (reuseDocAndFields) {
                    Document doc = new Document();
                    IntField geoNameID = new IntField("geoNameID", 0, intFieldType);
                    doc.add(geoNameID);
                    TextField nameField = new TextField("name", "", store);
                    doc.add(nameField);
                    TextField asciiNameField = new TextField("asciiName", "", store);
                    doc.add(asciiNameField);
                    TextField alternateNameField = new TextField("alternateNames", "", store);
                    doc.add(alternateNameField);
                    StringField featureClassField = new StringField("featureClass", "", store);
                    doc.add(featureClassField);
                    StringField featureCodeField = new StringField("featureCode", "", store);
                    doc.add(featureCodeField);
                    StringField countryCodeField = new StringField("countryCode", "", store);
                    doc.add(countryCodeField);
                    StringField cc2Field = new StringField("cc2", "", store);
                    doc.add(cc2Field);
                    StringField admin1Field = new StringField("admin1", "", store);
                    doc.add(admin1Field);
                    StringField admin2Field = new StringField("admin2", "", store);
                    doc.add(admin2Field);
                    StringField admin3Field = new StringField("admin3", "", store);
                    doc.add(admin3Field);
                    StringField admin4Field = new StringField("admin4", "", store);
                    doc.add(admin4Field);
                    StringField tzField = new StringField("timezone", "", store);
                    doc.add(tzField);

                    while (true) {
                        try {

                            // Curiously BufferedReader.readLine seems to be thread-safe...
                            String line = reader.readLine();
                            if (line == null) {
                                break;
                            }
                            String[] values = line.split("\t");

                            geoNameID.setIntValue(Integer.parseInt(values[0]));
                            nameField.setStringValue(values[1]);
                            asciiNameField.setStringValue(values[2]);
                            alternateNameField.setStringValue(values[3]);

                            /*
                            if (values[4].isEmpty() == false) {
                              double v = Double.parseDouble(values[4]);
                              doc.add(new DoubleField("latitude", v, doubleFieldType));
                              doc.add(new DoubleDocValuesField("latitude", v));
                            }
                            if (values[5].isEmpty() == false) {
                              double v = Double.parseDouble(values[5]);
                              doc.add(new DoubleField("longitude", v, doubleFieldType));
                              doc.add(new DoubleDocValuesField("longitude", v));
                            }
                            */

                            featureClassField.setStringValue(values[6]);
                            featureCodeField.setStringValue(values[7]);
                            countryCodeField.setStringValue(values[8]);
                            cc2Field.setStringValue(values[9]);
                            admin1Field.setStringValue(values[10]);
                            admin2Field.setStringValue(values[11]);
                            admin3Field.setStringValue(values[12]);
                            admin4Field.setStringValue(values[13]);

                            /*
                            if (values[14].isEmpty() == false) {
                              long v = Long.parseLong(values[14]);
                              doc.add(new LongField("population", v, longFieldType));
                              doc.add(new NumericDocValuesField("population", v));
                            }
                            if (values[15].isEmpty() == false) {
                              long v = Long.parseLong(values[15]);
                              doc.add(new LongField("elevation", v, longFieldType));
                              doc.add(new NumericDocValuesField("elevation", v));
                            }
                            if (values[16].isEmpty() == false) {
                              doc.add(new IntField("dem", Integer.parseInt(values[16]), intFieldType));
                            }
                            */

                            tzField.setStringValue(values[17]);
                            /*
                            if (values[18].isEmpty() == false) {
                              datePos.setIndex(0);
                              Date date = dateParser.parse(values[18], datePos);
                              doc.add(new LongField("modified", date.getTime(), longFieldType));
                            }
                            */
                            w.addDocument(doc);
                            int count = docsIndexed.incrementAndGet();
                            if (count % 200000 == 0) {
                                long ms = System.currentTimeMillis();
                                System.out.println(count + ": " + ((ms - startMS) / 1000.0) + " sec");
                            }
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                } else {
                    while (true) {
                        try {

                            // Curiously BufferedReader.readLine seems to be thread-safe...
                            String line = reader.readLine();
                            if (line == null) {
                                break;
                            }
                            String[] values = line.split("\t");

                            Document doc = new Document();

                            doc.add(new IntField("geoNameID", Integer.parseInt(values[0]), intFieldType));
                            doc.add(new TextField("name", values[1], store));
                            doc.add(new TextField("asciiName", values[2], store));
                            doc.add(new TextField("alternateNames", values[3], store));

                            if (values[4].isEmpty() == false) {
                                double v = Double.parseDouble(values[4]);
                                doc.add(new DoubleField("latitude", v, doubleFieldType));
                                doc.add(new DoubleDocValuesField("latitude", v));
                            }
                            if (values[5].isEmpty() == false) {
                                double v = Double.parseDouble(values[5]);
                                doc.add(new DoubleField("longitude", v, doubleFieldType));
                                doc.add(new DoubleDocValuesField("longitude", v));
                            }

                            doc.add(new StringField("featureClass", values[6], store));
                            doc.add(new StringField("featureCode", values[7], store));
                            doc.add(new StringField("countryCode", values[8], store));
                            doc.add(new StringField("cc2", values[9], store));
                            doc.add(new StringField("admin1Code", values[10], store));
                            doc.add(new StringField("admin2Code", values[11], store));
                            doc.add(new StringField("admin3Code", values[12], store));
                            doc.add(new StringField("admin4Code", values[13], store));

                            if (values[14].isEmpty() == false) {
                                long v = Long.parseLong(values[14]);
                                doc.add(new LongField("population", v, longFieldType));
                                doc.add(new NumericDocValuesField("population", v));
                            }
                            if (values[15].isEmpty() == false) {
                                long v = Long.parseLong(values[15]);
                                doc.add(new LongField("elevation", v, longFieldType));
                                doc.add(new NumericDocValuesField("elevation", v));
                            }
                            if (values[16].isEmpty() == false) {
                                doc.add(new IntField("dem", Integer.parseInt(values[16]), intFieldType));
                            }

                            doc.add(new StringField("timezone", values[17], store));

                            if (values[18].isEmpty() == false) {
                                datePos.setIndex(0);
                                Date date = dateParser.parse(values[18], datePos);
                                doc.add(new LongField("modified", date.getTime(), longFieldType));
                            }
                            w.addDocument(doc);
                            int count = docsIndexed.incrementAndGet();
                            if (count % 200000 == 0) {
                                long ms = System.currentTimeMillis();
                                System.out.println(count + ": " + ((ms - startMS) / 1000.0) + " sec");
                            }
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
            }
        };
        threads[i].start();
    }
    DirectoryReader r = DirectoryReader.open(w, true);
    for (int i = 0; i < 100; i++) {
        DirectoryReader r2 = DirectoryReader.openIfChanged(r);
        if (r2 != null) {
            r.close();
            r = r2;
        }
        Thread.sleep(500);
    }
    if (r != null) {
        r.close();
        r = null;
    }
    for (int i = 0; i < numThreads; i++) {
        threads[i].join();
    }
    long ms = System.currentTimeMillis();
    System.out.println(docsIndexed + ": " + ((ms - startMS) / 1000.0) + " sec");
    //System.out.println("tot conflicts: " + BytesRefHash.totConflict);
    //w.shutdown(normal);
    w.close();
    dir.close();
}
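Here setMergePolicy is used to switch merging off entirely: NoMergePolicy.INSTANCE never selects merges, so each flushed segment is left as-is and the run measures raw indexing cost without any merge I/O. The "normal" branch instead fixes maxBufferedDocs and disables the RAM flush trigger to land on a predictable segment count.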

From source file: perf.IndexNumbers.java

License: Apache License

public static void main(String[] args) throws Exception {
    String numbersFile = args[0];
    File indexPath = new File(args[1]);
    int numThreads = Integer.parseInt(args[2]);
    int precStep = Integer.parseInt(args[3]);
    if (indexPath.exists()) {
        throw new IllegalArgumentException("please remove indexPath \"" + indexPath + "\" before running");
    }

    Directory dir = FSDirectory.open(indexPath);
    IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
    //iwc.setRAMBufferSizeMB(350);
    //iwc.setInfoStream(new PrintStreamInfoStream(System.out));
    if (normal == false) {
        iwc.setRAMBufferSizeMB(64);
        iwc.setMergePolicy(NoMergePolicy.INSTANCE);
    } else {
        // 5/5 segments:
        // 3273681 docs in twitter timestamps
        iwc.setMaxBufferedDocs(59522);
        iwc.setRAMBufferSizeMB(-1);
    }
    //((ConcurrentMergeScheduler) iwc.getMergeScheduler()).setMaxMergesAndThreads(3, 1);
    final IndexWriter w = new IndexWriter(dir, iwc);

    final Field.Store store = Field.Store.NO;

    final FieldType doubleFieldType = new FieldType(
            store == Field.Store.NO ? DoubleField.TYPE_NOT_STORED : DoubleField.TYPE_STORED);
    doubleFieldType.setNumericPrecisionStep(precStep);
    doubleFieldType.freeze();

    final FieldType longFieldType = new FieldType(
            store == Field.Store.NO ? LongField.TYPE_NOT_STORED : LongField.TYPE_STORED);
    longFieldType.setNumericPrecisionStep(precStep);
    longFieldType.freeze();

    final FieldType intFieldType = new FieldType(
            store == Field.Store.NO ? IntField.TYPE_NOT_STORED : IntField.TYPE_STORED);
    intFieldType.setNumericPrecisionStep(precStep);
    intFieldType.freeze();

    // 64K buffer:
    InputStream is = new FileInputStream(numbersFile);
    final BufferedReader reader = new BufferedReader(new InputStreamReader(is, "UTF-8"), 1 << 16);
    final AtomicInteger docsIndexed = new AtomicInteger();

    final long startMS = System.currentTimeMillis();
    Thread[] threads = new Thread[numThreads];
    for (int i = 0; i < numThreads; i++) {
        threads[i] = new Thread() {
            @Override
            public void run() {
                ParsePosition datePos = new ParsePosition(0);
                SimpleDateFormat dateParser = new SimpleDateFormat("yyyy-MM-dd", Locale.US);

                Document doc = new Document();
                Field field = new LongField("number", 0L, longFieldType);
                doc.add(field);

                while (true) {
                    try {
                        // Curiously BufferedReader.readLine seems to be thread-safe...
                        String line = reader.readLine();
                        if (line == null) {
                            break;
                        }
                        field.setLongValue(Long.parseLong(line.trim()));
                        w.addDocument(doc);
                        int count = docsIndexed.incrementAndGet();
                        if (count % 200000 == 0) {
                            long ms = System.currentTimeMillis();
                            System.out.println(count + ": " + ((ms - startMS) / 1000.0) + " sec");
                        }
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        };
        threads[i].start();
    }
    for (int i = 0; i < numThreads; i++) {
        threads[i].join();
    }
    long ms = System.currentTimeMillis();
    System.out.println(docsIndexed + ": " + ((ms - startMS) / 1000.0) + " sec");
    //System.out.println("tot conflicts: " + BytesRefHash.totConflict);
    if (normal == false) {
        w.abortMerges();
    }
    w.close();
    dir.close();
}

From source file: perf.NRTPerfTest.java

License: Apache License

public static void main(String[] args) throws Exception {

    final String dirImpl = args[0];
    final String dirPath = args[1];
    final String commit = args[2];
    final String lineDocFile = args[3];
    final long seed = Long.parseLong(args[4]);
    final double docsPerSec = Double.parseDouble(args[5]);
    final double runTimeSec = Double.parseDouble(args[6]);
    final int numSearchThreads = Integer.parseInt(args[7]);
    int numIndexThreads = Integer.parseInt(args[8]);
    if (numIndexThreads > docsPerSec) {
        System.out.println("INFO: numIndexThreads higher than docsPerSec, adjusting numIndexThreads");
        numIndexThreads = (int) Math.max(1, docsPerSec);
    }
    final double reopenPerSec = Double.parseDouble(args[9]);
    final Mode mode = Mode.valueOf(args[10].toUpperCase(Locale.ROOT));
    statsEverySec = Integer.parseInt(args[11]);
    final boolean doCommit = args[12].equals("yes");
    final double mergeMaxWriteMBPerSec = Double.parseDouble(args[13]);
    if (mergeMaxWriteMBPerSec != 0.0) {
        throw new IllegalArgumentException("mergeMaxWriteMBPerSec must be 0.0 until LUCENE-3202 is done");
    }
    final String tasksFile = args[14];
    if (Files.notExists(Paths.get(tasksFile))) {
        throw new FileNotFoundException("tasks file not found " + tasksFile);
    }

    final boolean hasProcMemInfo = Files.exists(Paths.get("/proc/meminfo"));

    System.out.println("DIR=" + dirImpl);
    System.out.println("Index=" + dirPath);
    System.out.println("Commit=" + commit);
    System.out.println("LineDocs=" + lineDocFile);
    System.out.println("Docs/sec=" + docsPerSec);
    System.out.println("Run time sec=" + runTimeSec);
    System.out.println("NumSearchThreads=" + numSearchThreads);
    System.out.println("NumIndexThreads=" + numIndexThreads);
    System.out.println("Reopen/sec=" + reopenPerSec);
    System.out.println("Mode=" + mode);
    System.out.println("tasksFile=" + tasksFile);

    System.out.println("Record stats every " + statsEverySec + " seconds");
    final int count = (int) ((runTimeSec / statsEverySec) + 2);
    docsIndexedByTime = new AtomicInteger[count];
    searchesByTime = new AtomicInteger[count];
    totalUpdateTimeByTime = new AtomicLong[count];
    final AtomicInteger reopensByTime[] = new AtomicInteger[count];
    for (int i = 0; i < count; i++) {
        docsIndexedByTime[i] = new AtomicInteger();
        searchesByTime[i] = new AtomicInteger();
        totalUpdateTimeByTime[i] = new AtomicLong();
        reopensByTime[i] = new AtomicInteger();
    }

    System.out.println(
            "Max merge MB/sec = " + (mergeMaxWriteMBPerSec <= 0.0 ? "unlimited" : mergeMaxWriteMBPerSec));
    final Random random = new Random(seed);

    final LineFileDocs docs = new LineFileDocs(lineDocFile, true, false, false, false, false, null,
            new HashSet<String>(), null, true);

    final Directory dir0;
    if (dirImpl.equals("MMapDirectory")) {
        dir0 = new MMapDirectory(Paths.get(dirPath));
    } else if (dirImpl.equals("NIOFSDirectory")) {
        dir0 = new NIOFSDirectory(Paths.get(dirPath));
    } else if (dirImpl.equals("SimpleFSDirectory")) {
        dir0 = new SimpleFSDirectory(Paths.get(dirPath));
    } else {
        docs.close();
        throw new RuntimeException("unknown directory impl \"" + dirImpl + "\"");
    }
    //final NRTCachingDirectory dir = new NRTCachingDirectory(dir0, 10, 200.0, mergeMaxWriteMBPerSec);
    final NRTCachingDirectory dir = new NRTCachingDirectory(dir0, 20, 400.0);
    //final MergeScheduler ms = dir.getMergeScheduler();
    //final Directory dir = dir0;
    //final MergeScheduler ms = new ConcurrentMergeScheduler();

    final String field = "body";

    // Open an IW on the requested commit point, but, don't
    // delete other (past or future) commit points:
    // TODO take Analyzer as parameter
    StandardAnalyzer analyzer = new StandardAnalyzer(CharArraySet.EMPTY_SET);
    final IndexWriterConfig conf = new IndexWriterConfig(analyzer);
    conf.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
    conf.setRAMBufferSizeMB(256.0);
    //iwc.setMergeScheduler(ms);

    final Codec codec = new Lucene62Codec() {
        @Override
        public PostingsFormat getPostingsFormatForField(String field) {
            if (field.equals("id")) {
                return PostingsFormat.forName("Memory");
            } else {
                return PostingsFormat.forName("Lucene50");
            }
        }

        private final DocValuesFormat direct = DocValuesFormat.forName("Direct");

        @Override
        public DocValuesFormat getDocValuesFormatForField(String field) {
            return direct;
        }
    };

    conf.setCodec(codec);

    /*
    iwc.setMergePolicy(new LogByteSizeMergePolicy());
    ((LogMergePolicy) iwc.getMergePolicy()).setUseCompoundFile(false);
    ((LogMergePolicy) iwc.getMergePolicy()).setMergeFactor(30);
    ((LogByteSizeMergePolicy) iwc.getMergePolicy()).setMaxMergeMB(10000.0);
    System.out.println("USING LOG BS MP");
     */

    TieredMergePolicy tmp = new TieredMergePolicy();
    tmp.setNoCFSRatio(0.0);
    tmp.setMaxMergedSegmentMB(1000000.0);
    //tmp.setReclaimDeletesWeight(3.0);
    //tmp.setMaxMergedSegmentMB(7000.0);
    conf.setMergePolicy(tmp);

    if (!commit.equals("none")) {
        conf.setIndexCommit(PerfUtils.findCommitPoint(commit, dir));
    }

    // Make sure merges run @ higher prio than indexing:
    final ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) conf.getMergeScheduler();
    cms.setMaxMergesAndThreads(4, 1);

    conf.setMergedSegmentWarmer(new MergedReaderWarmer(field));

    final IndexWriter w = new IndexWriter(dir, conf);
    // w.setInfoStream(System.out);

    IndexThreads.UpdatesListener updatesListener = new IndexThreads.UpdatesListener() {
        long startTimeNS;

        @Override
        public void beforeUpdate() {
            startTimeNS = System.nanoTime();
        }

        @Override
        public void afterUpdate() {
            int idx = currentQT.get();
            totalUpdateTimeByTime[idx].addAndGet(System.nanoTime() - startTimeNS);
            docsIndexedByTime[idx].incrementAndGet();
        }
    };
    IndexThreads indexThreads = new IndexThreads(random, w, new AtomicBoolean(false), docs, numIndexThreads, -1,
            false, false, mode, (float) (docsPerSec / numIndexThreads), updatesListener, -1.0, w.maxDoc());

    // NativePosixUtil.mlockTermsDict(startR, "id");
    final SearcherManager manager = new SearcherManager(w, null);
    IndexSearcher s = manager.acquire();
    try {
        System.out.println("Reader=" + s.getIndexReader());
    } finally {
        manager.release(s);
    }

    final DirectSpellChecker spellChecker = new DirectSpellChecker();
    final IndexState indexState = new IndexState(manager, null, field, spellChecker, "PostingsHighlighter",
            null);
    final QueryParser qp = new QueryParser(field, analyzer);
    TaskParser taskParser = new TaskParser(indexState, qp, field, 10, random, true);
    final TaskSource tasks = new RandomTaskSource(taskParser, tasksFile, random) {
        @Override
        public void taskDone(Task task, long queueTimeNS, int totalHitCount) {
            searchesByTime[currentQT.get()].incrementAndGet();
        }
    };
    System.out.println("Task repeat count 1");
    System.out.println("Tasks file " + tasksFile);
    System.out.println("Num task per cat 20");
    final TaskThreads taskThreads = new TaskThreads(tasks, indexState, numSearchThreads);

    final ReopenThread reopenThread = new ReopenThread(reopenPerSec, manager, reopensByTime, runTimeSec);
    reopenThread.setName("ReopenThread");
    reopenThread.setPriority(4 + Thread.currentThread().getPriority());
    System.out.println("REOPEN PRI " + reopenThread.getPriority());

    indexThreads.start();
    reopenThread.start();
    taskThreads.start();

    Thread.currentThread().setPriority(5 + Thread.currentThread().getPriority());
    System.out.println("TIMER PRI " + Thread.currentThread().getPriority());

    //System.out.println("Start: " + new Date());

    final long startMS = System.currentTimeMillis();
    final long stopMS = startMS + (long) (runTimeSec * 1000);
    int lastQT = -1;
    while (true) {
        final long t = System.currentTimeMillis();
        if (t >= stopMS) {
            break;
        }
        final int qt = (int) ((t - startMS) / statsEverySec / 1000);
        currentQT.set(qt);
        if (qt != lastQT) {
            final int prevQT = lastQT;
            lastQT = qt;
            if (prevQT > 0) {
                final String other;
                if (hasProcMemInfo) {
                    other = " D=" + getLinuxDirtyBytes();
                } else {
                    other = "";
                }
                int prev = prevQT - 1;
                System.out.println(String.format("QT %d searches=%d docs=%d reopens=%s totUpdateTime=%d", prev,
                        searchesByTime[prev].get(), docsIndexedByTime[prev].get(),
                        reopensByTime[prev].get() + other,
                        TimeUnit.NANOSECONDS.toMillis(totalUpdateTimeByTime[prev].get())));
            }
        }
        Thread.sleep(25);
    }

    taskThreads.stop();
    reopenThread.join();
    indexThreads.stop();

    System.out.println("By time:");
    for (int i = 0; i < searchesByTime.length - 2; i++) {
        System.out.println(String.format("  %d searches=%d docs=%d reopens=%d totUpdateTime=%d",
                i * statsEverySec, searchesByTime[i].get(), docsIndexedByTime[i].get(), reopensByTime[i].get(),
                TimeUnit.NANOSECONDS.toMillis(totalUpdateTimeByTime[i].get())));
    }

    manager.close();
    if (doCommit) {
        w.close();
    } else {
        w.rollback();
    }
}
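In this NRT benchmark, setMergePolicy installs a TieredMergePolicy with compound files disabled (setNoCFSRatio(0.0)) and an effectively unbounded merged-segment size, while the ConcurrentMergeScheduler is capped at a single merge thread and a merged-segment warmer pre-loads freshly merged segments; together with NRTCachingDirectory this keeps reopen latency low for the SearcherManager during the run.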

From source file: perLucene.Server.java

License: Open Source License

private static void initIndexWriter() {

    try {
        Directory dir = new MMapDirectory(new File("/mnt/perLucene"));

        // FSDirectory.setMaxMergeWriteMBPerSec.
        // use this if merges interfere with the search

        // use a unix native directory to tell the os not to cache merges

        NRTCachingDirectory index = new NRTCachingDirectory(dir, 500, 1000);

        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_41, null);
        // used to reduce latency of obtaining a reader after a big merge
        // config.setMergedSegmentWarmer(new
        // IndexWriter.IndexReaderWarmer());

        config.setMergePolicy(new LogByteSizeMergePolicy());

        w = new IndexWriter(index, config);
    } catch (IOException e) {
        System.out.println("Stacktrace " + e.toString());
    }

}
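If merges do interfere with search, as the comments above anticipate, LogByteSizeMergePolicy exposes size-based tuning knobs; a minimal sketch (Lucene 4.x-era API matching the Version.LUCENE_41 config above; the values are illustrative assumptions):

    LogByteSizeMergePolicy mergePolicy = new LogByteSizeMergePolicy();
    mergePolicy.setMergeFactor(10);    // how many similar-sized segments are merged at once
    mergePolicy.setMaxMergeMB(2048.0); // segments larger than this are never merge candidates
    config.setMergePolicy(mergePolicy);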

From source file: proj.zoie.impl.indexing.internal.DiskSearchIndex.java

License: Apache License

/**
 * Opens an index modifier.
 * @param analyzer Analyzer
 * @return IndexModifer instance
 */
public IndexWriter openIndexWriter(Analyzer analyzer, Similarity similarity) throws IOException {
    if (_indexWriter != null)
        return _indexWriter;

    Directory directory = _dirMgr.getDirectory(true);

    log.info("opening index writer at: " + _dirMgr.getPath());

    ZoieMergePolicy mergePolicy = new ZoieMergePolicy();
    mergePolicy.setMergePolicyParams(_mergePolicyParams);

    // hao: autocommit is set to false with this constructor
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_34, analyzer);
    config.setOpenMode(OpenMode.CREATE_OR_APPEND);
    config.setIndexDeletionPolicy(_deletionPolicy);
    config.setMergeScheduler(_mergeScheduler);
    config.setMergePolicy(mergePolicy);
    config.setReaderPooling(false);
    if (similarity != null) {
        config.setSimilarity(similarity);
    }
    config.setRAMBufferSizeMB(5);
    IndexWriter idxWriter = new IndexWriter(directory, config);

    _indexWriter = idxWriter;
    return idxWriter;
}
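setMergePolicy accepts any MergePolicy subclass, which is what lets frameworks plug in their own: ZoieMergePolicy here is a custom policy from the Zoie realtime-search library, configured through its own setMergePolicyParams rather than through Lucene's stock setters.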