Example usage for org.apache.lucene.analysis CharArrayMap get

List of usage examples for org.apache.lucene.analysis CharArrayMap get

Introduction

In this page you can find the example usage for org.apache.lucene.analysis CharArrayMap get.

Prototype

public V get(char[] text, int off, int len) 

Source Link

Document

Returns the value of the mapping for the len characters of text starting at offset off.

Usage

From source file:org.apache.solr.util.TestCharArrayMap.java

License:Apache License

/**
 * Randomized differential test: runs {@code iter} random put/get rounds against
 * a {@code CharArrayMap} and a reference {@code HashMap} and asserts the two
 * stay in agreement after every operation.
 *
 * @param iter       number of random insertion rounds to perform
 * @param ignoreCase whether the CharArrayMap is case-insensitive; the reference
 *                   HashMap mirrors this by lower-casing its keys
 */
public void doRandom(int iter, boolean ignoreCase) {
    // Parameterized types instead of the raw CharArrayMap/HashMap.
    CharArrayMap<Integer> map = new CharArrayMap<>(1, ignoreCase);
    HashMap<String, Integer> hmap = new HashMap<>();

    char[] key;
    for (int i = 0; i < iter; i++) {
        // Random key of length 0-4 built from arbitrary ASCII chars (0-126);
        // zero-length keys exercise the empty-key edge case.
        int len = r.nextInt(5);
        key = new char[len];
        for (int j = 0; j < key.length; j++) {
            key[j] = (char) r.nextInt(127);
        }
        String keyStr = new String(key);
        // The reference HashMap has no ignoreCase mode, so normalize its keys.
        String hmapKey = ignoreCase ? keyStr.toLowerCase() : keyStr;

        // Boxed Integer (not int) so every assertEquals below resolves to the
        // (Object, Object) overload — assertEquals(int, Integer) is ambiguous.
        Integer val = r.nextInt();

        // Both maps must report the same previous mapping (or null).
        Object o1 = map.put(key, val);
        Object o2 = hmap.put(hmapKey, val);
        assertEquals(o1, o2);

        // Re-inserting via the String overload must return the value just stored.
        assertEquals(val, map.put(keyStr, val));

        // All three lookup overloads must agree on the stored value.
        assertEquals(val, map.get(key, 0, key.length));
        assertEquals(val, map.get(key));
        assertEquals(val, map.get(keyStr));

        assertEquals(hmap.size(), map.size());
    }

    // Map equality must hold symmetrically between the two implementations.
    assertEquals(map, hmap);
    assertEquals(hmap, map);
}

From source file:org.tallison.lucene.contrast.QueryToCorpusContraster.java

License:Apache License

/**
 * Runs {@code query}, then tallies per-document term frequencies for
 * {@code fieldName} across the top hits and returns the contrastive terms.
 *
 * @param query      query selecting the document subset to contrast
 * @param fieldName  stored/indexed field whose terms are counted
 * @param numResults maximum number of terms to return
 * @return terms ranked by {@code getResults}; empty list when there are fewer
 *         hits than {@code minTermFreq}
 * @throws IOException if the underlying index search or term extraction fails
 */
public List<TermIDF> contrast(Query query, String fieldName, int numResults) throws IOException {
    TopScoreDocCollector results = TopScoreDocCollector.create(maxDocs, maxDocs + 10000);
    searcher.search(query, results);

    ScoreDoc[] scoreDocs = results.topDocs().scoreDocs;
    // With fewer hits than minTermFreq no term can reach the threshold,
    // so return an empty list immediately.
    if (scoreDocs.length < minTermFreq) {
        return new ArrayList<TermIDF>();
    }

    // Rough capacity guess (~100 distinct terms per doc) to limit rehashing.
    int initialSize = scoreDocs.length * 100;
    CharArrayMap<MutableValueInt> map = new CharArrayMap<MutableValueInt>(initialSize, ignoreCase);
    CharArraySet tmpSet = new CharArraySet(100, ignoreCase);
    Set<String> selector = new HashSet<String>();
    selector.add(fieldName);

    for (ScoreDoc scoreDoc : scoreDocs) {
        // Collect this document's distinct terms into tmpSet.
        processDoc(scoreDoc.doc, fieldName, selector, tmpSet);
        // Fold the document's terms into the global document-frequency map.
        Iterator<Object> it = tmpSet.iterator();
        while (it.hasNext()) {
            char[] token = (char[]) it.next();
            MutableValueInt docCount = map.get(token, 0, token.length);
            if (docCount == null) {
                // First document containing this term: insert a fresh counter.
                docCount = new MutableValueInt();
                docCount.value = 1;
                map.put(token, docCount);
            } else {
                // Counter object is already stored in the map; mutating it in
                // place suffices — re-putting the same reference was redundant.
                docCount.value++;
            }
        }
        tmpSet.clear();
    }

    return getResults(fieldName, map, numResults);
}