Example usage for org.apache.lucene.search Explanation getValue

List of usage examples for org.apache.lucene.search Explanation getValue

Introduction

In this page you can find the example usage for org.apache.lucene.search Explanation getValue.

Prototype

public Number getValue() 

Source Link

Document

The value assigned to this explanation node. (Note: the `Number` return type shown in the prototype is from Lucene 8+; the examples below were written against earlier Lucene versions, where `getValue()` returned a primitive `float`.)

Usage

From source file:BM25LSimilarity.java

License:Apache License

/**
 * Builds the explanation for the BM25L term-frequency normalization factor.
 *
 * @param doc   document id whose field length is looked up in {@code norms}
 * @param freq  explanation node carrying the raw term frequency
 * @param stats collection statistics (average field length, etc.)
 * @param norms per-document encoded field lengths; may be {@code null}
 * @return an Explanation for tf * (k1 + 1) / (tf + k1 * (1 - b + b * dl / avgdl))
 */
private Explanation explainTFNorm(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
    List<Explanation> details = new ArrayList<>();
    details.add(freq);
    details.add(Explanation.match(k1, "parameter k1"));
    if (norms == null) {
        // No norms: the length-normalization term drops out (b effectively 0).
        details.add(Explanation.match(0, "parameter b (norms omitted for field)"));
        float tf = freq.getValue();
        return Explanation.match((tf * (k1 + 1)) / (tf + k1), "tfNorm, computed from:", details);
    }
    float docLength = decodeNormValue((byte) norms.get(doc));
    details.add(Explanation.match(b, "parameter b"));
    details.add(Explanation.match(stats.avgdl, "avgFieldLength"));
    details.add(Explanation.match(docLength, "fieldLength"));
    float tf = freq.getValue();
    float lengthNorm = k1 * (1 - b + b * docLength / stats.avgdl);
    return Explanation.match((tf * (k1 + 1)) / (tf + lengthNorm), "tfNorm, computed from:", details);
}

From source file:BM25LSimilarity.java

License:Apache License

/**
 * Builds the full BM25L score explanation for one document:
 * boost * idf * tfNorm, listing each contributing factor as a detail.
 *
 * @param doc   document id being explained
 * @param freq  explanation node carrying the raw term frequency
 * @param stats collection statistics (boost, idf, ...)
 * @param norms per-document encoded field lengths; may be {@code null}
 * @return the combined score explanation
 */
private Explanation explainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
    Explanation boostExpl = Explanation.match(stats.boost, "boost");
    List<Explanation> details = new ArrayList<>();
    // A boost of exactly 1 contributes nothing, so leave it out of the breakdown.
    if (boostExpl.getValue() != 1.0f) {
        details.add(boostExpl);
    }
    details.add(stats.idf);
    Explanation tfNormExpl = explainTFNorm(doc, freq, stats, norms);
    details.add(tfNormExpl);
    float score = boostExpl.getValue() * stats.idf.getValue() * tfNormExpl.getValue();
    return Explanation.match(score, "score(doc=" + doc + ",freq=" + freq + "), product of:", details);
}

From source file:cc.wikitools.lucene.WikipediaSearcher.java

License:Apache License

/**
 * Scores an article, identified by its Wikipedia numeric id, against a query.
 * Best-effort: any parse/search failure is printed and reported as 0.
 *
 * @param q      query string, parsed with the article query parser
 * @param wikiId external Wikipedia id of the article
 * @return the explained score, or 0.0f if the article is unknown or an error occurs
 */
public float scoreArticle(String q, int wikiId) {
    try {
        int docId = internalIdFromWikipediaId(wikiId);
        if (docId == -1) {
            // Unknown article: score it as zero rather than failing.
            return 0.0f;
        }
        Query query = parserArticle.parse(q);
        Explanation explanation = searcher.explain(query, docId);
        System.out.println(explanation);
        return explanation.getValue();
    } catch (Exception e) {
        // Deliberately swallow: callers expect a score, not an exception.
        e.printStackTrace();
        return 0.0f;
    }
}

From source file:cc.wikitools.lucene.WikipediaSearcher.java

License:Apache License

/**
 * Scores an article, identified by its Wikipedia title, against a query.
 * Best-effort: any parse/search failure is printed and reported as 0.
 *
 * @param q     query string, parsed with the article query parser
 * @param title Wikipedia title of the article
 * @return the explained score, or 0.0f if the article is unknown or an error occurs
 */
public float scoreArticle(String q, String title) {
    try {
        int docId = internalIdFromWikipediaTitle(title);
        if (docId == -1) {
            // Unknown title: score it as zero rather than failing.
            return 0.0f;
        }
        Query query = parserArticle.parse(q);
        Explanation explanation = searcher.explain(query, docId);
        System.out.println(explanation);
        return explanation.getValue();
    } catch (Exception e) {
        // Deliberately swallow: callers expect a score, not an exception.
        e.printStackTrace();
        return 0.0f;
    }
}

From source file:com.browseengine.bobo.query.RecencyBoostScorerBuilder.java

License:Apache License

/**
 * Wraps the inner query explanation with the recency-boost factor:
 * finalScore = combineScores(timeFactor(docTime), innerScore).
 *
 * <p>Fixes over the original: the facet data was fetched twice from the
 * reader (once for the instanceof check, once for the cast), and an unused
 * {@code now} timestamp was computed; both are removed.
 *
 * @param reader             must be a {@code BoboIndexReader}
 * @param doc                segment-local document id
 * @param innerExplaination  explanation of the raw (unboosted) score
 * @return an Explanation combining the time factor with the raw score
 * @throws IOException if the underlying reader access fails
 * @throws IllegalStateException if the reader or facet data has the wrong type
 */
public Explanation explain(IndexReader reader, int doc, Explanation innerExplaination) throws IOException {
    if (!(reader instanceof BoboIndexReader)) {
        throw new IllegalStateException("reader not instance of " + BoboIndexReader.class);
    }
    BoboIndexReader boboReader = (BoboIndexReader) reader;
    Object dataObj = boboReader.getFacetData(_timeFacetName);
    if (!(dataObj instanceof FacetDataCache<?>)) {
        throw new IllegalStateException("underlying facet data must be of type FacetDataCache<Long>");
    }
    // Reuse the object fetched above instead of querying the reader a second time.
    @SuppressWarnings("unchecked")
    FacetDataCache<Long> facetDataCache = (FacetDataCache<Long>) dataObj;
    final BigSegmentedArray orderArray = facetDataCache.orderArray;
    final TermLongList termList = (TermLongList) facetDataCache.valArray;
    float rawScore = innerExplaination.getValue();
    long timeVal = termList.getPrimitiveValue(orderArray.get(doc));
    float timeScore = computeTimeFactor(timeVal);
    float finalScore = combineScores(timeScore, rawScore);
    Explanation finalExpl = new Explanation();
    finalExpl.addDetail(innerExplaination);
    finalExpl.setValue(finalScore);
    finalExpl.setDescription("final score = (time score: " + timeScore + ") * (raw score: " + rawScore
            + "), timeVal: " + timeVal);
    return finalExpl;
}

From source file:com.core.nlp.similarity.TFIDFSimilarity.java

License:Apache License

/**
 * Builds the query-weight explanation: boost * idf * queryNorm.
 * The boost detail is listed only when it is not the neutral 1.0f.
 *
 * @param stats per-query statistics (boost, idf, queryNorm)
 * @return the query-weight explanation with its contributing factors
 */
private Explanation explainQuery(IDFStats stats) {
    List<Explanation> details = new ArrayList<>();
    Explanation boostExpl = Explanation.match(stats.queryBoost, "boost");
    if (stats.queryBoost != 1.0f) {
        details.add(boostExpl);
    }
    details.add(stats.idf);
    Explanation queryNormExpl = Explanation.match(stats.queryNorm, "queryNorm");
    details.add(queryNormExpl);
    float weight = boostExpl.getValue() * stats.idf.getValue() * queryNormExpl.getValue();
    return Explanation.match(weight, "queryWeight, product of:", details);
}

From source file:com.core.nlp.similarity.TFIDFSimilarity.java

License:Apache License

/**
 * Builds the field-weight explanation: tf(freq) * idf * fieldNorm.
 *
 * @param doc   document id being explained
 * @param freq  explanation node carrying the raw term frequency
 * @param stats per-query statistics supplying the idf detail
 * @param norms per-document field norms; a missing norm counts as 1.0f
 * @return the field-weight explanation with its three factors
 */
private Explanation explainField(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
    float rawFreq = freq.getValue();
    Explanation tfExplanation = Explanation.match(tf(rawFreq),
            "tf(freq=" + rawFreq + "), with freq of:", freq);
    float norm = norms != null ? decodeNormValue(norms.get(doc)) : 1.0f;
    Explanation fieldNormExpl = Explanation.match(norm, "fieldNorm(doc=" + doc + ")");
    float weight = tfExplanation.getValue() * stats.idf.getValue() * fieldNormExpl.getValue();
    return Explanation.match(weight,
            "fieldWeight in " + doc + ", product of:", tfExplanation, stats.idf, fieldNormExpl);
}

From source file:com.core.nlp.similarity.TFIDFSimilarity.java

License:Apache License

/**
 * Builds the overall TF-IDF score explanation: queryWeight * fieldWeight.
 * When the query weight is exactly 1, the field explanation is returned
 * alone to keep the output compact.
 *
 * @param doc   document id being explained
 * @param freq  explanation node carrying the raw term frequency
 * @param stats per-query statistics
 * @param norms per-document field norms; may be {@code null}
 * @return the combined score explanation
 */
private Explanation explainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
    Explanation queryExpl = explainQuery(stats);
    Explanation fieldExpl = explainField(doc, freq, stats, norms);
    if (queryExpl.getValue() == 1f) {
        // Multiplying by 1 adds no information; show just the field side.
        return fieldExpl;
    }
    float score = queryExpl.getValue() * fieldExpl.getValue();
    return Explanation.match(score,
            "score(doc=" + doc + ",freq=" + freq.getValue() + "), product of:", queryExpl, fieldExpl);
}

From source file:com.factweavers.elasticsearch.payloadscorefunction.PayloadScoringFunction.java

License:Apache License

@Override
public Explanation explainScore(int docId, Explanation subQueryScore) {
    Explanation exp = new Explanation();
    double score = score(docId, subQueryScore.getValue());
    exp.setValue(CombineFunction.toFloat(score));
    exp.setDescription(/*from w ww .j  a  v a2 s.c o  m*/
            String.format(Locale.ROOT, "field value function: (Payload['%s']['%s'])", field, values));
    return exp;
}

From source file:com.o19s.bm25f.BM25FSimilarity.java

License:Apache License

/**
 * Builds the full BM25F score explanation for one document:
 * boost * idf * tfNorm, listing each contributing factor as a detail.
 *
 * @param doc   document id being explained
 * @param freq  explanation node carrying the raw term frequency
 * @param stats collection statistics (boost, idf, ...)
 * @param norms per-document encoded field lengths; may be {@code null}
 * @return the combined score explanation
 */
private Explanation explainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
    List<Explanation> details = new ArrayList<>();
    Explanation boostExpl = Explanation.match(stats.boost, "boost");
    // Omit the neutral boost of 1 from the breakdown.
    if (boostExpl.getValue() != 1.0f) {
        details.add(boostExpl);
    }
    details.add(stats.idf);
    Explanation tfNormExpl = explainTFNorm(doc, freq, stats, norms);
    details.add(tfNormExpl);
    float score = boostExpl.getValue() * stats.idf.getValue() * tfNormExpl.getValue();
    return Explanation.match(score, "score(doc=" + doc + ",freq=" + freq + "), product of:", details);
}