Usage examples for org.apache.lucene.queries.function.ValueSource#getValues(Map, LeafReaderContext)
public abstract FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException;
From source file:alba.solr.core.DynamicMultiFunction.java
License:Apache License
public static FunctionValues[] valsArr(List<ValueSource> sources, Map fcontext, LeafReaderContext readerContext) throws IOException { final FunctionValues[] valsArr = new FunctionValues[sources.size()]; int i = 0;//from w ww . j a va 2 s . c om for (ValueSource source : sources) { if (source != null) { valsArr[i] = source.getValues(fcontext, readerContext); } else { valsArr[i] = null; } i++; } return valsArr; }
From source file:alba.solr.docvalues.DynamicDocValuesHelper.java
License:Apache License
public Object eval(int doc) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException { // TODO Auto-generated method stub /*if (doc < 0 || doc > this.readerContext.reader().maxDoc()) { return null;//from ww w . j av a 2 s.c o m }*/ Map<String, Object> params = new HashMap<String, Object>(); for (String s : args.keySet()) { if (args.get(s).startsWith("\"")) { params.put(s, args.get(s)); } else if (NumberUtils.isNumber(args.get(s))) { Object objVal; try { objVal = Long.parseLong(args.get(s)); } catch (NumberFormatException nfe1) { try { objVal = Float.parseFloat(args.get(s)); } catch (NumberFormatException nfe2) { objVal = s; } } if (objVal != null) { params.put(s, objVal); } else { params.put(s, "N/A"); } } else if ("false".equals(args.get(s).toLowerCase())) { params.put(s, false); } else if ("true".equals(args.get(s).toLowerCase())) { params.put(s, true); } else { SchemaField f = fp.getReq().getSchema().getField(args.get(s)); ValueSource vs = f.getType().getValueSource(f, fp); Object objVal = null; try { objVal = vs.getValues(this.context, this.readerContext).longVal(doc); params.put(s, objVal); } catch (IOException | UnsupportedOperationException e) { // TODO Auto-generated catch block // TODO Log properly try { objVal = vs.getValues(this.context, this.readerContext).floatVal(doc); } catch (IOException | UnsupportedOperationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); try { objVal = vs.getValues(this.context, this.readerContext).strVal(doc); } catch (IOException | UnsupportedOperationException e2) { // TODO Auto-generated catch block e2.printStackTrace(); } } logger.error("error converting values ", e); } if (objVal != null) { params.put(s, objVal); } else { params.put(s, "N/A"); } } } CallableFunction cf = functions.get(this.functionName); if (cf == null) { logger.error("unable to get function " + this.functionName); } if (cf != null) { List<Object> fnParams = new ArrayList<Object>(); Parameter[] 
methodParameters = cf.getMethod().getParameters(); //TODO spostare quanto pi codice possibile in fase di inizializzazione for (Parameter p : methodParameters) { if (p.isAnnotationPresent(Param.class)) { Param paramAnnotation = p.getAnnotation(Param.class); fnParams.add(params.get(paramAnnotation.name())); } } return cf.getMethod().invoke(cf.getInstance(), fnParams.toArray()); } else { return null; } }
From source file:alba.solr.docvalues.FloatFunction.java
License:Apache License
@Override public float floatVal(int doc) { // TODO Auto-generated method stub Map<String, Object> params = new HashMap<String, Object>(); for (String s : args.keySet()) { if (args.get(s).startsWith("\"")) { params.put(s, args.get(s));/*from w w w . jav a 2 s. c om*/ } else if (NumberUtils.isNumber(args.get(s))) { Object objVal; try { objVal = Long.parseLong(args.get(s)); } catch (NumberFormatException nfe1) { try { objVal = Float.parseFloat(args.get(s)); } catch (NumberFormatException nfe2) { objVal = s; } } if (objVal != null) { params.put(s, objVal); } else { params.put(s, "N/A"); } } else if ("false".equals(args.get(s).toLowerCase())) { params.put(s, false); } else if ("true".equals(args.get(s).toLowerCase())) { params.put(s, true); } else { SchemaField f = fp.getReq().getSchema().getField(args.get(s)); ValueSource vs = f.getType().getValueSource(f, fp); Object objVal = null; try { objVal = vs.getValues(this.context, this.readerContext).longVal(doc); params.put(s, objVal); } catch (IOException e) { // TODO Auto-generated catch block // TODO Log properly try { objVal = vs.getValues(this.context, this.readerContext).floatVal(doc); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); try { objVal = vs.getValues(this.context, this.readerContext).strVal(doc); } catch (IOException e2) { // TODO Auto-generated catch block e2.printStackTrace(); } } e.printStackTrace(); } if (objVal != null) { params.put(s, objVal); } else { params.put(s, "N/A"); } } } CallableFunction cf = functions.get(this.functionName); if (cf == null) { logger.error("unable to get function " + this.functionName); } if (cf != null) { List<Object> fnParams = new ArrayList<Object>(); Parameter[] methodParameters = cf.getMethod().getParameters(); //TODO spostare quanto pi codice possibile in fase di inizializzazione for (Parameter p : methodParameters) { if (p.isAnnotationPresent(Param.class)) { Param paramAnnotation = p.getAnnotation(Param.class); 
fnParams.add(params.get(paramAnnotation.name())); } } try { return (float) cf.getMethod().invoke(cf.getInstance(), fnParams.toArray()); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { // TODO Auto-generated catch block logger.error("errore mentre chiamavo " + cf.getMethod().getName(), e); for (Object o : fnParams) { logger.error("p " + o.toString()); } } } return -1f; }
From source file:org.apache.solr.request.NumericFacets.java
License:Apache License
/**
 * Computes facet counts for a numeric (Trie) field over the given doc set.
 *
 * Values are accumulated per document via the FieldCache into a hash table
 * (all numeric types are folded into sortable longs), then the top values are
 * selected with a bounded priority queue, and finally readable labels are
 * produced through the field type's ValueSource. When mincount <= 0 the
 * result must also include zero-count terms, which requires merging with the
 * field's terms dictionary.
 *
 * @param searcher  the index searcher
 * @param docs      the documents to facet over
 * @param fieldName the numeric field to facet on
 * @param offset    number of leading facet values to skip
 * @param limit     maximum number of facet values to return (negative = no limit)
 * @param mincount  minimum count for a value to be included (<= 0 enables zeros)
 * @param missing   whether to append a null entry with the missing-value count
 * @param sort      facet sort order (count or index)
 * @return facet value labels mapped to their counts
 * @throws IOException           on index access failure
 * @throws IllegalStateException if the field is not numeric, or not indexed
 *                               when zeros are requested
 */
public static NamedList<Integer> getCounts(SolrIndexSearcher searcher, DocSet docs, String fieldName, int offset,
        int limit, int mincount, boolean missing, String sort) throws IOException {
    final boolean zeros = mincount <= 0;
    mincount = Math.max(mincount, 1);
    final SchemaField sf = searcher.getSchema().getField(fieldName);
    final FieldType ft = sf.getType();
    final NumericType numericType = ft.getNumericType();
    if (numericType == null) {
        throw new IllegalStateException();
    }
    final List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();

    // 1. accumulate: walk the (docBase-ordered) doc set, switching FieldCache
    // readers as we cross segment boundaries, and count each distinct value.
    final HashTable hashTable = new HashTable();
    final Iterator<AtomicReaderContext> ctxIt = leaves.iterator();
    AtomicReaderContext ctx = null;
    FieldCache.Longs longs = null;
    Bits docsWithField = null;
    int missingCount = 0;
    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext();) {
        final int doc = docsIt.nextDoc();
        if (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc()) {
            // Advance to the segment containing this doc.
            do {
                ctx = ctxIt.next();
            } while (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc());
            assert doc >= ctx.docBase;
            // All numeric types are adapted to a Longs view so the hash table
            // only ever stores sortable long bits.
            switch (numericType) {
            case LONG:
                longs = FieldCache.DEFAULT.getLongs(ctx.reader(), fieldName, true);
                break;
            case INT:
                final FieldCache.Ints ints = FieldCache.DEFAULT.getInts(ctx.reader(), fieldName, true);
                longs = new FieldCache.Longs() {
                    @Override
                    public long get(int docID) {
                        return ints.get(docID);
                    }
                };
                break;
            case FLOAT:
                final FieldCache.Floats floats = FieldCache.DEFAULT.getFloats(ctx.reader(), fieldName, true);
                longs = new FieldCache.Longs() {
                    @Override
                    public long get(int docID) {
                        return NumericUtils.floatToSortableInt(floats.get(docID));
                    }
                };
                break;
            case DOUBLE:
                final FieldCache.Doubles doubles = FieldCache.DEFAULT.getDoubles(ctx.reader(), fieldName, true);
                longs = new FieldCache.Longs() {
                    @Override
                    public long get(int docID) {
                        return NumericUtils.doubleToSortableLong(doubles.get(docID));
                    }
                };
                break;
            default:
                throw new AssertionError();
            }
            docsWithField = FieldCache.DEFAULT.getDocsWithField(ctx.reader(), fieldName);
        }
        long v = longs.get(doc - ctx.docBase);
        // A value of 0 is ambiguous with "field absent"; disambiguate via docsWithField.
        if (v != 0 || docsWithField.get(doc - ctx.docBase)) {
            hashTable.add(doc, v, 1);
        } else {
            ++missingCount;
        }
    }

    // 2. select top-k facet values with a bounded priority queue.
    final int pqSize = limit < 0 ? hashTable.size : Math.min(offset + limit, hashTable.size);
    final PriorityQueue<Entry> pq;
    if (FacetParams.FACET_SORT_COUNT.equals(sort) || FacetParams.FACET_SORT_COUNT_LEGACY.equals(sort)) {
        // Sort by count desc, ties broken by value asc.
        pq = new PriorityQueue<Entry>(pqSize) {
            @Override
            protected boolean lessThan(Entry a, Entry b) {
                if (a.count < b.count || (a.count == b.count && a.bits > b.bits)) {
                    return true;
                } else {
                    return false;
                }
            }
        };
    } else {
        // Index (value) order.
        pq = new PriorityQueue<Entry>(pqSize) {
            @Override
            protected boolean lessThan(Entry a, Entry b) {
                return a.bits > b.bits;
            }
        };
    }
    // Reuse a single Entry; insertWithOverflow hands back an evicted/spare one.
    Entry e = null;
    for (int i = 0; i < hashTable.bits.length; ++i) {
        if (hashTable.counts[i] >= mincount) {
            if (e == null) {
                e = new Entry();
            }
            e.bits = hashTable.bits[i];
            e.count = hashTable.counts[i];
            e.docID = hashTable.docIDs[i];
            e = pq.insertWithOverflow(e);
        }
    }

    // 3. build the NamedList, resolving readable labels via the ValueSource.
    final ValueSource vs = ft.getValueSource(sf, null);
    final NamedList<Integer> result = new NamedList<Integer>();
    // This stuff is complicated because if facet.mincount=0, the counts needs
    // to be merged with terms from the terms dict
    if (!zeros || FacetParams.FACET_SORT_COUNT.equals(sort) || FacetParams.FACET_SORT_COUNT_LEGACY.equals(sort)) {
        // Only keep items we're interested in
        final Deque<Entry> counts = new ArrayDeque<Entry>();
        while (pq.size() > offset) {
            counts.addFirst(pq.pop());
        }
        // Entries from the PQ first, then using the terms dictionary
        for (Entry entry : counts) {
            final int readerIdx = ReaderUtil.subIndex(entry.docID, leaves);
            final FunctionValues values = vs.getValues(Collections.emptyMap(), leaves.get(readerIdx));
            result.add(values.strVal(entry.docID - leaves.get(readerIdx).docBase), entry.count);
        }
        if (zeros && (limit < 0 || result.size() < limit)) { // need to merge with the term dict
            if (!sf.indexed()) {
                throw new IllegalStateException("Cannot use " + FacetParams.FACET_MINCOUNT + "=0 on field "
                        + sf.getName() + " which is not indexed");
            }
            // Add zeros until there are limit results
            final Set<String> alreadySeen = new HashSet<String>();
            while (pq.size() > 0) {
                Entry entry = pq.pop();
                final int readerIdx = ReaderUtil.subIndex(entry.docID, leaves);
                final FunctionValues values = vs.getValues(Collections.emptyMap(), leaves.get(readerIdx));
                alreadySeen.add(values.strVal(entry.docID - leaves.get(readerIdx).docBase));
            }
            for (int i = 0; i < result.size(); ++i) {
                alreadySeen.add(result.getName(i));
            }
            final Terms terms = searcher.getAtomicReader().terms(fieldName);
            if (terms != null) {
                // Trie fields index multiple precisions; only scan the
                // full-precision terms (identified by the main value prefix).
                final String prefixStr = TrieField.getMainValuePrefix(ft);
                final BytesRef prefix;
                if (prefixStr != null) {
                    prefix = new BytesRef(prefixStr);
                } else {
                    prefix = new BytesRef();
                }
                final TermsEnum termsEnum = terms.iterator(null);
                BytesRef term;
                switch (termsEnum.seekCeil(prefix)) {
                case FOUND:
                case NOT_FOUND:
                    term = termsEnum.term();
                    break;
                case END:
                    term = null;
                    break;
                default:
                    throw new AssertionError();
                }
                final CharsRef spare = new CharsRef();
                // Skip terms still owed to the requested offset (unseen terms only).
                for (int skipped = hashTable.size; skipped < offset && term != null
                        && StringHelper.startsWith(term, prefix);) {
                    ft.indexedToReadable(term, spare);
                    final String termStr = spare.toString();
                    if (!alreadySeen.contains(termStr)) {
                        ++skipped;
                    }
                    term = termsEnum.next();
                }
                for (; term != null && StringHelper.startsWith(term, prefix)
                        && (limit < 0 || result.size() < limit); term = termsEnum.next()) {
                    ft.indexedToReadable(term, spare);
                    final String termStr = spare.toString();
                    if (!alreadySeen.contains(termStr)) {
                        result.add(termStr, 0);
                    }
                }
            }
        }
    } else {
        // sort=index, mincount=0 and we have less than limit items
        // => Merge the PQ and the terms dictionary on the fly
        if (!sf.indexed()) {
            throw new IllegalStateException("Cannot use " + FacetParams.FACET_SORT + "="
                    + FacetParams.FACET_SORT_INDEX + " on a field which is not indexed");
        }
        final Map<String, Integer> counts = new HashMap<String, Integer>();
        while (pq.size() > 0) {
            final Entry entry = pq.pop();
            final int readerIdx = ReaderUtil.subIndex(entry.docID, leaves);
            final FunctionValues values = vs.getValues(Collections.emptyMap(), leaves.get(readerIdx));
            counts.put(values.strVal(entry.docID - leaves.get(readerIdx).docBase), entry.count);
        }
        final Terms terms = searcher.getAtomicReader().terms(fieldName);
        if (terms != null) {
            final String prefixStr = TrieField.getMainValuePrefix(ft);
            final BytesRef prefix;
            if (prefixStr != null) {
                prefix = new BytesRef(prefixStr);
            } else {
                prefix = new BytesRef();
            }
            final TermsEnum termsEnum = terms.iterator(null);
            BytesRef term;
            switch (termsEnum.seekCeil(prefix)) {
            case FOUND:
            case NOT_FOUND:
                term = termsEnum.term();
                break;
            case END:
                term = null;
                break;
            default:
                throw new AssertionError();
            }
            final CharsRef spare = new CharsRef();
            for (int i = 0; i < offset && term != null && StringHelper.startsWith(term, prefix); ++i) {
                term = termsEnum.next();
            }
            for (; term != null && StringHelper.startsWith(term, prefix)
                    && (limit < 0 || result.size() < limit); term = termsEnum.next()) {
                ft.indexedToReadable(term, spare);
                final String termStr = spare.toString();
                Integer count = counts.get(termStr);
                if (count == null) {
                    count = 0;
                }
                result.add(termStr, count);
            }
        }
    }
    if (missing) {
        result.add(null, missingCount);
    }
    return result;
}
From source file:org.apache.solr.search.TestIndexSearcher.java
License:Apache License
/**
 * Reads the string representation of {@code field} for the given global doc id,
 * resolving it through the field type's ValueSource.
 *
 * @param sqr   request providing the schema and searcher
 * @param field schema field name to read
 * @param doc   top-level (global) document id
 * @return the field's string value for that document
 * @throws IOException on index access failure
 */
private String getStringVal(SolrQueryRequest sqr, String field, int doc) throws IOException {
    final SchemaField schemaField = sqr.getSchema().getField(field);
    final ValueSource source = schemaField.getType().getValueSource(schemaField, null);
    final Map context = ValueSource.newContext(sqr.getSearcher());
    source.createWeight(context, sqr.getSearcher());
    final List<AtomicReaderContext> leaves = sqr.getSearcher().getTopReaderContext().leaves();
    final AtomicReaderContext leaf = leaves.get(ReaderUtil.subIndex(doc, leaves));
    // Translate the global doc id into a segment-local one.
    return source.getValues(context, leaf).strVal(doc - leaf.docBase);
}
From source file:org.apache.solr.search.xjoin.TestXJoinValueSourceParser.java
License:Apache License
/**
 * Builds an XJoin value source from the given init args and the parsed
 * function argument, then resolves it against the searcher's top-level
 * leaf context.
 *
 * @param initArgs init parameters for the XJoinValueSourceParser
 * @param arg      the function-query argument string to parse
 * @return the resolved per-segment FunctionValues
 * @throws Exception if parsing or resolution fails
 */
@SuppressWarnings({ "rawtypes" })
private FunctionValues functionValues(NamedList initArgs, String arg) throws Exception {
    FunctionQParser parser = mockFunctionQParser(arg);
    XJoinValueSourceParser sourceParser = new XJoinValueSourceParser();
    sourceParser.init(initArgs);
    ValueSource source = sourceParser.parse(parser);
    return source.getValues(null, searcher.getLeafReader().getContext());
}
From source file:org.apache.solr.update.IndexFingerprint.java
License:Apache License
/**
 * Computes a version fingerprint for one index segment.
 *
 * Walks every live document, reads its _version_ value through the version
 * field's ValueSource, and folds versions up to {@code maxVersion} into the
 * fingerprint's hash and counters.
 *
 * @param searcher   the searcher whose schema supplies the version field
 * @param ctx        the segment to fingerprint
 * @param maxVersion only versions <= this value contribute to the hash
 * @return the populated fingerprint for this segment
 * @throws IOException on index access failure
 */
public static IndexFingerprint getFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, Long maxVersion)
        throws IOException {
    final SchemaField versionField = VersionInfo.getAndCheckVersionField(searcher.getSchema());
    final ValueSource versionSource = versionField.getType().getValueSource(versionField, null);
    final Map funcContext = ValueSource.newContext(searcher);
    versionSource.createWeight(funcContext, searcher);

    final IndexFingerprint fingerprint = new IndexFingerprint();
    fingerprint.maxVersionSpecified = maxVersion;
    fingerprint.maxDoc = ctx.reader().maxDoc();
    fingerprint.numDocs = ctx.reader().numDocs();

    final Bits liveDocs = ctx.reader().getLiveDocs();
    final FunctionValues versions = versionSource.getValues(funcContext, ctx);
    final int maxDoc = ctx.reader().maxDoc();
    for (int docId = 0; docId < maxDoc; docId++) {
        // Skip deleted documents (liveDocs == null means nothing is deleted).
        if (liveDocs != null && !liveDocs.get(docId)) {
            continue;
        }
        final long version = versions.longVal(docId);
        fingerprint.maxVersionEncountered = Math.max(version, fingerprint.maxVersionEncountered);
        if (version <= fingerprint.maxVersionSpecified) {
            fingerprint.maxInHash = Math.max(version, fingerprint.maxInHash);
            fingerprint.versionsHash += Hash.fmix64(version);
            fingerprint.numVersions++;
        }
    }
    return fingerprint;
}
From source file:org.apache.solr.update.VersionInfo.java
License:Apache License
public Long getVersionFromIndex(BytesRef idBytes) { // TODO: we could cache much of this and invalidate during a commit. // TODO: most DocValues classes are threadsafe - expose which. RefCounted<SolrIndexSearcher> newestSearcher = ulog.uhandler.core.getRealtimeSearcher(); try {//from w ww .jav a 2s . com SolrIndexSearcher searcher = newestSearcher.get(); long lookup = searcher.lookupId(idBytes); if (lookup < 0) return null; ValueSource vs = versionField.getType().getValueSource(versionField, null); Map context = ValueSource.newContext(searcher); vs.createWeight(context, searcher); FunctionValues fv = vs.getValues(context, searcher.getTopReaderContext().leaves().get((int) (lookup >> 32))); long ver = fv.longVal((int) lookup); return ver; } catch (IOException e) { throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error reading version from index", e); } finally { if (newestSearcher != null) { newestSearcher.decref(); } } }
From source file:org.opencommercesearch.lucene.queries.function.valuesource.BoostValueSourceParserTest.java
License:Apache License
/**
 * When boosts are already cached under the boost id, parsing the value source
 * must serve them without touching the HTTP client, and docs 0-2 must get the
 * cached boosts while all other docs get 0.
 */
@Test
public void testCachedBoosts() throws Exception {
    when(boostCache.get(boostId)).thenReturn(createBoosts());

    ValueSource vs = vsp.parse(fp);
    verifyZeroInteractions(httpClient);

    FunctionValues values = vs.getValues(null, null);
    final float[] expected = { 0.7f, 0.6f, 0.5f };
    for (int doc = 0; doc < expected.length; doc++) {
        Assert.assertEquals(expected[doc], values.floatVal(doc), 0.0f);
    }
    for (int doc = 3; doc <= 10; doc++) {
        Assert.assertEquals(0.0f, values.floatVal(doc), 0.0f);
    }
}
From source file:org.opencommercesearch.lucene.queries.function.valuesource.BoostValueSourceParserTest.java
License:Apache License
@Test public void testCachedBoostsWithTreatment() throws Exception { when(params.get(TREATMENT_ID)).thenReturn("b"); when(boostCache.get(boostId + "_b")).thenReturn(createBoosts()); ValueSource vs = vsp.parse(fp); verifyZeroInteractions(httpClient);//from w ww .j a va2 s.c o m FunctionValues values = vs.getValues(null, null); Assert.assertEquals(0.7f, values.floatVal(0), 0.0f); Assert.assertEquals(0.6f, values.floatVal(1), 0.0f); Assert.assertEquals(0.5f, values.floatVal(2), 0.0f); for (int i = 3; i <= 10; i++) { Assert.assertEquals(0.0f, values.floatVal(i), 0.0f); } }