List of usage examples for org.apache.lucene.util MapOfSets getMap
public Map<K, Set<V>> getMap()
From source file: org.apache.solr.uninverting.FieldCacheSanityChecker.java
License: Apache License
/** * Internal helper method used by check that iterates over * valMismatchKeys and generates a Collection of Insanity * instances accordingly. The MapOfSets are used to populate * the Insanity objects. //from www . j av a 2s. c o m * @see InsanityType#VALUEMISMATCH */ private Collection<Insanity> checkValueMismatch(MapOfSets<Integer, CacheEntry> valIdToItems, MapOfSets<ReaderField, Integer> readerFieldToValIds, Set<ReaderField> valMismatchKeys) { final List<Insanity> insanity = new ArrayList<>(valMismatchKeys.size() * 3); if (!valMismatchKeys.isEmpty()) { // we have multiple values for some ReaderFields final Map<ReaderField, Set<Integer>> rfMap = readerFieldToValIds.getMap(); final Map<Integer, Set<CacheEntry>> valMap = valIdToItems.getMap(); for (final ReaderField rf : valMismatchKeys) { final List<CacheEntry> badEntries = new ArrayList<>(valMismatchKeys.size() * 2); for (final Integer value : rfMap.get(rf)) { for (final CacheEntry cacheEntry : valMap.get(value)) { badEntries.add(cacheEntry); } } CacheEntry[] badness = new CacheEntry[badEntries.size()]; badness = badEntries.toArray(badness); insanity.add(new Insanity(InsanityType.VALUEMISMATCH, "Multiple distinct value objects for " + rf.toString(), badness)); } } return insanity; }
From source file: org.apache.solr.uninverting.FieldCacheSanityChecker.java
License: Apache License
/** * Internal helper method used by check that iterates over * the keys of readerFieldToValIds and generates a Collection * of Insanity instances whenever two (or more) ReaderField instances are * found that have an ancestry relationships. * * @see InsanityType#SUBREADER/*from w w w . j av a 2 s.c o m*/ */ private Collection<Insanity> checkSubreaders(MapOfSets<Integer, CacheEntry> valIdToItems, MapOfSets<ReaderField, Integer> readerFieldToValIds) { final List<Insanity> insanity = new ArrayList<>(23); Map<ReaderField, Set<ReaderField>> badChildren = new HashMap<>(17); MapOfSets<ReaderField, ReaderField> badKids = new MapOfSets<>(badChildren); // wrapper Map<Integer, Set<CacheEntry>> viToItemSets = valIdToItems.getMap(); Map<ReaderField, Set<Integer>> rfToValIdSets = readerFieldToValIds.getMap(); Set<ReaderField> seen = new HashSet<>(17); Set<ReaderField> readerFields = rfToValIdSets.keySet(); for (final ReaderField rf : readerFields) { if (seen.contains(rf)) continue; List<Object> kids = getAllDescendantReaderKeys(rf.readerKey); for (Object kidKey : kids) { ReaderField kid = new ReaderField(kidKey, rf.fieldName); if (badChildren.containsKey(kid)) { // we've already process this kid as RF and found other problems // track those problems as our own badKids.put(rf, kid); badKids.putAll(rf, badChildren.get(kid)); badChildren.remove(kid); } else if (rfToValIdSets.containsKey(kid)) { // we have cache entries for the kid badKids.put(rf, kid); } seen.add(kid); } seen.add(rf); } // every mapping in badKids represents an Insanity for (final ReaderField parent : badChildren.keySet()) { Set<ReaderField> kids = badChildren.get(parent); List<CacheEntry> badEntries = new ArrayList<>(kids.size() * 2); // put parent entr(ies) in first { for (final Integer value : rfToValIdSets.get(parent)) { badEntries.addAll(viToItemSets.get(value)); } } // now the entries for the descendants for (final ReaderField kid : kids) { for (final Integer value : rfToValIdSets.get(kid)) { 
badEntries.addAll(viToItemSets.get(value)); } } CacheEntry[] badness = new CacheEntry[badEntries.size()]; badness = badEntries.toArray(badness); insanity.add(new Insanity(InsanityType.SUBREADER, "Found caches for descendants of " + parent.toString(), badness)); } return insanity; }