List of usage examples for the org.apache.lucene.index.FieldInfos#size() method.
public int size()
From source file:com.floragunn.searchguard.configuration.DlsFlsFilterLeafReader.java
License:Open Source License
DlsFlsFilterLeafReader(final LeafReader delegate, final Set<String> includes, final Query dlsQuery) { super(delegate); flsEnabled = includes != null && !includes.isEmpty(); dlsEnabled = dlsQuery != null;/*from w w w . ja v a 2s .c om*/ if (flsEnabled) { this.includes = includes.toArray(new String[0]); final FieldInfos infos = delegate.getFieldInfos(); final List<FieldInfo> fi = new ArrayList<FieldInfo>(infos.size()); for (final FieldInfo info : infos) { final String fname = info.name; if ((!WildcardMatcher.containsWildcard(fname) && includes.contains(fname)) || WildcardMatcher.matchAny(this.includes, fname)) { fi.add(info); } } this.flsFieldInfos = new FieldInfos(fi.toArray(new FieldInfo[0])); } else { this.includes = null; this.flsFieldInfos = null; } if (dlsEnabled) { try { //borrowed from Apache Lucene (Copyright Apache Software Foundation (ASF)) final IndexSearcher searcher = new IndexSearcher(this); searcher.setQueryCache(null); final boolean needsScores = false; final Weight preserveWeight = searcher.createNormalizedWeight(dlsQuery, needsScores); final int maxDoc = in.maxDoc(); final FixedBitSet bits = new FixedBitSet(maxDoc); final Scorer preverveScorer = preserveWeight.scorer(this.getContext()); if (preverveScorer != null) { bits.or(preverveScorer.iterator()); } if (in.hasDeletions()) { final Bits oldLiveDocs = in.getLiveDocs(); assert oldLiveDocs != null; final DocIdSetIterator it = new BitSetIterator(bits, 0L); for (int i = it.nextDoc(); i != DocIdSetIterator.NO_MORE_DOCS; i = it.nextDoc()) { if (!oldLiveDocs.get(i)) { bits.clear(i); } } } this.liveDocs = bits; this.numDocs = bits.cardinality(); } catch (Exception e) { throw new RuntimeException(e); } } else { this.liveDocs = null; this.numDocs = -1; } }
From source file:com.vmware.xenon.services.common.FieldInfoCache.java
License:Open Source License
/**
 * At the end there will be a single segment with all the fields, so it pays off to
 * cache the longest FieldInfos ever encountered and hand it back whenever an
 * identical field set shows up again.
 *
 * @param infos the field infos of the segment being opened
 * @return a cached {@link FieldInfos} when an identical one exists, otherwise a fresh instance
 */
public FieldInfos dedupFieldInfos(FieldInfo[] infos) {
    FieldInfos cached = this.longest;

    // First call, or strictly more fields than anything seen before: replace the cache.
    if (cached == null || cached.size() < infos.length) {
        FieldInfos grown = new FieldInfos(infos);
        trimFieldInfos(grown);
        this.longest = grown;
        return grown;
    }

    if (cached.size() == infos.length) {
        // Same size: reuse the cached instance only if every field matches by number.
        boolean identical = true;
        for (FieldInfo candidate : infos) {
            FieldInfo existing = cached.fieldInfo(candidate.number);
            if (existing == null || !FieldInfoCache.equals(candidate, existing)) {
                identical = false;
                break;
            }
        }
        if (identical) {
            return cached;
        }
        FieldInfos replacement = new FieldInfos(infos);
        trimFieldInfos(replacement);
        this.longest = replacement;
        return replacement;
    }

    // Fewer fields than the cached maximum: build a trimmed instance but keep the cache.
    FieldInfos smaller = new FieldInfos(infos);
    trimFieldInfos(smaller);
    return smaller;
}
From source file:com.vmware.xenon.services.common.Lucene60FieldInfosFormatWithCache.java
License:Open Source License
@Override public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException { final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION); try (IndexOutput output = directory.createOutput(fileName, context)) { CodecUtil.writeIndexHeader(output, Lucene60FieldInfosFormatWithCache.CODEC_NAME, Lucene60FieldInfosFormatWithCache.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix); output.writeVInt(infos.size()); for (FieldInfo fi : infos) { fi.checkConsistency();//from www .j a va2 s . c o m output.writeString(fi.name); output.writeVInt(fi.number); byte bits = 0x0; if (fi.hasVectors()) { bits |= STORE_TERMVECTOR; } if (fi.omitsNorms()) { bits |= OMIT_NORMS; } if (fi.hasPayloads()) { bits |= STORE_PAYLOADS; } output.writeByte(bits); output.writeByte(indexOptionsByte(fi.getIndexOptions())); // pack the DV type and hasNorms in one byte output.writeByte(docValuesByte(fi.getDocValuesType())); output.writeLong(fi.getDocValuesGen()); output.writeMapOfStrings(fi.attributes()); int pointDimensionCount = fi.getPointDimensionCount(); output.writeVInt(pointDimensionCount); if (pointDimensionCount != 0) { output.writeVInt(fi.getPointNumBytes()); } } CodecUtil.writeFooter(output); } }
From source file:dk.statsbiblioteket.netark.dvenabler.wrapper.DVAtomicReader.java
License:Apache License
/**
 * Returns the delegate's FieldInfos with the entry for every field that has a
 * DocValues override in {@code dvConfigs} swapped for the tweaked FieldInfo.
 */
@Override
public FieldInfos getFieldInfos() {
    log.info("Merging getFieldInfos called with " + maxDoc() + " docs");
    final long startTime = System.nanoTime();

    final FieldInfos original = super.getFieldInfos();
    final FieldInfo[] merged = new FieldInfo[original.size()];
    int index = 0;
    for (final FieldInfo info : original) {
        // Use the tweaked FieldInfo when this field is reconfigured, else keep the original.
        merged[index++] = dvConfigs.containsKey(info.name) ? dvConfigs.get(info.name).getFieldInfo() : info;
    }

    final long elapsedMs = (System.nanoTime() - startTime) / 1000000;
    log.info("Merged " + original.size() + " original and " + dvConfigs.size() + " tweaked FieldInfos for "
            + maxDoc() + " docs in " + elapsedMs + "ms");
    return new FieldInfos(merged);
}
From source file:org.elasticsearch.xpack.core.security.authz.accesscontrol.FieldSubsetReaderTests.java
License:Open Source License
/** * test we have correct fieldinfos metadata *//* ww w .ja v a 2s . co m*/ public void testFieldInfos() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(null); IndexWriter iw = new IndexWriter(dir, iwc); // add document with 2 fields Document doc = new Document(); doc.add(new StringField("fieldA", "test", Field.Store.NO)); doc.add(new StringField("fieldB", "test", Field.Store.NO)); iw.addDocument(doc); // open reader DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field LeafReader segmentReader = ir.leaves().get(0).reader(); FieldInfos infos = segmentReader.getFieldInfos(); assertEquals(1, infos.size()); assertNotNull(infos.fieldInfo("fieldA")); assertNull(infos.fieldInfo("fieldB")); TestUtil.checkReader(ir); IOUtils.close(ir, iw, dir); }
From source file:org.eu.bitzone.Leia.java
License:Apache License
public void showDiagnostics(final Object segmentsTable) { final Object diagsTable = find("diagsTable"); removeAll(diagsTable);/*from w ww . j a v a2s.com*/ final Object row = getSelectedItem(segmentsTable); if (row == null) { return; } final SegmentCommitInfo si = (SegmentCommitInfo) getProperty(row, "si"); if (si == null) { showStatus("Missing SegmentInfoPerCommit???"); return; } Map<String, String> map = si.info.attributes(); if (map != null) { for (final Entry<String, String> e : map.entrySet()) { final Object r = create("row"); add(diagsTable, r); Object cell = create("cell"); setString(cell, "text", "A"); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey()); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); add(r, cell); } } // separator // Object r1 = create("row"); // add(diagsTable, r1); // Object c1 = create("cell"); // setBoolean(c1, "enabled", false); // add(r1, c1); map = si.info.getDiagnostics(); if (map != null) { for (final Entry<String, String> e : map.entrySet()) { final Object r = create("row"); add(diagsTable, r); Object cell = create("cell"); setString(cell, "text", "D"); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey()); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); add(r, cell); } } // separator Object r1 = create("row"); add(diagsTable, r1); Object c1 = create("cell"); setBoolean(c1, "enabled", false); add(r1, c1); // codec info final Codec codec = si.info.getCodec(); map = new LinkedHashMap<String, String>(); map.put("codecName", codec.getName()); map.put("codecClassName", codec.getClass().getName()); map.put("docValuesFormat", codec.docValuesFormat().getClass().getName()); map.put("fieldInfosFormat", codec.fieldInfosFormat().getClass().getName()); map.put("liveDocsFormat", codec.liveDocsFormat().getClass().getName()); map.put("normsFormat", codec.normsFormat().getClass().getName()); map.put("postingsFormat", codec.postingsFormat().toString() + 
" " + codec.postingsFormat().getClass().getName()); map.put("segmentInfoFormat", codec.segmentInfoFormat().getClass().getName()); map.put("storedFieldsFormat", codec.storedFieldsFormat().getClass().getName()); map.put("termVectorsFormat", codec.termVectorsFormat().getClass().getName()); try { final List<String> files = new ArrayList<String>(si.files()); Collections.sort(files); map.put("---files---", files.toString()); if (si.info.getUseCompoundFile()) { final Directory d = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(si.info.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), IOContext.READ, false); files.clear(); files.addAll(Arrays.asList(d.listAll())); d.close(); Collections.sort(files); map.put("-CFS-files-", files.toString()); } } catch (final Exception e) { e.printStackTrace(); map.put("---files---", "Exception: " + e.toString()); } for (final Entry<String, String> e : map.entrySet()) { final Object r = create("row"); add(diagsTable, r); Object cell = create("cell"); setString(cell, "text", "C"); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey()); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); add(r, cell); } // fieldInfos try { final SegmentReader sr = new SegmentReader(si, 1, IOContext.READ); final FieldInfos fis = sr.getFieldInfos(); map = new LinkedHashMap<String, String>(); final List<String> flds = new ArrayList<String>(fis.size()); for (final FieldInfo fi : fis) { flds.add(fi.name); } Collections.sort(flds); map.put("L---fields---", flds.toString()); for (final String fn : flds) { final FieldInfo fi = fis.fieldInfo(fn); map.put("A" + fi.name, fi.attributes().toString()); } map.put("F---flags----", "IdfpoPVNtxxDtxx"); for (final String fn : flds) { final FieldInfo fi = fis.fieldInfo(fn); map.put("F" + fi.name, Util.fieldFlags(null, fi)); } sr.close(); // separator r1 = create("row"); add(diagsTable, r1); c1 = create("cell"); setBoolean(c1, "enabled", false); add(r1, c1); for (final 
Entry<String, String> e : map.entrySet()) { final Object r = create("row"); add(diagsTable, r); Object cell = create("cell"); setString(cell, "text", "F" + e.getKey().charAt(0)); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey().substring(1)); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); if (e.getKey().startsWith("F")) { setFont(cell, courier); } add(r, cell); } } catch (final IOException e1) { e1.printStackTrace(); } }
From source file:org.getopt.luke.Luke.java
License:Apache License
public void showDiagnostics(Object segmentsTable) { Object diagsTable = find("diagsTable"); removeAll(diagsTable);//from w w w. j a v a 2 s. c o m Object row = getSelectedItem(segmentsTable); if (row == null) { return; } SegmentCommitInfo si = (SegmentCommitInfo) getProperty(row, "si"); if (si == null) { showStatus("Missing SegmentInfoPerCommit???"); return; } Map<String, String> map = si.info.attributes(); if (map != null) { for (Entry<String, String> e : map.entrySet()) { Object r = create("row"); add(diagsTable, r); Object cell = create("cell"); setString(cell, "text", "A"); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey()); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); add(r, cell); } } // separator // Object r1 = create("row"); // add(diagsTable, r1); // Object c1 = create("cell"); // setBoolean(c1, "enabled", false); // add(r1, c1); map = si.info.getDiagnostics(); if (map != null) { for (Entry<String, String> e : map.entrySet()) { Object r = create("row"); add(diagsTable, r); Object cell = create("cell"); setString(cell, "text", "D"); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey()); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); add(r, cell); } } // separator Object r1 = create("row"); add(diagsTable, r1); Object c1 = create("cell"); setBoolean(c1, "enabled", false); add(r1, c1); // codec info Codec codec = si.info.getCodec(); map = new LinkedHashMap<String, String>(); map.put("codecName", codec.getName()); map.put("codecClassName", codec.getClass().getName()); map.put("docValuesFormat", codec.docValuesFormat().getClass().getName()); map.put("fieldInfosFormat", codec.fieldInfosFormat().getClass().getName()); map.put("liveDocsFormat", codec.liveDocsFormat().getClass().getName()); map.put("normsFormat", codec.normsFormat().getClass().getName()); map.put("postingsFormat", codec.postingsFormat().toString() + " " + 
codec.postingsFormat().getClass().getName()); map.put("segmentInfoFormat", codec.segmentInfoFormat().getClass().getName()); map.put("storedFieldsFormat", codec.storedFieldsFormat().getClass().getName()); map.put("termVectorsFormat", codec.termVectorsFormat().getClass().getName()); try { List<String> files = new ArrayList<String>(si.files()); Collections.sort(files); map.put("---files---", files.toString()); if (si.info.getUseCompoundFile()) { Directory d = new CompoundFileDirectory(dir, IndexFileNames.segmentFileName(si.info.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), IOContext.READ, false); files.clear(); files.addAll(Arrays.asList(d.listAll())); d.close(); Collections.sort(files); map.put("-CFS-files-", files.toString()); } } catch (Exception e) { e.printStackTrace(); map.put("---files---", "Exception: " + e.toString()); } for (Entry<String, String> e : map.entrySet()) { Object r = create("row"); add(diagsTable, r); Object cell = create("cell"); setString(cell, "text", "C"); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey()); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); add(r, cell); } // fieldInfos try { SegmentReader sr = new SegmentReader(si, 1, IOContext.READ); FieldInfos fis = sr.getFieldInfos(); map = new LinkedHashMap<String, String>(); List<String> flds = new ArrayList<String>(fis.size()); for (FieldInfo fi : fis) { flds.add(fi.name); } Collections.sort(flds); map.put("L---fields---", flds.toString()); for (String fn : flds) { FieldInfo fi = fis.fieldInfo(fn); map.put("A" + fi.name, fi.attributes().toString()); } map.put("F---flags----", "IdfpoPVNtxxDtxx"); for (String fn : flds) { FieldInfo fi = fis.fieldInfo(fn); map.put("F" + fi.name, Util.fieldFlags(null, fi)); } sr.close(); // separator r1 = create("row"); add(diagsTable, r1); c1 = create("cell"); setBoolean(c1, "enabled", false); add(r1, c1); for (Entry<String, String> e : map.entrySet()) { Object r = create("row"); add(diagsTable, r); 
Object cell = create("cell"); setString(cell, "text", "F" + e.getKey().charAt(0)); add(r, cell); cell = create("cell"); setString(cell, "text", e.getKey().substring(1)); add(r, cell); cell = create("cell"); setString(cell, "text", e.getValue()); if (e.getKey().startsWith("F")) { setFont(cell, courier); } add(r, cell); } } catch (IOException e1) { e1.printStackTrace(); } }
From source file:org.oscm.search.IndexRequestMasterListenerIT.java
/**
 * Asserts (inside a transaction) that the Lucene index for {@code clazz} contains
 * the expected number of documents and that the expected attributes — plus the
 * implicit "key" and "_hibernate_class" fields — are indexed.
 *
 * @param clazz                         the indexed entity class
 * @param comment                       message prefix for the document-count assertion
 * @param expectedNumDocs               expected number of documents in the index
 * @param expectedNumIndexedAttributes  expected attribute count, excluding the two implicit fields
 * @param expectedAttributes            attribute names that must be present in the index
 */
private void assertDocsInIndex(final Class<?> clazz, final String comment, final int expectedNumDocs,
        final int expectedNumIndexedAttributes, final List<String> expectedAttributes) throws Exception {
    Boolean evaluationTookPlace = runTX(new Callable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
            Session session = dm.getSession();
            if (session == null) {
                // No session available: nothing was evaluated.
                return Boolean.FALSE;
            }
            FullTextSession fullTextSession = Search.getFullTextSession(session);
            SearchFactory searchFactory = fullTextSession.getSearchFactory();
            IndexReader reader = searchFactory.getIndexReaderAccessor().open(clazz);
            try {
                assertEquals(comment, expectedNumDocs, reader.numDocs());
                if (expectedNumDocs <= 0) {
                    // An empty index has no attributes worth checking.
                    return Boolean.FALSE;
                }
                final FieldInfos indexedFieldNames = ReaderUtil.getMergedFieldInfos(reader);
                for (String expectedAttr : expectedAttributes) {
                    assertNotNull("attribute " + expectedAttr + " does not exist in index: " + indexedFieldNames,
                            indexedFieldNames.fieldInfo(expectedAttr));
                }
                // Hibernate Search always indexes these two bookkeeping fields.
                assertNotNull("attribute \"key\" does not exist in index: " + indexedFieldNames,
                        indexedFieldNames.fieldInfo("key"));
                assertNotNull("attribute \"_hibernate_class\" does not exist in index: " + indexedFieldNames,
                        indexedFieldNames.fieldInfo("_hibernate_class"));
                assertEquals("More or less attributes indexed than expected, attributes retrieved from index: "
                        + indexedFieldNames, expectedNumIndexedAttributes + 2, indexedFieldNames.size());
                return Boolean.TRUE;
            } finally {
                searchFactory.getIndexReaderAccessor().close(reader);
            }
        }
    });
    if (expectedNumDocs > 0) {
        Assert.assertTrue("Index not found, no evaluation took place", evaluationTookPlace.booleanValue());
    }
}
From source file:tech.beshu.ror.es.security.DocumentFieldReader.java
License:Open Source License
/**
 * Wraps a LeafReader so that only the fields permitted by the {@code fields}
 * policy remain visible in its FieldInfos.
 *
 * @param reader the delegate leaf reader
 * @param fields the field names/patterns used to build the keep policy
 */
private DocumentFieldReader(LeafReader reader, Set<String> fields) {
    super(reader);
    this.policy = new FieldsSyncRule.FieldPolicy(fields);
    FieldInfos fieldInfos = in.getFieldInfos();

    // Collect the delegate's field names for logging and the empty-index check.
    Set<String> baseFields = new HashSet<>(fieldInfos.size());
    for (FieldInfo info : fieldInfos) {
        baseFields.add(info.name);
    }

    if (baseFields.isEmpty()) {
        // Nothing to filter; keep the delegate's (empty) FieldInfos untouched.
        logger.warn("original fields were empty! This is weird!");
        remainingFieldsInfo = fieldInfos;
    } else {
        // Keep only the fields the policy allows.
        Set<FieldInfo> kept = StreamSupport.stream(fieldInfos.spliterator(), false)
                .filter(info -> policy.canKeep(info.name))
                .collect(Collectors.toSet());
        this.remainingFieldsInfo = new FieldInfos(kept.toArray(new FieldInfo[kept.size()]));
    }

    if (logger.isDebugEnabled()) {
        logger.debug("always allow: " + Constants.FIELDS_ALWAYS_ALLOW);
        logger.debug("original fields were: " + baseFields);
        logger.debug("new fields are: " + StreamSupport.stream(remainingFieldsInfo.spliterator(), false)
                .map(info -> info.name).collect(Collectors.toSet()));
    }
}