List of usage examples for org.apache.lucene.codecs.CodecUtil#footerLength()
public static int footerLength()
From source file: com.lucure.core.codec.CompressingStoredFieldsReader.java
License: Apache License
/**
 * Sole constructor. Opens and validates the stored-fields index file, loads the
 * chunk index into memory, then opens the stored-fields data file and reads its
 * metadata (chunk size, packed-ints version, decompressor).
 *
 * @param d               directory holding the segment files
 * @param si              segment info supplying the segment name and doc count
 * @param segmentSuffix   suffix used to build the index/data file names
 * @param fn              field infos for this segment
 * @param context         IO context used to open the inputs
 * @param formatName      base codec name; suffixed with CODEC_SFX_IDX/CODEC_SFX_DAT for the headers
 * @param compressionMode compression mode used to create the decompressor
 * @throws IOException if the files are missing, truncated, or corrupt
 */
public CompressingStoredFieldsReader(Directory d, SegmentInfo si, String segmentSuffix, FieldInfos fn,
        IOContext context, String formatName, CompressionMode compressionMode) throws IOException {
    this.compressionMode = compressionMode;
    final String segment = si.name;
    // 'success' guards the finally block: on any failure, close whatever was opened.
    boolean success = false;
    fieldInfos = fn;
    numDocs = si.getDocCount();
    ChecksumIndexInput indexStream = null;
    try {
        final String indexStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION);
        final String fieldsStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_EXTENSION);
        // Load the index into memory
        indexStream = d.openChecksumInput(indexStreamFN, context);
        final String codecNameIdx = formatName + CODEC_SFX_IDX;
        version = CodecUtil.checkHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT);
        assert CodecUtil.headerLength(codecNameIdx) == indexStream.getFilePointer();
        indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
        long maxPointer = -1;
        if (version >= VERSION_CHECKSUM) {
            // Checksummed versions also record the data-file end offset before the footer.
            maxPointer = indexStream.readVLong();
            CodecUtil.checkFooter(indexStream);
        } else {
            CodecUtil.checkEOF(indexStream);
        }
        indexStream.close();
        indexStream = null;
        // Open the data file and read metadata
        fieldsStream = d.openInput(fieldsStreamFN, context);
        if (version >= VERSION_CHECKSUM) {
            // Cheap truncation check: the data file must be exactly maxPointer plus the footer.
            if (maxPointer + CodecUtil.footerLength() != fieldsStream.length()) {
                throw new CorruptIndexException("Invalid fieldsStream maxPointer (file truncated?): maxPointer="
                        + maxPointer + ", length=" + fieldsStream.length());
            }
        } else {
            maxPointer = fieldsStream.length();
        }
        this.maxPointer = maxPointer;
        final String codecNameDat = formatName + CODEC_SFX_DAT;
        final int fieldsVersion = CodecUtil.checkHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT);
        if (version != fieldsVersion) {
            throw new CorruptIndexException(
                    "Version mismatch between stored fields index and data: " + version + " != " + fieldsVersion);
        }
        assert CodecUtil.headerLength(codecNameDat) == fieldsStream.getFilePointer();
        if (version >= VERSION_BIG_CHUNKS) {
            chunkSize = fieldsStream.readVInt();
        } else {
            chunkSize = -1;
        }
        packedIntsVersion = fieldsStream.readVInt();
        decompressor = compressionMode.newDecompressor();
        this.bytes = new BytesRef();
        if (version >= VERSION_CHECKSUM) {
            // NOTE: data file is too costly to verify checksum against all the bytes on open,
            // but for now we at least verify proper structure of the checksum footer: which looks
            // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
            // such as file truncation.
            CodecUtil.retrieveChecksum(fieldsStream);
        }
        success = true;
    } finally {
        if (!success) {
            IOUtils.closeWhileHandlingException(this, indexStream);
        }
    }
}
From source file: com.rocana.lucene.codec.v1.RocanaBlockTreeTermsReader.java
License: Apache License
/**
 * Seeks {@code input} to the terms-dictionary directory offset.
 * <p>
 * The real directory offset is stored as a long located 8 bytes before the
 * codec footer at the end of the file; the incoming {@code dirOffset} argument
 * is therefore never read (it is kept only for signature compatibility).
 *
 * @param input     the input to reposition
 * @param dirOffset ignored; retained for caller compatibility
 * @throws IOException if seeking or reading fails
 */
private void seekDir(IndexInput input, long dirOffset) throws IOException {
    // The directory offset lives just before the footer: [... data | offset(8) | footer].
    input.seek(input.length() - CodecUtil.footerLength() - 8);
    final long offset = input.readLong();
    input.seek(offset);
}
From source file: org.elasticsearch.gateway.local.state.meta.MetaDataStateFormat.java
License: Apache License
/**
 * Reads the state from a given file and compares the expected version against
 * the actual version of the state.
 *
 * @param file the state file to read
 * @return the parsed state object
 * @throws IOException if the file cannot be read
 * @throws CorruptStateException if the file fails checksum or header validation
 */
public final T read(File file) throws IOException {
    try (Directory dir = newDirectory(file.getParentFile())) {
        try (final IndexInput indexInput = dir.openInput(file.getName(), IOContext.DEFAULT)) {
            // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, STATE_FILE_VERSION, STATE_FILE_VERSION);
            final XContentType xContentType = XContentType.values()[indexInput.readInt()];
            indexInput.readLong(); // version currently unused
            // The content sits between the current file pointer and the codec footer.
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
                try (XContentParser parser = XContentFactory.xContent(xContentType)
                        .createParser(new InputStreamIndexInput(slice, contentSize))) {
                    return fromXContent(parser);
                }
            }
        } catch (CorruptIndexException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
From source file: org.elasticsearch.gateway.MetaDataStateFormat.java
License: Apache License
/**
 * Reads the state from a given file and compares the expected version against
 * the actual version of the state.
 *
 * @param file path to the state file to read
 * @return the parsed state object
 * @throws IOException if the file cannot be read
 * @throws CorruptStateException if the file fails checksum, header, or format validation
 */
public final T read(Path file) throws IOException {
    try (Directory dir = newDirectory(file.getParent())) {
        try (final IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
            // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, STATE_FILE_VERSION, STATE_FILE_VERSION);
            final XContentType xContentType = XContentType.values()[indexInput.readInt()];
            indexInput.readLong(); // version currently unused
            // The content sits between the current file pointer and the codec footer.
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
                try (XContentParser parser = XContentFactory.xContent(xContentType)
                        .createParser(new InputStreamIndexInput(slice, contentSize))) {
                    return fromXContent(parser);
                }
            }
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
From source file: org.elasticsearch.index.store.StoreTest.java
License: Apache License
/**
 * Verifies {@code Store.checkIntegrity} for both Lucene-footer checksums and
 * legacy Adler32 checksums: writes one file of each kind, then checks the
 * positive case plus three negative cases (wrong checksum, wrong length,
 * wrong file name).
 */
public void testCheckIntegrity() throws IOException {
    Directory dir = newDirectory();
    long luceneFileLength = 0;
    // File with a proper Lucene codec footer (CRC checksum).
    try (IndexOutput output = dir.createOutput("lucene_checksum.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            luceneFileLength += bytesRef.length;
        }
        CodecUtil.writeFooter(output);
        luceneFileLength += CodecUtil.footerLength();
    }
    // Footer-less file whose checksum is tracked externally via Adler32 ("legacy").
    final Adler32 adler32 = new Adler32();
    long legacyFileLength = 0;
    try (IndexOutput output = dir.createOutput("legacy.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            adler32.update(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            legacyFileLength += bytesRef.length;
        }
    }
    final long luceneChecksum;
    final long adler32LegacyChecksum = adler32.getValue();
    try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
        assertEquals(luceneFileLength, indexInput.length());
        luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
    }
    { // positive check
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength,
                Store.digestToString(luceneChecksum), Version.LUCENE_48);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength,
                Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertTrue(Store.checkIntegrity(lucene, dir));
        assertTrue(Store.checkIntegrity(legacy, dir));
    }
    { // negative check - wrong checksum
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength,
                Store.digestToString(luceneChecksum + 1), Version.LUCENE_48);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength,
                Store.digestToString(adler32LegacyChecksum + 1));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrity(lucene, dir));
        assertFalse(Store.checkIntegrity(legacy, dir));
    }
    { // negative check - wrong length
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength + 1,
                Store.digestToString(luceneChecksum), Version.LUCENE_48);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength + 1,
                Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrity(lucene, dir));
        assertFalse(Store.checkIntegrity(legacy, dir));
    }
    { // negative check - wrong file
        StoreFileMetaData lucene = new StoreFileMetaData("legacy.bin", luceneFileLength,
                Store.digestToString(luceneChecksum), Version.LUCENE_48);
        StoreFileMetaData legacy = new StoreFileMetaData("lucene_checksum.bin", legacyFileLength,
                Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrity(lucene, dir));
        assertFalse(Store.checkIntegrity(legacy, dir));
    }
    dir.close();
}
From source file: org.elasticsearch.index.store.StoreTests.java
License: Apache License
/**
 * Verifies {@code Store.checkIntegrityNoException} for both Lucene-footer
 * checksums and legacy Adler32 checksums: writes one file of each kind, then
 * checks the positive case plus three negative cases (wrong checksum, wrong
 * length, wrong file name).
 */
public void testCheckIntegrity() throws IOException {
    Directory dir = newDirectory();
    long luceneFileLength = 0;
    // File with a proper Lucene codec footer (CRC checksum).
    try (IndexOutput output = dir.createOutput("lucene_checksum.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            luceneFileLength += bytesRef.length;
        }
        CodecUtil.writeFooter(output);
        luceneFileLength += CodecUtil.footerLength();
    }
    // Footer-less file whose checksum is tracked externally via Adler32 ("legacy").
    final Adler32 adler32 = new Adler32();
    long legacyFileLength = 0;
    try (IndexOutput output = dir.createOutput("legacy.bin", IOContext.DEFAULT)) {
        int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
            output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            adler32.update(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            legacyFileLength += bytesRef.length;
        }
    }
    final long luceneChecksum;
    final long adler32LegacyChecksum = adler32.getValue();
    try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) {
        assertEquals(luceneFileLength, indexInput.length());
        luceneChecksum = CodecUtil.retrieveChecksum(indexInput);
    }
    { // positive check
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength,
                Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength,
                Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertTrue(Store.checkIntegrityNoException(lucene, dir));
        assertTrue(Store.checkIntegrityNoException(legacy, dir));
    }
    { // negative check - wrong checksum
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength,
                Store.digestToString(luceneChecksum + 1), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength,
                Store.digestToString(adler32LegacyChecksum + 1));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrityNoException(lucene, dir));
        assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }
    { // negative check - wrong length
        StoreFileMetaData lucene = new StoreFileMetaData("lucene_checksum.bin", luceneFileLength + 1,
                Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("legacy.bin", legacyFileLength + 1,
                Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrityNoException(lucene, dir));
        assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }
    { // negative check - wrong file
        StoreFileMetaData lucene = new StoreFileMetaData("legacy.bin", luceneFileLength,
                Store.digestToString(luceneChecksum), Version.LUCENE_4_8_0);
        StoreFileMetaData legacy = new StoreFileMetaData("lucene_checksum.bin", legacyFileLength,
                Store.digestToString(adler32LegacyChecksum));
        assertTrue(legacy.hasLegacyChecksum());
        assertFalse(lucene.hasLegacyChecksum());
        assertFalse(Store.checkIntegrityNoException(lucene, dir));
        assertFalse(Store.checkIntegrityNoException(legacy, dir));
    }
    dir.close();
}
From source file: org.elasticsearch.repositories.blobstore.ChecksumBlobStoreFormat.java
License: Apache License
/**
 * Reads the blob with the specified name without resolving it through the
 * {@link #blobName} method.
 *
 * @param blobContainer blob container to read from
 * @param blobName      exact blob name to read
 * @return the parsed object stored in the blob
 * @throws IOException if the blob cannot be read
 * @throws CorruptStateException if the blob fails checksum, header, or format validation
 */
public T readBlob(BlobContainer blobContainer, String blobName) throws IOException {
    try (InputStream inputStream = blobContainer.readBlob(blobName)) {
        // Buffer the whole blob so it can be checksummed and then sliced.
        byte[] bytes = ByteStreams.toByteArray(inputStream);
        final String resourceDesc = "ChecksumBlobStoreFormat.readBlob(blob=\"" + blobName + "\")";
        try (ByteArrayIndexInput indexInput = new ByteArrayIndexInput(resourceDesc, bytes)) {
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, codec, VERSION, VERSION);
            // The payload sits between the header (current file pointer) and the codec footer.
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            BytesReference bytesReference = new BytesArray(bytes, (int) filePointer, (int) contentSize);
            return read(bytesReference);
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
From source file: org.elasticsearch.search.suggest.completion.old.AnalyzingCompletionLookupProvider.java
License: Apache License
/**
 * Loads the serialized completion data from {@code input}: validates header
 * (and, for newer versions, the whole-file checksum), locates the metadata
 * pointer stored at the end of the file, deserializes one FST per field, and
 * returns a {@link Completion090PostingsFormat.LookupFactory} that builds
 * suggesters over the in-memory FSTs.
 *
 * @param input the completion dictionary input; fully consumed by this method
 * @return a lookup factory backed by the deserialized per-field FSTs
 * @throws IOException if the input is truncated or corrupt
 */
@Override
public Completion090PostingsFormat.LookupFactory load(IndexInput input) throws IOException {
    long sizeInBytes = 0;
    int version = CodecUtil.checkHeader(input, CODEC_NAME, CODEC_VERSION_START, CODEC_VERSION_LATEST);
    if (version >= CODEC_VERSION_CHECKSUMS) {
        CodecUtil.checksumEntireFile(input);
    }
    // The meta pointer (a long) sits at the end of the file, just before the
    // codec footer when the version writes checksums.
    final long metaPointerPosition = input.length()
            - (version >= CODEC_VERSION_CHECKSUMS ? 8 + CodecUtil.footerLength() : 8);
    final Map<String, AnalyzingSuggestHolder> lookupMap = new HashMap<>();
    input.seek(metaPointerPosition);
    long metaPointer = input.readLong();
    input.seek(metaPointer);
    int numFields = input.readVInt();
    // Sorted by file offset so the FSTs below are read in forward order.
    Map<Long, String> meta = new TreeMap<>();
    for (int i = 0; i < numFields; i++) {
        String name = input.readString();
        long offset = input.readVLong();
        meta.put(offset, name);
    }
    for (Map.Entry<Long, String> entry : meta.entrySet()) {
        input.seek(entry.getKey());
        FST<Pair<Long, BytesRef>> fst = new FST<>(input,
                new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()));
        int maxAnalyzedPathsForOneInput = input.readVInt();
        int maxSurfaceFormsPerAnalyzedForm = input.readVInt();
        int maxGraphExpansions = input.readInt();
        int options = input.readVInt();
        boolean preserveSep = (options & SERIALIZE_PRESERVE_SEPARATORS) != 0;
        boolean hasPayloads = (options & SERIALIZE_HAS_PAYLOADS) != 0;
        boolean preservePositionIncrements = (options & SERIALIZE_PRESERVE_POSITION_INCREMENTS) != 0;
        // first version did not include these three fields, so fall back to old default (before the analyzingsuggester
        // was updated in Lucene, so we cannot use the suggester defaults)
        int sepLabel, payloadSep, endByte, holeCharacter;
        switch (version) {
        case CODEC_VERSION_START:
            sepLabel = 0xFF;
            payloadSep = '\u001f';
            endByte = 0x0;
            holeCharacter = '\u001E';
            break;
        default:
            sepLabel = input.readVInt();
            endByte = input.readVInt();
            payloadSep = input.readVInt();
            holeCharacter = input.readVInt();
        }
        AnalyzingSuggestHolder holder = new AnalyzingSuggestHolder(preserveSep, preservePositionIncrements,
                maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, hasPayloads, maxAnalyzedPathsForOneInput, fst,
                sepLabel, payloadSep, endByte, holeCharacter);
        sizeInBytes += fst.ramBytesUsed();
        lookupMap.put(entry.getValue(), holder);
    }
    final long ramBytesUsed = sizeInBytes;
    return new Completion090PostingsFormat.LookupFactory() {
        // Builds a fuzzy or exact suggester for the field, or null if the field has no FST.
        @Override
        public Lookup getLookup(OldCompletionFieldMapper.CompletionFieldType fieldType,
                CompletionSuggestionContext suggestionContext) {
            AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(fieldType.names().indexName());
            if (analyzingSuggestHolder == null) {
                return null;
            }
            int flags = analyzingSuggestHolder.getPreserveSeparator() ? XAnalyzingSuggester.PRESERVE_SEP : 0;
            final XAnalyzingSuggester suggester;
            final Automaton queryPrefix = fieldType.requiresContext()
                    ? ContextMapping.ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(),
                            suggestionContext.getContextQueries())
                    : null;
            if (suggestionContext.isFuzzy()) {
                suggester = new XFuzzySuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(),
                        flags, analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm,
                        analyzingSuggestHolder.maxGraphExpansions, suggestionContext.getFuzzyEditDistance(),
                        suggestionContext.isFuzzyTranspositions(), suggestionContext.getFuzzyPrefixLength(),
                        suggestionContext.getFuzzyMinLength(), suggestionContext.isFuzzyUnicodeAware(),
                        analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads,
                        analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel,
                        analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte,
                        analyzingSuggestHolder.holeCharacter);
            } else {
                suggester = new XAnalyzingSuggester(fieldType.indexAnalyzer(), queryPrefix,
                        fieldType.searchAnalyzer(), flags, analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm,
                        analyzingSuggestHolder.maxGraphExpansions,
                        analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst,
                        analyzingSuggestHolder.hasPayloads, analyzingSuggestHolder.maxAnalyzedPathsForOneInput,
                        analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep,
                        analyzingSuggestHolder.endByte, analyzingSuggestHolder.holeCharacter);
            }
            return suggester;
        }

        // Aggregates FST RAM usage, optionally broken down per matching field pattern.
        @Override
        public CompletionStats stats(String... fields) {
            long sizeInBytes = 0;
            ObjectLongHashMap<String> completionFields = null;
            if (fields != null && fields.length > 0) {
                completionFields = new ObjectLongHashMap<>(fields.length);
            }
            for (Map.Entry<String, AnalyzingSuggestHolder> entry : lookupMap.entrySet()) {
                sizeInBytes += entry.getValue().fst.ramBytesUsed();
                if (fields == null || fields.length == 0) {
                    continue;
                }
                if (Regex.simpleMatch(fields, entry.getKey())) {
                    long fstSize = entry.getValue().fst.ramBytesUsed();
                    completionFields.addTo(entry.getKey(), fstSize);
                }
            }
            return new CompletionStats(sizeInBytes, completionFields);
        }

        @Override
        AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType) {
            return lookupMap.get(fieldType.names().indexName());
        }

        @Override
        public long ramBytesUsed() {
            return ramBytesUsed;
        }

        @Override
        public Collection<Accountable> getChildResources() {
            return Accountables.namedAccountables("field", lookupMap);
        }
    };
}
From source file: org.elasticsearch.search.suggest.completion2x.AnalyzingCompletionLookupProvider.java
License: Apache License
/**
 * Loads the serialized completion data from {@code input}: validates header
 * (and, for newer versions, the whole-file checksum), locates the metadata
 * pointer stored at the end of the file, deserializes one FST per field, and
 * returns a {@link LookupFactory} that builds suggesters over the in-memory
 * FSTs.
 *
 * @param input the completion dictionary input; fully consumed by this method
 * @return a lookup factory backed by the deserialized per-field FSTs
 * @throws IOException if the input is truncated or corrupt
 */
@Override
public LookupFactory load(IndexInput input) throws IOException {
    long sizeInBytes = 0;
    int version = CodecUtil.checkHeader(input, CODEC_NAME, CODEC_VERSION_START, CODEC_VERSION_LATEST);
    if (version >= CODEC_VERSION_CHECKSUMS) {
        CodecUtil.checksumEntireFile(input);
    }
    // The meta pointer (a long) sits at the end of the file, just before the
    // codec footer when the version writes checksums.
    final long metaPointerPosition = input.length()
            - (version >= CODEC_VERSION_CHECKSUMS ? 8 + CodecUtil.footerLength() : 8);
    final Map<String, AnalyzingSuggestHolder> lookupMap = new HashMap<>();
    input.seek(metaPointerPosition);
    long metaPointer = input.readLong();
    input.seek(metaPointer);
    int numFields = input.readVInt();
    // Sorted by file offset so the FSTs below are read in forward order.
    Map<Long, String> meta = new TreeMap<>();
    for (int i = 0; i < numFields; i++) {
        String name = input.readString();
        long offset = input.readVLong();
        meta.put(offset, name);
    }
    for (Map.Entry<Long, String> entry : meta.entrySet()) {
        input.seek(entry.getKey());
        FST<Pair<Long, BytesRef>> fst = new FST<>(input,
                new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()));
        int maxAnalyzedPathsForOneInput = input.readVInt();
        int maxSurfaceFormsPerAnalyzedForm = input.readVInt();
        int maxGraphExpansions = input.readInt();
        int options = input.readVInt();
        boolean preserveSep = (options & SERIALIZE_PRESERVE_SEPARATORS) != 0;
        boolean hasPayloads = (options & SERIALIZE_HAS_PAYLOADS) != 0;
        boolean preservePositionIncrements = (options & SERIALIZE_PRESERVE_POSITION_INCREMENTS) != 0;
        // first version did not include these three fields, so fall back to old default (before the analyzingsuggester
        // was updated in Lucene, so we cannot use the suggester defaults)
        int sepLabel, payloadSep, endByte, holeCharacter;
        switch (version) {
        case CODEC_VERSION_START:
            sepLabel = 0xFF;
            payloadSep = '\u001f';
            endByte = 0x0;
            holeCharacter = '\u001E';
            break;
        default:
            sepLabel = input.readVInt();
            endByte = input.readVInt();
            payloadSep = input.readVInt();
            holeCharacter = input.readVInt();
        }
        AnalyzingSuggestHolder holder = new AnalyzingSuggestHolder(preserveSep, preservePositionIncrements,
                maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, hasPayloads, maxAnalyzedPathsForOneInput, fst,
                sepLabel, payloadSep, endByte, holeCharacter);
        sizeInBytes += fst.ramBytesUsed();
        lookupMap.put(entry.getValue(), holder);
    }
    final long ramBytesUsed = sizeInBytes;
    return new LookupFactory() {
        // Builds a fuzzy or exact suggester for the field, or null if the field has no FST.
        @Override
        public Lookup getLookup(CompletionFieldMapper2x.CompletionFieldType fieldType,
                CompletionSuggestionContext suggestionContext) {
            AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(fieldType.name());
            if (analyzingSuggestHolder == null) {
                return null;
            }
            int flags = analyzingSuggestHolder.getPreserveSeparator() ? XAnalyzingSuggester.PRESERVE_SEP : 0;
            final XAnalyzingSuggester suggester;
            final Automaton queryPrefix = fieldType.requiresContext()
                    ? ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(),
                            suggestionContext.getContextQueries())
                    : null;
            final FuzzyOptions fuzzyOptions = suggestionContext.getFuzzyOptions();
            if (fuzzyOptions != null) {
                suggester = new XFuzzySuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(),
                        flags, analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm,
                        analyzingSuggestHolder.maxGraphExpansions, fuzzyOptions.getEditDistance(),
                        fuzzyOptions.isTranspositions(), fuzzyOptions.getFuzzyPrefixLength(),
                        fuzzyOptions.getFuzzyMinLength(), fuzzyOptions.isUnicodeAware(),
                        analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads,
                        analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel,
                        analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte,
                        analyzingSuggestHolder.holeCharacter);
            } else {
                suggester = new XAnalyzingSuggester(fieldType.indexAnalyzer(), queryPrefix,
                        fieldType.searchAnalyzer(), flags, analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm,
                        analyzingSuggestHolder.maxGraphExpansions,
                        analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst,
                        analyzingSuggestHolder.hasPayloads, analyzingSuggestHolder.maxAnalyzedPathsForOneInput,
                        analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep,
                        analyzingSuggestHolder.endByte, analyzingSuggestHolder.holeCharacter);
            }
            return suggester;
        }

        // Aggregates FST RAM usage, optionally broken down per matching field pattern.
        @Override
        public CompletionStats stats(String... fields) {
            long sizeInBytes = 0;
            ObjectLongHashMap<String> completionFields = null;
            if (fields != null && fields.length > 0) {
                completionFields = new ObjectLongHashMap<>(fields.length);
            }
            for (Map.Entry<String, AnalyzingSuggestHolder> entry : lookupMap.entrySet()) {
                sizeInBytes += entry.getValue().fst.ramBytesUsed();
                if (fields == null || fields.length == 0) {
                    continue;
                }
                if (Regex.simpleMatch(fields, entry.getKey())) {
                    long fstSize = entry.getValue().fst.ramBytesUsed();
                    completionFields.addTo(entry.getKey(), fstSize);
                }
            }
            return new CompletionStats(sizeInBytes, completionFields);
        }

        @Override
        AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType) {
            return lookupMap.get(fieldType.name());
        }

        @Override
        public long ramBytesUsed() {
            return ramBytesUsed;
        }

        @Override
        public Collection<Accountable> getChildResources() {
            return Accountables.namedAccountables("field", lookupMap);
        }
    };
}