Example usage for org.apache.lucene.store DataInput readVLong

List of usage examples for org.apache.lucene.store DataInput readVLong

Introduction

On this page you can find example usages of org.apache.lucene.store DataInput readVLong.

Prototype

public long readVLong() throws IOException 

Source Link

Document

Reads a long stored in variable-length format.

Usage

From source file:com.lucure.core.codec.CompressingStoredFieldsReader.java

License:Apache License

/**
 * Reads the stored fields of document {@code docID} and feeds them to {@code visitor}.
 *
 * <p>The fields stream is organized in compressed chunks of documents. This method seeks to
 * the chunk containing {@code docID}, decodes the per-document field counts and lengths
 * (either stored as a single vInt or packed with {@link PackedInts}), decompresses the
 * document's slice, and then walks each stored field, honoring the visitor's
 * {@code needsField} decision. Fields carry an optional visibility marker which is
 * evaluated before the field is exposed to the visitor.
 *
 * @param docID   the document to load (must fall inside a valid chunk)
 * @param visitor receives the decoded stored fields
 * @throws CorruptIndexException if the chunk header or packed metadata is inconsistent
 * @throws IOException           on any underlying read failure
 */
@Override
public void visitDocument(int docID, StoredFieldVisitor visitor) throws IOException {
    fieldsStream.seek(indexReader.getStartPointer(docID));

    // Chunk header: first doc in the chunk and how many docs it holds.
    final int docBase = fieldsStream.readVInt();
    final int chunkDocs = fieldsStream.readVInt();
    if (docID < docBase || docID >= docBase + chunkDocs || docBase + chunkDocs > numDocs) {
        throw new CorruptIndexException("Corrupted: docID=" + docID + ", docBase=" + docBase + ", chunkDocs="
                + chunkDocs + ", numDocs=" + numDocs + " (resource=" + fieldsStream + ")");
    }

    // Per-document metadata for this chunk:
    //   numStoredFields - field count for docID
    //   offset          - byte offset of docID's data within the decompressed chunk
    //   length          - byte length of docID's data
    //   totalLength     - decompressed length of the whole chunk
    final int numStoredFields, offset, length, totalLength;
    if (chunkDocs == 1) {
        // Single-doc chunk: metadata is stored directly as vInts.
        numStoredFields = fieldsStream.readVInt();
        offset = 0;
        length = fieldsStream.readVInt();
        totalLength = length;
    } else {
        // Multi-doc chunk: field counts are packed; 0 bits means all docs share one value.
        final int bitsPerStoredFields = fieldsStream.readVInt();
        if (bitsPerStoredFields == 0) {
            numStoredFields = fieldsStream.readVInt();
        } else if (bitsPerStoredFields > 31) {
            throw new CorruptIndexException(
                    "bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + fieldsStream + ")");
        } else {
            // Random-access read of this doc's count, then skip past the packed array.
            final long filePointer = fieldsStream.getFilePointer();
            final PackedInts.Reader reader = PackedInts.getDirectReaderNoHeader(fieldsStream,
                    PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields);
            numStoredFields = (int) (reader.get(docID - docBase));
            fieldsStream.seek(filePointer
                    + PackedInts.Format.PACKED.byteCount(packedIntsVersion, chunkDocs, bitsPerStoredFields));
        }

        // Same encoding scheme for the per-document lengths.
        final int bitsPerLength = fieldsStream.readVInt();
        if (bitsPerLength == 0) {
            // All docs have the same length; offsets are a simple multiple.
            length = fieldsStream.readVInt();
            offset = (docID - docBase) * length;
            totalLength = chunkDocs * length;
        } else if (bitsPerLength > 31) {
            // BUGFIX: the original checked bitsPerStoredFields here, letting a corrupt
            // bitsPerLength > 31 reach the packed reader instead of failing fast.
            throw new CorruptIndexException(
                    "bitsPerLength=" + bitsPerLength + " (resource=" + fieldsStream + ")");
        } else {
            // Sum lengths before docID for the offset, then past it for the total.
            final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(fieldsStream,
                    PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerLength, 1);
            int off = 0;
            for (int i = 0; i < docID - docBase; ++i) {
                off += it.next();
            }
            offset = off;
            length = (int) it.next();
            off += length;
            for (int i = docID - docBase + 1; i < chunkDocs; ++i) {
                off += it.next();
            }
            totalLength = off;
        }
    }

    // A doc has bytes iff it has fields; any mismatch indicates corruption.
    if ((length == 0) != (numStoredFields == 0)) {
        throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields
                + " (resource=" + fieldsStream + ")");
    }
    if (numStoredFields == 0) {
        // nothing to do
        return;
    }

    final DataInput documentInput;
    if (version >= VERSION_BIG_CHUNKS && totalLength >= 2 * chunkSize) {
        // Big chunk: decompress lazily, one chunkSize window at a time, starting at the
        // window containing this doc's offset.
        assert chunkSize > 0;
        assert offset < chunkSize;

        decompressor.decompress(fieldsStream, chunkSize, offset, Math.min(length, chunkSize - offset), bytes);
        documentInput = new DataInput() {

            // Total bytes of this document decompressed so far (first window already done).
            int decompressed = bytes.length;

            // Decompress the next window of this document, or EOF if exhausted.
            void fillBuffer() throws IOException {
                assert decompressed <= length;
                if (decompressed == length) {
                    throw new EOFException();
                }
                final int toDecompress = Math.min(length - decompressed, chunkSize);
                decompressor.decompress(fieldsStream, toDecompress, 0, toDecompress, bytes);
                decompressed += toDecompress;
            }

            @Override
            public byte readByte() throws IOException {
                if (bytes.length == 0) {
                    fillBuffer();
                }
                --bytes.length;
                return bytes.bytes[bytes.offset++];
            }

            @Override
            public void readBytes(byte[] b, int offset, int len) throws IOException {
                // Drain the current window, refilling until the request fits.
                while (len > bytes.length) {
                    System.arraycopy(bytes.bytes, bytes.offset, b, offset, bytes.length);
                    len -= bytes.length;
                    offset += bytes.length;
                    fillBuffer();
                }
                System.arraycopy(bytes.bytes, bytes.offset, b, offset, len);
                bytes.offset += len;
                bytes.length -= len;
            }

        };
    } else {
        // Small chunk: decompress just this document's slice in one shot. Reuse the shared
        // buffer only for small chunks to avoid pinning large allocations.
        final BytesRef bytes = totalLength <= BUFFER_REUSE_THRESHOLD ? this.bytes : new BytesRef();
        decompressor.decompress(fieldsStream, totalLength, offset, length, bytes);
        assert bytes.length == length;
        documentInput = new ByteArrayDataInput(bytes.bytes, bytes.offset, bytes.length);
    }

    for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++) {
        // Each field header packs the field number and the type bits into one vLong.
        final long infoAndBits = documentInput.readVLong();
        final int fieldNumber = (int) (infoAndBits >>> TYPE_BITS);
        final FieldInfo fieldInfo = fieldInfos.fieldInfo(fieldNumber);

        final int bits = (int) (infoAndBits & TYPE_MASK);
        assert bits <= NUMERIC_DOUBLE : "bits=" + Integer.toHexString(bits);

        // Optional visibility marker: a 1-byte flag followed by a length-prefixed token.
        FieldVisibility cv = RestrictedStoredFieldVisitor.EMPTY;
        boolean isRestricted = documentInput.readByte() == 1;
        if (isRestricted) {
            int cv_length = documentInput.readVInt();
            byte[] cv_bytes = new byte[cv_length];
            documentInput.readBytes(cv_bytes, 0, cv_length);
            cv = new FieldVisibility(cv_bytes);
        }

        RestrictedStoredFieldVisitor restrictedStoredFieldVisitor = DelegatingRestrictedFieldVisitor
                .wrap(visitor);
        if (evaluate(cv)) {
            switch (restrictedStoredFieldVisitor.needsField(fieldInfo, cv)) {
            case YES:
                readField(documentInput, restrictedStoredFieldVisitor, fieldInfo, bits, cv);
                break;
            case NO:
                skipField(documentInput, bits, cv);
                break;
            case STOP:
                return;
            }
        } else {
            // Visibility check failed: consume the field's bytes without exposing it.
            skipField(documentInput, bits, cv);
        }
    }
}

From source file:com.sindicetech.siren.index.codecs.siren10.Siren10PostingsReader.java

License:Open Source License

/**
 * Decodes the metadata of a single term from the terms-dictionary stream.
 *
 * <p>Reads the block count and the doc index pointer, then — when the term spans enough
 * blocks to carry skip data — the skip-list file pointer, which is stored either as an
 * absolute value or as a delta against the previously decoded term.
 *
 * @param empty      unused
 * @param in         the input holding the encoded term metadata
 * @param fieldInfo  the field this term belongs to (unused here)
 * @param _termState the state object to populate; must be a {@code Siren10TermState}
 * @param absolute   whether pointers are absolute or deltas from the previous term
 * @throws IOException on any underlying read failure
 */
@Override
public void decodeTerm(long[] empty, DataInput in, FieldInfo fieldInfo, BlockTermState _termState,
        boolean absolute) throws IOException {
    final Siren10TermState termState = (Siren10TermState) _termState;

    termState.blockCount = in.readVInt();
    termState.docIndex.read(in, absolute);

    final boolean hasSkipData = termState.blockCount >= blockSkipMinimum;
    if (hasSkipData) {
        // Skip pointer present: absolute value, or delta accumulated onto the prior one.
        final long skipValue = in.readVLong();
        termState.skipFP = absolute ? skipValue : termState.skipFP + skipValue;
    } else if (absolute) {
        // No skip data for this term; reset the pointer on absolute decodes.
        termState.skipFP = 0;
    }
}