Example usage for org.apache.lucene.store DataInput readBytes

List of usage examples for org.apache.lucene.store DataInput readBytes

Introduction

In this page you can find the example usage for org.apache.lucene.store DataInput readBytes.

Prototype

public abstract void readBytes(byte[] b, int offset, int len) throws IOException;

Source Link

Document

Reads a specified number of bytes into an array at the specified offset.

Usage

From source file:com.github.cstoku.neologd.unidic.lucene.analysis.ja.dict.CharacterDefinition.java

License:Apache License

private CharacterDefinition() throws IOException {
    // Loads the serialized character-class tables bundled next to this class.
    InputStream stream = null;
    boolean loaded = false;
    try {
        stream = BinaryDictionary.getClassResource(getClass(), FILENAME_SUFFIX);
        stream = new BufferedInputStream(stream);
        final DataInput input = new InputStreamDataInput(stream);
        CodecUtil.checkHeader(input, HEADER, VERSION, VERSION);
        // Bulk-read the codepoint -> character-category table in one call.
        input.readBytes(characterCategoryMap, 0, characterCategoryMap.length);
        // Each character class carries two packed boolean flags:
        // bit 0 -> invoke, bit 1 -> group.
        for (int classId = 0; classId < CLASS_COUNT; classId++) {
            final byte flags = input.readByte();
            invokeMap[classId] = (flags & 0x01) != 0;
            groupMap[classId] = (flags & 0x02) != 0;
        }
        loaded = true;
    } finally {
        // On success, surface any close() failure; on failure, suppress it so
        // the original exception propagates.
        if (loaded) {
            IOUtils.close(stream);
        } else {
            IOUtils.closeWhileHandlingException(stream);
        }
    }
}

From source file:com.lucure.core.codec.CompressingStoredFieldsReader.java

License:Apache License

/**
 * Decodes a single stored-field value from {@code in} and hands it to the
 * visitor; the low bits of {@code bits} (masked by TYPE_MASK) select the type.
 */
private static void readField(DataInput in, RestrictedStoredFieldVisitor visitor, FieldInfo info, int bits,
        FieldVisibility fieldVisibility) throws IOException {

    switch (bits & TYPE_MASK) {
    case BYTE_ARR: {
        final int len = in.readVInt();
        final byte[] buf = new byte[len];
        in.readBytes(buf, 0, len);
        visitor.binaryField(info, buf, fieldVisibility);
        break;
    }
    case STRING: {
        final int len = in.readVInt();
        final byte[] buf = new byte[len];
        in.readBytes(buf, 0, len);
        // Stored strings are serialized as UTF-8 bytes.
        visitor.stringField(info, new String(buf, IOUtils.CHARSET_UTF_8), fieldVisibility);
        break;
    }
    case NUMERIC_INT:
        visitor.intField(info, in.readInt(), fieldVisibility);
        break;
    case NUMERIC_FLOAT:
        visitor.floatField(info, Float.intBitsToFloat(in.readInt()), fieldVisibility);
        break;
    case NUMERIC_LONG:
        visitor.longField(info, in.readLong(), fieldVisibility);
        break;
    case NUMERIC_DOUBLE:
        visitor.doubleField(info, Double.longBitsToDouble(in.readLong()), fieldVisibility);
        break;
    default:
        throw new AssertionError("Unknown type flag: " + Integer.toHexString(bits));
    }
}

From source file:com.lucure.core.codec.CompressingStoredFieldsReader.java

License:Apache License

/**
 * Visits the stored fields of a single document.
 *
 * <p>Seeks to the compressed chunk containing {@code docID}, decodes the
 * per-document field counts and byte lengths from the chunk header, then
 * decompresses just enough data to iterate this document's fields, checking
 * each one's visibility before passing it to the visitor.
 *
 * @throws CorruptIndexException if the chunk header is inconsistent
 */
@Override
public void visitDocument(int docID, StoredFieldVisitor visitor) throws IOException {
    fieldsStream.seek(indexReader.getStartPointer(docID));

    final int docBase = fieldsStream.readVInt();
    final int chunkDocs = fieldsStream.readVInt();
    if (docID < docBase || docID >= docBase + chunkDocs || docBase + chunkDocs > numDocs) {
        throw new CorruptIndexException("Corrupted: docID=" + docID + ", docBase=" + docBase + ", chunkDocs="
                + chunkDocs + ", numDocs=" + numDocs + " (resource=" + fieldsStream + ")");
    }

    // numStoredFields: field count for this doc; offset/length: this doc's
    // byte span within the decompressed chunk; totalLength: whole chunk size.
    final int numStoredFields, offset, length, totalLength;
    if (chunkDocs == 1) {
        numStoredFields = fieldsStream.readVInt();
        offset = 0;
        length = fieldsStream.readVInt();
        totalLength = length;
    } else {
        // Field counts are packed; 0 bits means all docs share one count.
        final int bitsPerStoredFields = fieldsStream.readVInt();
        if (bitsPerStoredFields == 0) {
            numStoredFields = fieldsStream.readVInt();
        } else if (bitsPerStoredFields > 31) {
            throw new CorruptIndexException(
                    "bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + fieldsStream + ")");
        } else {
            final long filePointer = fieldsStream.getFilePointer();
            final PackedInts.Reader reader = PackedInts.getDirectReaderNoHeader(fieldsStream,
                    PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields);
            numStoredFields = (int) (reader.get(docID - docBase));
            // Skip past the packed block regardless of how much the direct
            // reader actually consumed.
            fieldsStream.seek(filePointer
                    + PackedInts.Format.PACKED.byteCount(packedIntsVersion, chunkDocs, bitsPerStoredFields));
        }

        // Document lengths are packed the same way; 0 bits means uniform length.
        final int bitsPerLength = fieldsStream.readVInt();
        if (bitsPerLength == 0) {
            length = fieldsStream.readVInt();
            offset = (docID - docBase) * length;
            totalLength = chunkDocs * length;
        } else if (bitsPerLength > 31) {
            // BUGFIX: previously tested bitsPerStoredFields here, so a corrupt
            // length table with bitsPerLength > 31 escaped this guard.
            throw new CorruptIndexException(
                    "bitsPerLength=" + bitsPerLength + " (resource=" + fieldsStream + ")");
        } else {
            final PackedInts.ReaderIterator it = PackedInts.getReaderIteratorNoHeader(fieldsStream,
                    PackedInts.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerLength, 1);
            // Sum lengths of preceding docs to find our offset, then keep
            // summing to obtain the chunk's total decompressed size.
            int off = 0;
            for (int i = 0; i < docID - docBase; ++i) {
                off += it.next();
            }
            offset = off;
            length = (int) it.next();
            off += length;
            for (int i = docID - docBase + 1; i < chunkDocs; ++i) {
                off += it.next();
            }
            totalLength = off;
        }
    }

    // A doc has zero bytes iff it has zero fields; anything else is corruption.
    if ((length == 0) != (numStoredFields == 0)) {
        throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields
                + " (resource=" + fieldsStream + ")");
    }
    if (numStoredFields == 0) {
        // nothing to do
        return;
    }

    final DataInput documentInput;
    if (version >= VERSION_BIG_CHUNKS && totalLength >= 2 * chunkSize) {
        // Big chunk: decompress lazily, one chunkSize window at a time, so we
        // never materialize the whole chunk in memory.
        assert chunkSize > 0;
        assert offset < chunkSize;

        decompressor.decompress(fieldsStream, chunkSize, offset, Math.min(length, chunkSize - offset), bytes);
        documentInput = new DataInput() {

            // Total bytes of this document made available so far.
            int decompressed = bytes.length;

            // Refills `bytes` with the next window; EOF once the whole
            // document has been served.
            void fillBuffer() throws IOException {
                assert decompressed <= length;
                if (decompressed == length) {
                    throw new EOFException();
                }
                final int toDecompress = Math.min(length - decompressed, chunkSize);
                decompressor.decompress(fieldsStream, toDecompress, 0, toDecompress, bytes);
                decompressed += toDecompress;
            }

            @Override
            public byte readByte() throws IOException {
                if (bytes.length == 0) {
                    fillBuffer();
                }
                --bytes.length;
                return bytes.bytes[bytes.offset++];
            }

            @Override
            public void readBytes(byte[] b, int offset, int len) throws IOException {
                // Drain the current window, refilling until `len` fits.
                while (len > bytes.length) {
                    System.arraycopy(bytes.bytes, bytes.offset, b, offset, bytes.length);
                    len -= bytes.length;
                    offset += bytes.length;
                    fillBuffer();
                }
                System.arraycopy(bytes.bytes, bytes.offset, b, offset, len);
                bytes.offset += len;
                bytes.length -= len;
            }

        };
    } else {
        // Small chunk: decompress this document's span in one shot, reusing
        // the shared buffer when it is small enough.
        final BytesRef bytes = totalLength <= BUFFER_REUSE_THRESHOLD ? this.bytes : new BytesRef();
        decompressor.decompress(fieldsStream, totalLength, offset, length, bytes);
        assert bytes.length == length;
        documentInput = new ByteArrayDataInput(bytes.bytes, bytes.offset, bytes.length);
    }

    for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++) {
        // Each field starts with a vlong packing (fieldNumber << TYPE_BITS) | type.
        final long infoAndBits = documentInput.readVLong();
        final int fieldNumber = (int) (infoAndBits >>> TYPE_BITS);
        final FieldInfo fieldInfo = fieldInfos.fieldInfo(fieldNumber);

        final int bits = (int) (infoAndBits & TYPE_MASK);
        assert bits <= NUMERIC_DOUBLE : "bits=" + Integer.toHexString(bits);

        // Restriction marker: byte 1 means a serialized visibility follows.
        FieldVisibility cv = RestrictedStoredFieldVisitor.EMPTY;
        boolean isRestricted = documentInput.readByte() == 1;
        if (isRestricted) {
            int cv_length = documentInput.readVInt();
            byte[] cv_bytes = new byte[cv_length];
            documentInput.readBytes(cv_bytes, 0, cv_length);
            cv = new FieldVisibility(cv_bytes);
        }

        RestrictedStoredFieldVisitor restrictedStoredFieldVisitor = DelegatingRestrictedFieldVisitor
                .wrap(visitor);
        if (evaluate(cv)) {
            switch (restrictedStoredFieldVisitor.needsField(fieldInfo, cv)) {
            case YES:
                readField(documentInput, restrictedStoredFieldVisitor, fieldInfo, bits, cv);
                break;
            case NO:
                // Caller doesn't want this field: skip its bytes to stay aligned.
                skipField(documentInput, bits, cv);
                break;
            case STOP:
                return;
            }
        } else {
            // Not visible to this reader: skip without exposing the value.
            skipField(documentInput, bits, cv);
        }
    }
}

From source file:org.apache.blur.lucene.fst.ByteArrayPrimitive.java

License:Apache License

// Fills this primitive's backing array directly from the Lucene input.
// NOTE(review): `offset` and `length` address positions within `_bytes`
// (they are forwarded unchanged to DataInput.readBytes) — confirm callers
// pass array-relative values, not stream positions.
@Override
public void readBytes(DataInput in, int offset, int length) throws IOException {
    in.readBytes(_bytes, offset, length);
}