Usage examples for org.apache.lucene.store.IndexInput#getFilePointer()
public abstract long getFilePointer();
From source file:com.lucure.core.codec.ForUtil.java
License:Apache License
/** * Skip the next block of data./*from w w w . ja va2s .c o m*/ * * @param in the input where to read data * @throws IOException If there is a low-level I/O error */ void skipBlock(IndexInput in) throws IOException { final int numBits = in.readByte(); if (numBits == ALL_VALUES_EQUAL) { in.readVInt(); return; } assert numBits > 0 && numBits <= 32 : numBits; final int encodedSize = encodedSizes[numBits]; in.seek(in.getFilePointer() + encodedSize); }
From source file:com.nearinfinity.mele.zookeeper.ZookeeperWrapperDirectory.java
License:Apache License
private IndexInput wrapRef(final String name, final IndexInput indexInput) { final String refPath = ZookeeperIndexDeletionPolicy.createRef(zk, indexRefPath, name); return new IndexInput() { @Override// w w w . java 2 s .com public void close() throws IOException { indexInput.close(); ZookeeperIndexDeletionPolicy.removeRef(zk, refPath); } @Override public long getFilePointer() { return indexInput.getFilePointer(); } @Override public long length() { return indexInput.length(); } @Override public byte readByte() throws IOException { return indexInput.readByte(); } @Override public void readBytes(byte[] b, int offset, int len) throws IOException { indexInput.readBytes(b, offset, len); } @Override public void seek(long pos) throws IOException { indexInput.seek(pos); } @Override public Object clone() { return indexInput.clone(); } }; }
From source file:org.apache.blur.lucene.codec.Blur022SegmentInfoReader.java
License:Apache License
/**
 * Reads and validates a Blur 0.2.2 segment-info (.si-style) file and reconstructs
 * the {@link SegmentInfo}. The read order (header, version, docCount, compound flag,
 * diagnostics, attributes, files) must match the writer exactly.
 *
 * @throws CorruptIndexException if the doc count is negative or trailing bytes remain
 * @throws IOException on any low-level I/O error
 */
@Override
public SegmentInfo read(Directory dir, String segment, IOContext context) throws IOException {
    // Resolve the on-disk file name for this segment's info file.
    final String fileName = IndexFileNames.segmentFileName(segment, "", Blur022SegmentInfoFormat.SI_EXTENSION);
    final IndexInput input = dir.openInput(fileName, context);
    boolean success = false;
    try {
        // Validate codec name and version range before trusting any payload bytes.
        CodecUtil.checkHeader(input, Blur022SegmentInfoFormat.CODEC_NAME,
                Blur022SegmentInfoFormat.VERSION_START, Blur022SegmentInfoFormat.VERSION_CURRENT);
        final String version = input.readString();
        final int docCount = input.readInt();
        if (docCount < 0) {
            throw new CorruptIndexException("invalid docCount: " + docCount + " (resource=" + input + ")");
        }
        final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
        final Map<String, String> diagnostics = input.readStringStringMap();
        final Map<String, String> attributes = input.readStringStringMap();
        final Set<String> files = input.readStringSet();
        // Any unread trailing bytes indicate truncation or corruption; fail loudly.
        if (input.getFilePointer() != input.length()) {
            throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read "
                    + input.getFilePointer() + " vs size " + input.length() + " (resource: " + input + ")");
        }
        final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics,
                Collections.unmodifiableMap(attributes));
        si.setFiles(files);
        success = true;
        return si;
    } finally {
        if (!success) {
            // Close quietly on failure so the original exception is not masked.
            IOUtils.closeWhileHandlingException(input);
        } else {
            input.close();
        }
    }
}
From source file:org.apache.blur.lucene.codec.CachedDecompressor.java
License:Apache License
/**
 * Decompresses {@code length} bytes (starting at {@code offset} within the uncompressed
 * data) into {@code bytes}. When the source is an {@link IndexInput}, the full decompressed
 * chunk is cached per thread and keyed by (input name, file pointer), so repeated partial
 * reads of the same chunk skip re-decompression. Other inputs fall through to the delegate.
 */
@Override
public void decompress(final DataInput in, final int originalLength, final int offset, final int length,
        final BytesRef bytes) throws IOException {
    if (in instanceof IndexInput) {
        IndexInput indexInput = (IndexInput) in;
        String name = indexInput.toString();
        long filePointer = indexInput.getFilePointer();
        // Thread-local cache entry; valid only if it was built from this same input+position.
        Entry entry = _entry.get();
        if (!entry.isValid(indexInput, name, filePointer)) {
            entry.setup(indexInput, name, filePointer);
            // +7 slack mirrors the allocation below — presumably required by the
            // underlying decompressor's over-read behavior; TODO confirm.
            entry._cache.grow(originalLength + 7);
            // Decompress the entire chunk from position 0 so later offsets hit the cache.
            _decompressor.decompress(indexInput, originalLength, 0, originalLength, entry._cache);
            entry._cache.length = originalLength;
            entry._cache.offset = 0;
            _entry.set(entry);
        }
        if (bytes.bytes.length < originalLength + 7) {
            bytes.bytes = new byte[ArrayUtil.oversize(originalLength + 7, 1)];
        }
        // Copy from the chunk start through offset+length, then expose just [offset, offset+length).
        System.arraycopy(entry._cache.bytes, entry._cache.offset, bytes.bytes, 0, length + offset);
        bytes.offset = offset;
        bytes.length = length;
    } else {
        _decompressor.decompress(in, originalLength, offset, length, bytes);
    }
}
From source file:org.apache.jackrabbit.oak.plugins.index.lucene.OakDirectoryTest.java
License:Apache License
/**
 * Asserts that every read/positioning operation on a closed {@link IndexInput}
 * fails with {@link AlreadyClosedException}. Each operation is exercised in its
 * own try/catch so one failure does not hide the others' behavior.
 */
private void assertClosed(IndexInput input) throws IOException {
    try {
        input.length();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.seek(0);
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.getFilePointer();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readInt();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readShort();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readLong();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readByte();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readString();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readStringSet();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readStringStringMap();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readVInt();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readVLong();
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        // null buffer is fine here: the closed check must fire before any buffer access.
        input.readBytes(null, 0, 0);
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
    try {
        input.readBytes(null, 0, 0, false);
        fail("cannot use IndexInput once closed");
    } catch (AlreadyClosedException e) {
        // expected exception
    }
}
From source file:org.codelibs.elasticsearch.common.lucene.store.InputStreamIndexInput.java
License:Apache License
public InputStreamIndexInput(IndexInput indexInput, long limit) { this.indexInput = indexInput; this.limit = limit; if ((indexInput.length() - indexInput.getFilePointer()) > limit) { actualSizeToRead = limit;/*from w w w. j a v a2s.c om*/ } else { actualSizeToRead = indexInput.length() - indexInput.getFilePointer(); } }
From source file:org.elasticsearch.common.compress.CompressedIndexInput.java
License:Apache License
/**
 * Opens a compressed view over {@code in}. Reads the header and version, then follows
 * a pointer to the trailing metadata section to load the total uncompressed length and
 * the per-chunk offset table, and finally seeks back to the first compressed chunk.
 *
 * @throws IOException on any low-level I/O error while reading header or metadata
 */
public CompressedIndexInput(IndexInput in, T context) throws IOException {
    super("compressed(" + in.toString() + ")");
    this.in = in;
    this.context = context;
    readHeader(in);
    this.version = in.readInt();
    // Pointer to the metadata block written at the end of the file.
    long metaDataPosition = in.readLong();
    // Remember where the compressed payload starts so we can return here.
    long headerLength = in.getFilePointer();
    in.seek(metaDataPosition);
    this.totalUncompressedLength = in.readVLong();
    int size = in.readVInt();
    // Offset table: one entry per compressed chunk.
    offsets = BigArrays.newLongArray(size);
    for (int i = 0; i < size; i++) {
        offsets.set(i, in.readVLong());
    }
    // -1 means "no chunk loaded yet"; the first read will load chunk 0.
    this.currentOffsetIdx = -1;
    this.currentUncompressedChunkPointer = 0;
    in.seek(headerLength);
}
From source file:org.elasticsearch.common.compress.lzf.LZFCompressor.java
License:Apache License
@Override public boolean isCompressed(IndexInput in) throws IOException { long currentPointer = in.getFilePointer(); // since we have some metdata before the first compressed header, we check on our specific header if (in.length() - currentPointer < (LUCENE_HEADER.length)) { return false; }//from ww w .j av a 2s .c o m for (int i = 0; i < LUCENE_HEADER.length; i++) { if (in.readByte() != LUCENE_HEADER[i]) { in.seek(currentPointer); return false; } } in.seek(currentPointer); return true; }
From source file:org.elasticsearch.common.compress.snappy.SnappyCompressor.java
License:Apache License
@Override public boolean isCompressed(IndexInput in) throws IOException { long currentPointer = in.getFilePointer(); // since we have some metdata before the first compressed header, we check on our specific header if (in.length() - currentPointer < (HEADER.length)) { return false; }/*from w w w.j a va 2 s. c om*/ for (int i = 0; i < HEADER.length; i++) { if (in.readByte() != HEADER[i]) { in.seek(currentPointer); return false; } } in.seek(currentPointer); return true; }
From source file:org.elasticsearch.common.lucene.store.ByteArrayIndexInputTests.java
License:Apache License
/**
 * Reads {@code length} bytes from {@code indexInput} starting at its current file
 * pointer, using a randomly chosen strategy per step: single byte, bulk read into the
 * target, bulk read via a temp buffer, or a recursive read through a slice. Verifies
 * after each step that the file pointer tracks the bytes consumed.
 *
 * @return the bytes read, positioned at their read offsets within the output array
 */
private byte[] randomReadAndSlice(IndexInput indexInput, int length) throws IOException {
    int readPos = (int) indexInput.getFilePointer();
    byte[] output = new byte[length];
    while (readPos < length) {
        switch (randomIntBetween(0, 3)) {
        case 0:
            // Read by one byte at a time
            output[readPos++] = indexInput.readByte();
            break;
        case 1:
            // Read several bytes into target
            int len = randomIntBetween(1, length - readPos);
            indexInput.readBytes(output, readPos, len);
            readPos += len;
            break;
        case 2:
            // Read several bytes into 0-offset target
            len = randomIntBetween(1, length - readPos);
            byte[] temp = new byte[len];
            indexInput.readBytes(temp, 0, len);
            System.arraycopy(temp, 0, output, readPos, len);
            readPos += len;
            break;
        case 3:
            // Read using slice
            len = randomIntBetween(1, length - readPos);
            IndexInput slice = indexInput
                    .slice("slice (" + readPos + ", " + len + ") of " + indexInput.toString(), readPos, len);
            temp = randomReadAndSlice(slice, len);
            // assert that position in the original input didn't change
            assertEquals(readPos, indexInput.getFilePointer());
            System.arraycopy(temp, 0, output, readPos, len);
            readPos += len;
            // Advance the parent input past the bytes the slice consumed.
            indexInput.seek(readPos);
            assertEquals(readPos, indexInput.getFilePointer());
            break;
        default:
            fail();
        }
        // Invariant: file pointer always equals total bytes consumed so far.
        assertEquals((long) readPos, indexInput.getFilePointer());
    }
    return output;
}