List of usage examples for org.apache.lucene.store Directory openInput
public abstract IndexInput openInput(String name, IOContext context) throws IOException;
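The signature above is the Lucene 4.x form of the method. As a quick orientation before the collected examples, here is a minimal, self-contained sketch of the call pattern; it assumes Lucene 4.x, and both the index path and the file name "segments.gen" are illustrative assumptions, not drawn from the examples below.

import java.io.File;
import java.io.IOException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

public class OpenInputSketch {
    public static void main(String[] args) throws IOException {
        // Open a file-system Directory; the path is a placeholder.
        Directory dir = FSDirectory.open(new File("/tmp/index"));
        // openInput returns a seekable, positioned reader over one index file.
        IndexInput in = dir.openInput("segments.gen", IOContext.DEFAULT);
        try {
            long len = in.length();     // total file length in bytes
            byte first = in.readByte(); // sequential reads advance the file pointer
            in.seek(0);                 // random access via seek
            System.out.println("length=" + len + " firstByte=" + first);
        } finally {
            in.close(); // always release the underlying handle
            dir.close();
        }
    }
}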
From source file:com.bah.lucene.BaseDirectoryTestSuite.java
License:Apache License
private void testEof(String name, Directory directory, long length) throws IOException {
    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
    try {
        input.seek(length);
        input.readByte();
        fail("should throw eof");
    } catch (IOException e) {
        // expected: reading past the end of the file
    }
}
From source file:com.bah.lucene.BaseDirectoryTestSuite.java
License:Apache License
private void assertInputsEquals(String name, Directory fsDir, Directory hdfs) throws IOException {
    int reads = random.nextInt(MAX_NUMBER_OF_READS);
    IndexInput fsInput = fsDir.openInput(name, IOContext.DEFAULT);
    IndexInput hdfsInput = hdfs.openInput(name, IOContext.DEFAULT);
    assertEquals(fsInput.length(), hdfsInput.length());
    int fileLength = (int) fsInput.length();
    for (int i = 0; i < reads; i++) {
        byte[] fsBuf = new byte[random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength)) + MIN_BUFFER_SIZE];
        byte[] hdfsBuf = new byte[fsBuf.length];
        int offset = random.nextInt(fsBuf.length);
        int length = random.nextInt(fsBuf.length - offset);
        int pos = random.nextInt(fileLength - length);
        fsInput.seek(pos);
        fsInput.readBytes(fsBuf, offset, length);
        hdfsInput.seek(pos);
        hdfsInput.readBytes(hdfsBuf, offset, length);
        // compare the region that was actually read: [offset, offset + length)
        for (int f = offset; f < offset + length; f++) {
            if (fsBuf[f] != hdfsBuf[f]) {
                fail();
            }
        }
    }
    fsInput.close();
    hdfsInput.close();
}
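This is a differential test: it runs the same random seek/readBytes sequence against a baseline file-system Directory and the HDFS-backed Directory under test, and fails on the first byte where the two diverge. The variant from BlockDirectoryTest further below adds a guard for zero-length files and reports the random seed on failure.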
From source file:com.bah.lucene.BaseDirectoryTestSuite.java
License:Apache License
private Directory getControlDir(final Directory control, final Directory test) {
    return new Directory() {
        @Override
        public Lock makeLock(String name) {
            return control.makeLock(name);
        }

        @Override
        public void clearLock(String name) throws IOException {
            control.clearLock(name);
        }

        @Override
        public void setLockFactory(LockFactory lockFactory) throws IOException {
            control.setLockFactory(lockFactory);
        }

        @Override
        public LockFactory getLockFactory() {
            return control.getLockFactory();
        }

        @Override
        public String getLockID() {
            return control.getLockID();
        }

        @Override
        public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
            control.copy(to, src, dest, context);
        }

        @Override
        public IndexInputSlicer createSlicer(String name, IOContext context) throws IOException {
            return control.createSlicer(name, context);
        }

        @Override
        public IndexOutput createOutput(final String name, IOContext context) throws IOException {
            final IndexOutput testOutput = test.createOutput(name, context);
            final IndexOutput controlOutput = control.createOutput(name, context);
            return new IndexOutput() {
                @Override
                public void flush() throws IOException {
                    testOutput.flush();
                    controlOutput.flush();
                }

                @Override
                public void close() throws IOException {
                    testOutput.close();
                    controlOutput.close();
                }

                @Override
                public long getFilePointer() {
                    long filePointer = testOutput.getFilePointer();
                    long controlFilePointer = controlOutput.getFilePointer();
                    if (controlFilePointer != filePointer) {
                        System.err.println("Output Name [" + name + "] with filePointer [" + filePointer
                            + "] and control filePointer [" + controlFilePointer + "] does not match");
                    }
                    return filePointer;
                }

                @SuppressWarnings("deprecation")
                @Override
                public void seek(long pos) throws IOException {
                    testOutput.seek(pos);
                    controlOutput.seek(pos);
                }

                @Override
                public long length() throws IOException {
                    long length = testOutput.length();
                    long controlLength = controlOutput.length();
                    if (controlLength != length) {
                        System.err.println("Output Name [" + name + "] with length [" + length
                            + "] and control length [" + controlLength + "] does not match");
                    }
                    return length;
                }

                @Override
                public void writeByte(byte b) throws IOException {
                    testOutput.writeByte(b);
                    controlOutput.writeByte(b);
                }

                @Override
                public void writeBytes(byte[] b, int offset, int length) throws IOException {
                    testOutput.writeBytes(b, offset, length);
                    controlOutput.writeBytes(b, offset, length);
                }
            };
        }

        @Override
        public IndexInput openInput(final String name, IOContext context) throws IOException {
            final IndexInput testInput = test.openInput(name, context);
            final IndexInput controlInput = control.openInput(name, context);
            return new IndexInputCompare(name, testInput, controlInput);
        }

        @Override
        public String[] listAll() throws IOException {
            return test.listAll();
        }

        @Override
        public boolean fileExists(String name) throws IOException {
            return test.fileExists(name);
        }

        @Override
        public void deleteFile(String name) throws IOException {
            test.deleteFile(name);
            control.deleteFile(name);
        }

        @Override
        public long fileLength(String name) throws IOException {
            long fileLength = test.fileLength(name);
            long controlFileLength = control.fileLength(name);
            if (controlFileLength != fileLength) {
                System.err.println("Input Name [" + name + "] with length [" + fileLength
                    + "] and control length [" + controlFileLength + "] does not match");
            }
            return fileLength;
        }

        @Override
        public void sync(Collection<String> names) throws IOException {
            test.sync(names);
            control.sync(names);
        }

        @Override
        public void close() throws IOException {
            test.close();
            control.close();
        }
    };
}
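Taken together, this delegating Directory fans every write out to both a control implementation and the implementation under test, printing a diagnostic whenever observable state (file pointer, file length) diverges. On the read side, openInput wraps the two IndexInputs in an IndexInputCompare, defined elsewhere in the suite, which presumably performs the same comparison for reads.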
From source file:com.bah.lucene.blockcache.BlockDirectoryTest.java
License:Apache License
private void testEof(String name, Directory directory, long length) throws IOException {
    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
    input.seek(length);
    try {
        input.readByte();
        fail("should throw eof");
    } catch (IOException e) {
        // expected: reading past the end of the file
    }
}
From source file:com.bah.lucene.blockcache.BlockDirectoryTest.java
License:Apache License
private void assertInputsEquals(String name, Directory fsDir, Directory hdfs) throws IOException {
    int reads = random.nextInt(MAX_NUMBER_OF_READS);
    IndexInput fsInput = fsDir.openInput(name, IOContext.DEFAULT);
    IndexInput hdfsInput = hdfs.openInput(name, IOContext.DEFAULT);
    assertEquals(fsInput.length(), hdfsInput.length());
    int fileLength = (int) fsInput.length();
    if (fileLength != 0) {
        for (int i = 0; i < reads; i++) {
            byte[] fsBuf = new byte[random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength)) + MIN_BUFFER_SIZE];
            byte[] hdfsBuf = new byte[fsBuf.length];
            int offset = random.nextInt(fsBuf.length);
            int length = random.nextInt(fsBuf.length - offset);
            int pos = random.nextInt(fileLength - length);
            fsInput.seek(pos);
            fsInput.readBytes(fsBuf, offset, length);
            hdfsInput.seek(pos);
            hdfsInput.readBytes(hdfsBuf, offset, length);
            // compare the region that was actually read: [offset, offset + length)
            for (int f = offset; f < offset + length; f++) {
                if (fsBuf[f] != hdfsBuf[f]) {
                    fail(Long.toString(seed) + " read [" + i + "]");
                }
            }
        }
    }
    fsInput.close();
    hdfsInput.close();
}
From source file:com.browseengine.bobo.geosearch.index.impl.GeoSegmentReader.java
License:Apache License
public GeoSegmentReader(Directory dir, String fileName, int maxDoc, int bufferSize,
        IGeoRecordSerializer<G> geoRecordSerializer, Comparator<G> geoRecordComparator) throws IOException {
    this(0, maxDoc, geoRecordSerializer, geoRecordComparator);
    try {
        this.indexInput = dir.openInput(fileName, bufferSize);
        init();
    } catch (FileNotFoundException e) {
        LOGGER.warn("file not found: " + e + ", treating this as no geoRecords");
        init(0, nullCheckChecksValues);
    }
}
From source file:com.kamikaze.lucenecodec.PForDeltaFixedIntBlockWithIntBufferIndexInput.java
License:Apache License
public PForDeltaFixedIntBlockWithIntBufferIndexInput(Directory dir, String fileName, int readBufferSize)
        throws IOException {
    super(dir.openInput(fileName, readBufferSize));
}
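Note: the two constructors above call the older openInput(String name, int bufferSize) overload from Lucene 3.x, which takes an explicit read-buffer size; in Lucene 4.0 that overload was replaced by the IOContext variant shown at the top of this page.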
From source file:com.lucure.core.codec.CompressingStoredFieldsReader.java
License:Apache License
/** Sole constructor. */
public CompressingStoredFieldsReader(Directory d, SegmentInfo si, String segmentSuffix, FieldInfos fn,
        IOContext context, String formatName, CompressionMode compressionMode) throws IOException {
    this.compressionMode = compressionMode;
    final String segment = si.name;
    boolean success = false;
    fieldInfos = fn;
    numDocs = si.getDocCount();
    ChecksumIndexInput indexStream = null;
    try {
        final String indexStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION);
        final String fieldsStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_EXTENSION);
        // Load the index into memory
        indexStream = d.openChecksumInput(indexStreamFN, context);
        final String codecNameIdx = formatName + CODEC_SFX_IDX;
        version = CodecUtil.checkHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT);
        assert CodecUtil.headerLength(codecNameIdx) == indexStream.getFilePointer();
        indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
        long maxPointer = -1;
        if (version >= VERSION_CHECKSUM) {
            maxPointer = indexStream.readVLong();
            CodecUtil.checkFooter(indexStream);
        } else {
            CodecUtil.checkEOF(indexStream);
        }
        indexStream.close();
        indexStream = null;
        // Open the data file and read metadata
        fieldsStream = d.openInput(fieldsStreamFN, context);
        if (version >= VERSION_CHECKSUM) {
            if (maxPointer + CodecUtil.footerLength() != fieldsStream.length()) {
                throw new CorruptIndexException("Invalid fieldsStream maxPointer (file truncated?): maxPointer="
                    + maxPointer + ", length=" + fieldsStream.length());
            }
        } else {
            maxPointer = fieldsStream.length();
        }
        this.maxPointer = maxPointer;
        final String codecNameDat = formatName + CODEC_SFX_DAT;
        final int fieldsVersion = CodecUtil.checkHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT);
        if (version != fieldsVersion) {
            throw new CorruptIndexException("Version mismatch between stored fields index and data: "
                + version + " != " + fieldsVersion);
        }
        assert CodecUtil.headerLength(codecNameDat) == fieldsStream.getFilePointer();
        if (version >= VERSION_BIG_CHUNKS) {
            chunkSize = fieldsStream.readVInt();
        } else {
            chunkSize = -1;
        }
        packedIntsVersion = fieldsStream.readVInt();
        decompressor = compressionMode.newDecompressor();
        this.bytes = new BytesRef();
        if (version >= VERSION_CHECKSUM) {
            // NOTE: data file is too costly to verify checksum against all the bytes on open,
            // but for now we at least verify proper structure of the checksum footer: which looks
            // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
            // such as file truncation.
            CodecUtil.retrieveChecksum(fieldsStream);
        }
        success = true;
    } finally {
        if (!success) {
            IOUtils.closeWhileHandlingException(this, indexStream);
        }
    }
}
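Note the asymmetry in how the two files are opened: the small index file is read through openChecksumInput so its checksum can be fully verified up front, while the large data file is opened with plain openInput and only the structure of its checksum footer is validated, since verifying every byte of the data file at open time would be too costly.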
From source file:com.sindicetech.siren.index.codecs.siren10.Siren10BlockStreamFactory.java
License:Open Source License
public DocsFreqBlockIndexInput openDocsFreqInput(final Directory dir, final String fileName,
        final IOContext context) throws IOException {
    return new DocsFreqBlockIndexInput(dir.openInput(fileName, context),
        docsBlockDecompressor, freqBlockDecompressor);
}
From source file:com.sindicetech.siren.index.codecs.siren10.Siren10BlockStreamFactory.java
License:Open Source License
public NodBlockIndexInput openNodInput(final Directory dir, final String fileName, final IOContext context)
        throws IOException {
    return new NodBlockIndexInput(dir.openInput(fileName, context), nodBlockDecompressor);
}