List of usage examples for `org.apache.lucene.store.IndexInput#clone()`.
@Override
public IndexInput clone()
Warning: Lucene never closes cloned `IndexInput`s; it will only call `#close()` on the original object.
From source file:com.bah.lucene.BaseDirectoryTestSuite.java
License:Apache License
@Test public void testWritingAndReadingAFile() throws IOException { IndexOutput output = directory.createOutput("testing.test", IOContext.DEFAULT); output.writeInt(12345);// w w w.j a v a2 s .c o m output.flush(); output.close(); IndexInput input = directory.openInput("testing.test", IOContext.DEFAULT); assertEquals(12345, input.readInt()); input.close(); String[] listAll = directory.listAll(); assertEquals(1, listAll.length); assertEquals("testing.test", listAll[0]); assertEquals(4, directory.fileLength("testing.test")); IndexInput input1 = directory.openInput("testing.test", IOContext.DEFAULT); IndexInput input2 = (IndexInput) input1.clone(); assertEquals(12345, input2.readInt()); input2.close(); assertEquals(12345, input1.readInt()); input1.close(); assertFalse(directory.fileExists("testing.test.other")); assertTrue(directory.fileExists("testing.test")); directory.deleteFile("testing.test"); assertFalse(directory.fileExists("testing.test")); }
From source file:com.bah.lucene.blockcache_v2.CacheIndexInputTest.java
License:Apache License
/**
 * Verifies that a CacheIndexInput returns byte-identical data to the raw
 * underlying input across buffered, short, int, and long read paths.
 */
@Test
public void test2() throws IOException {
    Cache blockCache = getCache();
    RAMDirectory ramDir = new RAMDirectory();
    Random rnd = new Random(seed);
    String fileName = "test2";
    // 10 MiB plus a small odd remainder to exercise partial-buffer reads.
    long fileLength = (10 * 1024 * 1024) + 13;

    IndexOutput out = ramDir.createOutput(fileName, IOContext.DEFAULT);
    writeRandomData(fileLength, rnd, out);
    out.close();

    IndexInput rawInput = ramDir.openInput(fileName, IOContext.DEFAULT);
    // The cached input wraps a clone of the raw input; Lucene only ever
    // closes the original, so closing cachedInput below is the test's job.
    IndexInput cachedInput = new CacheIndexInput(null, fileName, rawInput.clone(), blockCache);
    readRandomData(rawInput, cachedInput, rnd, sampleSize, maxBufSize, maxOffset);
    readRandomDataShort(rawInput, cachedInput, rnd, sampleSize);
    readRandomDataInt(rawInput, cachedInput, rnd, sampleSize);
    readRandomDataLong(rawInput, cachedInput, rnd, sampleSize);

    cachedInput.close();
    rawInput.close();
    ramDir.close();
}
From source file:com.nearinfinity.mele.zookeeper.ZookeeperWrapperDirectory.java
License:Apache License
/**
 * Wraps an {@code IndexInput} so that a ZooKeeper reference node is created
 * for {@code name} while the input is open, and removed when the original
 * input is closed. All read operations delegate to the wrapped input.
 */
private IndexInput wrapRef(final String name, final IndexInput indexInput) {
    // Register a ref in ZooKeeper for the lifetime of this input; removed in close().
    final String refPath = ZookeeperIndexDeletionPolicy.createRef(zk, indexRefPath, name);
    return new IndexInput() {
        @Override
        public void close() throws IOException {
            indexInput.close();
            // Release the ZooKeeper ref only after the underlying input is closed.
            ZookeeperIndexDeletionPolicy.removeRef(zk, refPath);
        }

        @Override
        public long getFilePointer() {
            return indexInput.getFilePointer();
        }

        @Override
        public long length() {
            return indexInput.length();
        }

        @Override
        public byte readByte() throws IOException {
            return indexInput.readByte();
        }

        @Override
        public void readBytes(byte[] b, int offset, int len) throws IOException {
            indexInput.readBytes(b, offset, len);
        }

        @Override
        public void seek(long pos) throws IOException {
            indexInput.seek(pos);
        }

        @Override
        public Object clone() {
            // NOTE(review): the clone delegates straight to the wrapped input, so
            // it does NOT carry this wrapper's close()/removeRef behavior. Since
            // Lucene never closes clones (only the original), the ref is still
            // released exactly once via the original's close() — confirm this is
            // the intended lifecycle.
            return indexInput.clone();
        }
    };
}
From source file:com.rocana.lucene.codec.v1.RocanaFieldReader.java
License:Apache License
/**
 * Per-field reader state for the block-tree terms dictionary.
 *
 * Captures the field's aggregate statistics and, when a terms-index input is
 * supplied, loads the field's FST index from a clone of that input positioned
 * at {@code indexStartFP}.
 *
 * @param indexIn terms-index input, or {@code null} when no index is loaded
 * @throws IOException if reading the FST from the index input fails
 */
RocanaFieldReader(RocanaBlockTreeTermsReader parent, FieldInfo fieldInfo, long numTerms, BytesRef rootCode,
        long sumTotalTermFreq, long sumDocFreq, int docCount, long indexStartFP, int longsSize,
        IndexInput indexIn, BytesRef minTerm, BytesRef maxTerm) throws IOException {
    assert numTerms > 0;
    this.fieldInfo = fieldInfo;
    //DEBUG = RocanaBlockTreeTermsReader.DEBUG && fieldInfo.name.equals("id");
    this.parent = parent;
    this.numTerms = numTerms;
    this.sumTotalTermFreq = sumTotalTermFreq;
    this.sumDocFreq = sumDocFreq;
    this.docCount = docCount;
    this.indexStartFP = indexStartFP;
    this.rootCode = rootCode;
    this.longsSize = longsSize;
    this.minTerm = minTerm;
    this.maxTerm = maxTerm;

    // The root block's file pointer is packed into rootCode as a vLong; the low
    // OUTPUT_FLAGS_NUM_BITS carry flags and are shifted away here.
    rootBlockFP = (new ByteArrayDataInput(rootCode.bytes, rootCode.offset, rootCode.length))
            .readVLong() >>> RocanaBlockTreeTermsReader.OUTPUT_FLAGS_NUM_BITS;

    if (indexIn != null) {
        // Clone so this reader gets its own position; Lucene closes only the
        // original input, never clones.
        final IndexInput clone = indexIn.clone();
        clone.seek(indexStartFP);
        index = new FST<>(clone, ByteSequenceOutputs.getSingleton());
    } else {
        index = null;
    }
}
From source file:org.apache.blur.store.BaseDirectoryTestSuite.java
License:Apache License
@Test public void testLongReadAndClone() throws IOException { FSDirectory control = FSDirectory.open(fileControl); Directory dir = getControlDir(control, directory); String name = writeFile(dir, 10 * 1000 * 1000); IndexInput input = dir.openInput(name, IOContext.DEFAULT); readFile(input, 1000 * 1000);//from ww w . ja v a 2 s . c o m IndexInput clone = input.clone(); clone.readByte(); input.close(); }
From source file:org.apache.blur.store.hdfs.HdfsDirectoryResourceTest.java
License:Apache License
private void executeReads(HdfsDirectory dir, String name) throws IOException, InterruptedException { IndexInput input = dir.openInput(name, IOContext.READ); assertResourceCount(1);// ww w. j av a2 s .c o m input.readLong(); input.seek(0L); for (int i = 0; i < 2; i++) { readSeq(input.clone(), READ_SIZE); assertResourceCount(1 + i + 1); } input.close(); }
From source file:org.apache.jackrabbit.oak.plugins.index.lucene.OakDirectoryTest.java
License:Apache License
/**
 * Verifies that closing the original input also renders its clones closed.
 */
@Test
public void testCloseOnClonedIndexInputs() throws Exception {
    Directory dir = createDir(builder, false);
    NodeBuilder fileNode = builder.child(INDEX_DATA_CHILD_NAME).child("test.txt");

    // Back the file with many empty blobs so it has a multi-chunk layout.
    int blobCount = 1024;
    List<? super Blob> blobList = new ArrayList<Blob>(blobCount);
    for (int blobIndex = 0; blobIndex < blobCount; blobIndex++) {
        blobList.add(new ArrayBasedBlob(new byte[0]));
    }
    fileNode.setProperty(PropertyStates.createProperty("jcr:data", blobList, Type.BINARIES));

    IndexInput original = dir.openInput("test.txt", IOContext.DEFAULT);
    IndexInput firstClone = original.clone();
    IndexInput secondClone = original.clone();
    original.close();

    // Closing the original must close it and both of its clones.
    assertClosed(original);
    assertClosed(firstClone);
    assertClosed(secondClone);
}
From source file:org.apache.solr.store.hdfs.HdfsDirectoryTest.java
License:Apache License
@Test public void testWritingAndReadingAFile() throws IOException { String[] listAll = directory.listAll(); for (String file : listAll) { directory.deleteFile(file);//from www . j av a 2 s. c om } IndexOutput output = directory.createOutput("testing.test", new IOContext()); output.writeInt(12345); output.flush(); output.close(); IndexInput input = directory.openInput("testing.test", new IOContext()); assertEquals(12345, input.readInt()); input.close(); listAll = directory.listAll(); assertEquals(1, listAll.length); assertEquals("testing.test", listAll[0]); assertEquals(4, directory.fileLength("testing.test")); IndexInput input1 = directory.openInput("testing.test", new IOContext()); IndexInput input2 = (IndexInput) input1.clone(); assertEquals(12345, input2.readInt()); input2.close(); assertEquals(12345, input1.readInt()); input1.close(); assertFalse(directory.fileExists("testing.test.other")); assertTrue(directory.fileExists("testing.test")); directory.deleteFile("testing.test"); assertFalse(directory.fileExists("testing.test")); }