Usage examples for org.apache.lucene.store.IndexOutput#writeInt.
Method signature: public void writeInt(int i) throws IOException
From source file:com.bah.lucene.BaseDirectoryTestSuite.java
License:Apache License
@Test public void testWritingAndReadingAFile() throws IOException { IndexOutput output = directory.createOutput("testing.test", IOContext.DEFAULT); output.writeInt(12345); output.flush();//from w ww . j av a 2 s .c o m output.close(); IndexInput input = directory.openInput("testing.test", IOContext.DEFAULT); assertEquals(12345, input.readInt()); input.close(); String[] listAll = directory.listAll(); assertEquals(1, listAll.length); assertEquals("testing.test", listAll[0]); assertEquals(4, directory.fileLength("testing.test")); IndexInput input1 = directory.openInput("testing.test", IOContext.DEFAULT); IndexInput input2 = (IndexInput) input1.clone(); assertEquals(12345, input2.readInt()); input2.close(); assertEquals(12345, input1.readInt()); input1.close(); assertFalse(directory.fileExists("testing.test.other")); assertTrue(directory.fileExists("testing.test")); directory.deleteFile("testing.test"); assertFalse(directory.fileExists("testing.test")); }
From source file:com.browseengine.bobo.geosearch.solo.impl.IDGeoRecordSerializer.java
License:Apache License
/**
 * Serializes one record as: highOrder (long) + lowOrder (int) — together
 * INTERLACE_BYTES — followed by the raw id bytes.
 *
 * @throws IllegalArgumentException if the id length does not match
 *         {@code recordByteCount - INTERLACE_BYTES}
 */
@Override
public void writeGeoRecord(IndexOutput output, IDGeoRecord record, int recordByteCount) throws IOException {
    final int expectedIdBytes = recordByteCount - INTERLACE_BYTES;
    if (record.id.length != expectedIdBytes) {
        throw new IllegalArgumentException("Incorrect number of id bytes given. "
                + "This is most likely a bug! ExpectedBytes=" + expectedIdBytes
                + "; receivedBytes=" + record.id.length);
    }
    output.writeLong(record.highOrder);
    output.writeInt(record.lowOrder);
    output.writeBytes(record.id, record.id.length);
}
From source file:com.github.lucene.store.jdbc.index.AbstractIndexInputOutputITest.java
License:Apache License
/**
 * Writes a fixed sequence of primitives and byte runs into the "value1" entry
 * for later read-back verification.
 *
 * <p>Fix: the original never closed the {@code IndexOutput} when a write threw,
 * leaking the underlying resource; the close now runs in a finally block.
 */
private void insertData() throws IOException {
    final byte[] test = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 };
    final IndexOutput indexOutput = jdbcDirectory.createOutput("value1", new IOContext());
    try {
        indexOutput.writeInt(-1);
        indexOutput.writeLong(10);
        indexOutput.writeInt(0);
        indexOutput.writeInt(0);
        indexOutput.writeBytes(test, 8);
        indexOutput.writeBytes(test, 5); // intentionally a prefix of the same array
        indexOutput.writeByte((byte) 8);
        indexOutput.writeBytes(new byte[] { 1, 2 }, 2);
    } finally {
        indexOutput.close();
    }
}
From source file:com.liferay.portal.search.lucene.dump.IndexCommitSerializationUtil.java
License:Open Source License
/**
 * Writes the segments.gen file: the lockless format marker followed by the
 * generation, duplicated — the reader uses the repeated value as a consistency
 * check.
 */
private static void writeSegmentsGen(Directory directory, long generation) throws IOException {
    if (_log.isDebugEnabled()) {
        _log.debug("Writing " + _SEGMENTS_GEN_FILE_NAME + " with generation " + generation);
    }
    IndexOutput out = directory.createOutput(_SEGMENTS_GEN_FILE_NAME);
    try {
        out.writeInt(SegmentInfos.FORMAT_LOCKLESS);
        out.writeLong(generation);
        out.writeLong(generation);
    } finally {
        out.close();
    }
}
From source file:com.nearinfinity.bloomfilter.bitset.ThreadSafeBitSet.java
License:Apache License
@Override public void write(IndexOutput output) throws IOException { int length = bits.length(); output.writeInt(length); for (int i = 0; i < length; i++) { output.writeLong(bits.get(i));//ww w . j av a 2s .co m } }
From source file:com.nearinfinity.bloomfilter.BloomFilter.java
License:Apache License
/**
 * Serializes the filter parameters (sizes, false-positive probability as raw
 * long bits, hash count, bit count) and then delegates the bit data to the
 * underlying bit set.
 */
public void write(IndexOutput output) throws IOException {
    output.writeLong(numberOfBitsDivBy2);
    output.writeLong(elementSize);
    // doubles have no direct writer; store the IEEE-754 bit pattern.
    output.writeLong(Double.doubleToLongBits(probabilityOfFalsePositives));
    output.writeInt(hashes);
    output.writeInt(numberOfBits);
    bitSet.write(output);
}
From source file:com.sindicetech.siren.index.codecs.siren10.Siren10PostingsWriter.java
License:Open Source License
@Override public void init(final IndexOutput termsOut) throws IOException { CodecUtil.writeHeader(termsOut, CODEC, VERSION_CURRENT); termsOut.writeInt(blockSkipInterval); // write skipInterval termsOut.writeInt(maxSkipLevels); // write maxSkipLevels termsOut.writeInt(blockSkipMinimum); // write skipMinimum termsOut.writeInt(maxBlockSize); // write maxBlockSize }
From source file:com.zimbra.cs.index.LuceneIndexRepair.java
License:Open Source License
private void commit(long gen) throws IOException { IndexOutput output = directory.createOutput(SEGMENTS_GEN); try {// ww w . ja va2 s . co m output.writeInt(SegmentInfos.FORMAT_LOCKLESS); output.writeLong(gen); output.writeLong(gen); } finally { output.close(); } directory.sync(Collections.singleton(SEGMENTS_GEN)); }
From source file:di.uniba.it.wsd.dsm.Text2Bin.java
License:Open Source License
/** * Convert a WordSpace text matrix to a bin WordSpace file * Text matrix format:/*w w w . jav a 2 s .c om*/ * - the first line contains the matrix dimensions N * - each line contains the word vector information: word d1 d2 ... dN * Text2Bin text_matrix_file bin_matrix_file * @param args the command line arguments */ public static void main(String[] args) { try { BufferedReader in = new BufferedReader(new FileReader(args[0])); File file = new File(args[1]); FSDirectory fs = FSDirectory.open(file.getParentFile()); IndexOutput output = fs.createOutput(file.getName()); String header = in.readLine(); output.writeString("-dimensions"); output.writeInt(Integer.parseInt(header)); while (in.ready()) { String line = in.readLine(); String[] split = line.split("\t"); output.writeString(split[0]); for (int i = 1; i < split.length; i++) { output.writeInt(Float.floatToIntBits(Float.parseFloat(split[i]))); } } in.close(); output.close(); } catch (IOException ex) { Logger.getLogger(Text2Bin.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:org.apache.blur.lucene.codec.Blur022SegmentInfoWriter.java
License:Apache License
@Override public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException { final String fileName = IndexFileNames.segmentFileName(si.name, "", Blur022SegmentInfoFormat.SI_EXTENSION); si.addFile(fileName);//from ww w. j a v a 2 s . c o m final IndexOutput output = dir.createOutput(fileName, ioContext); boolean success = false; try { CodecUtil.writeHeader(output, Blur022SegmentInfoFormat.CODEC_NAME, Blur022SegmentInfoFormat.VERSION_CURRENT); output.writeString(si.getVersion()); output.writeInt(si.getDocCount()); output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO)); output.writeStringStringMap(si.getDiagnostics()); Map<String, String> attributes = si.attributes(); TreeMap<String, String> newAttributes = new TreeMap<String, String>(); if (attributes != null) { newAttributes.putAll(attributes); } newAttributes.put(Blur022StoredFieldsFormat.STORED_FIELDS_FORMAT_CHUNK_SIZE, Integer.toString(_compressionChunkSize)); newAttributes.put(Blur022StoredFieldsFormat.STORED_FIELDS_FORMAT_COMPRESSION_MODE, _compressionMode); output.writeStringStringMap(newAttributes); output.writeStringSet(si.files()); success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(output); si.dir.deleteFile(fileName); } else { output.close(); } } }