Usage examples for org.apache.lucene.store.IndexOutput#writeString(String)
public void writeString(String s) throws IOException
From source file:com.browseengine.bobo.geosearch.solo.index.impl.GeoOnlyIndexerTest.java
License:Apache License
private void createIndexFileDummyData() throws IOException { IndexOutput output = directory.createOutput(indexName + ".geo"); output.writeString("dummyData"); output.flush();//from ww w . j av a 2s. c o m }
From source file:com.github.lucene.store.database.DatabaseDirectoryITest.java
License:Apache License
/**
 * Writes {@code content} as a single Lucene string into a new file in the
 * given directory, using an {@link IOContext} matching the supplied context
 * (FLUSH and MERGE get representative FlushInfo/MergeInfo instances).
 *
 * @param directory directory to create the file in
 * @param fileName  name of the file to create
 * @param content   string payload written via writeString
 * @param context   context kind used to build the IOContext
 * @throws IOException if the file cannot be created or written
 */
private void addContentIndexOutput(final Directory directory, final String fileName, final String content,
        final Context context) throws IOException {
    final IOContext ioContext;
    switch (context) {
    case FLUSH:
        ioContext = new IOContext(new FlushInfo(1, 1));
        break;
    case MERGE:
        ioContext = new IOContext(new MergeInfo(1, 1, false, 1));
        break;
    default:
        ioContext = new IOContext(context);
        break;
    }
    final IndexOutput indexOutput = directory.createOutput(fileName, ioContext);
    try {
        indexOutput.writeString(content);
    } finally {
        // Close even if writeString throws (original leaked on failure).
        indexOutput.close();
    }
}
From source file:com.github.lucene.store.jdbc.JdbcDirectoryGeneralOperationsITest.java
License:Apache License
@Test public void testList() throws IOException { Connection con = DataSourceUtils.getConnection(dataSource); jdbcDirectory.create();// w ww .j a v a 2s .c o m DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); con = DataSourceUtils.getConnection(dataSource); String[] list = jdbcDirectory.listAll(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); Assert.assertEquals(0, list.length); con = DataSourceUtils.getConnection(dataSource); final IndexOutput indexOutput = jdbcDirectory.createOutput("test1", new IOContext()); indexOutput.writeString("TEST STRING"); indexOutput.close(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); jdbcTemplate.executeSelect("select * from test", new JdbcTemplate.ExecuteSelectCallback() { @Override public void fillPrepareStatement(final PreparedStatement ps) throws Exception { } @Override public Object execute(final ResultSet rs) throws Exception { Assert.assertTrue(rs.next()); return null; } }); con = DataSourceUtils.getConnection(dataSource); list = jdbcDirectory.listAll(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); Assert.assertEquals(1, list.length); con = DataSourceUtils.getConnection(dataSource); jdbcDirectory.deleteFile("test1"); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); con = DataSourceUtils.getConnection(dataSource); list = jdbcDirectory.listAll(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); Assert.assertEquals(0, list.length); }
From source file:com.github.lucene.store.jdbc.JdbcDirectoryGeneralOperationsITest.java
License:Apache License
@Test public void testListWithinTransaction() throws IOException { final Connection con = DataSourceUtils.getConnection(dataSource); jdbcDirectory.create();//from w w w . java 2 s .c o m String[] list = jdbcDirectory.listAll(); Assert.assertEquals(0, list.length); final IndexOutput indexOutput = jdbcDirectory.createOutput("test1", new IOContext()); indexOutput.writeString("TEST STRING"); indexOutput.close(); jdbcTemplate.executeSelect("select * from test", new JdbcTemplate.ExecuteSelectCallback() { @Override public void fillPrepareStatement(final PreparedStatement ps) throws Exception { } @Override public Object execute(final ResultSet rs) throws Exception { Assert.assertTrue(rs.next()); return null; } }); list = jdbcDirectory.listAll(); Assert.assertEquals(1, list.length); jdbcDirectory.deleteFile("test1"); list = jdbcDirectory.listAll(); Assert.assertEquals(0, list.length); DataSourceUtils.rollbackConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); }
From source file:com.github.lucene.store.jdbc.JdbcDirectoryGeneralOperationsITest.java
License:Apache License
@Test public void testDeleteContent() throws IOException { Connection con = DataSourceUtils.getConnection(dataSource); jdbcDirectory.create();//from ww w . ja v a2s .com DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); con = DataSourceUtils.getConnection(dataSource); String[] list = jdbcDirectory.listAll(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); Assert.assertEquals(0, list.length); con = DataSourceUtils.getConnection(dataSource); final IndexOutput indexOutput = jdbcDirectory.createOutput("test1", new IOContext()); indexOutput.writeString("TEST STRING"); indexOutput.close(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); con = DataSourceUtils.getConnection(dataSource); list = jdbcDirectory.listAll(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); Assert.assertEquals(1, list.length); con = DataSourceUtils.getConnection(dataSource); jdbcDirectory.deleteContent(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); con = DataSourceUtils.getConnection(dataSource); list = jdbcDirectory.listAll(); DataSourceUtils.commitConnectionIfPossible(con); DataSourceUtils.releaseConnection(con); Assert.assertEquals(0, list.length); }
From source file:di.uniba.it.wsd.dsm.Text2Bin.java
License:Open Source License
/**
 * Converts a WordSpace text matrix into a binary WordSpace file.
 * Text matrix format:
 * - the first line contains the matrix dimension N
 * - each following line contains a word vector: word d1 d2 ... dN (tab-separated)
 *
 * Usage: Text2Bin text_matrix_file bin_matrix_file
 *
 * @param args the command line arguments: args[0] input text matrix,
 *             args[1] output binary file
 */
public static void main(String[] args) {
    BufferedReader in = null;
    FSDirectory fs = null;
    IndexOutput output = null;
    try {
        in = new BufferedReader(new FileReader(args[0]));
        File file = new File(args[1]);
        fs = FSDirectory.open(file.getParentFile());
        output = fs.createOutput(file.getName());
        // Header: marker string followed by the vector dimension.
        String header = in.readLine();
        output.writeString("-dimensions");
        output.writeInt(Integer.parseInt(header));
        while (in.ready()) {
            String line = in.readLine();
            String[] split = line.split("\t");
            output.writeString(split[0]);
            // Floats are stored as raw int bits to keep the binary format exact.
            for (int i = 1; i < split.length; i++) {
                output.writeInt(Float.floatToIntBits(Float.parseFloat(split[i])));
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(Text2Bin.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // The original closed these inside the try block, leaking all three
        // resources on any read/parse/write failure. Close them here instead,
        // each guarded so one failure cannot mask another.
        if (in != null) {
            try {
                in.close();
            } catch (IOException ex) {
                Logger.getLogger(Text2Bin.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        if (output != null) {
            try {
                output.close();
            } catch (IOException ex) {
                Logger.getLogger(Text2Bin.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        if (fs != null) {
            fs.close();
        }
    }
}
From source file:org.apache.blur.lucene.codec.Blur022SegmentInfoWriter.java
License:Apache License
/**
 * Writes the Blur 0.2.2 segment info file (.si): codec header, segment
 * version, doc count, compound-file flag, diagnostics, attributes (with
 * Blur's stored-fields chunk size and compression mode forced in), and the
 * segment's file set. On any failure the partially written file is closed
 * and deleted; on success the output is closed normally.
 */
@Override
public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(si.name, "", Blur022SegmentInfoFormat.SI_EXTENSION);
    si.addFile(fileName);

    final IndexOutput output = dir.createOutput(fileName, ioContext);
    boolean success = false;
    try {
        CodecUtil.writeHeader(output, Blur022SegmentInfoFormat.CODEC_NAME,
                Blur022SegmentInfoFormat.VERSION_CURRENT);
        output.writeString(si.getVersion());
        output.writeInt(si.getDocCount());
        output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
        output.writeStringStringMap(si.getDiagnostics());

        // Copy existing attributes into a sorted map, then force Blur's
        // chunk-size and compression-mode entries so they are always present.
        Map<String, String> attributes = si.attributes();
        TreeMap<String, String> newAttributes = new TreeMap<String, String>();
        if (attributes != null) {
            newAttributes.putAll(attributes);
        }
        newAttributes.put(Blur022StoredFieldsFormat.STORED_FIELDS_FORMAT_CHUNK_SIZE,
                Integer.toString(_compressionChunkSize));
        newAttributes.put(Blur022StoredFieldsFormat.STORED_FIELDS_FORMAT_COMPRESSION_MODE, _compressionMode);
        output.writeStringStringMap(newAttributes);

        output.writeStringSet(si.files());
        success = true;
    } finally {
        if (!success) {
            // Best-effort cleanup: close without surfacing secondary errors,
            // then remove the partially written file.
            IOUtils.closeWhileHandlingException(output);
            si.dir.deleteFile(fileName);
        } else {
            output.close();
        }
    }
}
From source file:org.apache.blur.lucene.warmup.IndexTracerResult.java
License:Apache License
/**
 * Serializes this tracer result to the given output: the field and segment
 * names first, then for each of the tim/doc/pos/pay sections a captured
 * flag followed — only when captured — by the recorded position and file name.
 */
public void write(IndexOutput output) throws IOException {
    output.writeString(_field);
    output.writeString(_segmentName);

    writeBoolean(output, _timCaptured);
    if (_timCaptured) {
        output.writeVLong(_timPosition);
        output.writeString(_timFileName);
    }

    writeBoolean(output, _docCaptured);
    if (_docCaptured) {
        output.writeVLong(_docPosition);
        output.writeString(_docFileName);
    }

    writeBoolean(output, _posCaptured);
    if (_posCaptured) {
        output.writeVLong(_posPosition);
        output.writeString(_posFileName);
    }

    writeBoolean(output, _payCaptured);
    if (_payCaptured) {
        output.writeVLong(_payPosition);
        output.writeString(_payFileName);
    }
}
From source file:org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager.java
License:Apache License
private synchronized void persist() throws IOException { String fileName = SNAPSHOTS_PREFIX + nextWriteGen; IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT); boolean success = false; try {/*w w w . j a va2 s. c o m*/ CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT); out.writeVInt(nameToDetailsMapping.size()); for (Entry<String, SnapshotMetaData> ent : nameToDetailsMapping.entrySet()) { out.writeString(ent.getKey()); out.writeString(ent.getValue().getIndexDirPath()); out.writeVLong(ent.getValue().getGenerationNumber()); } success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(out); IOUtils.deleteFilesIgnoringExceptions(dir, fileName); } else { IOUtils.close(out); } } dir.sync(Collections.singletonList(fileName)); if (nextWriteGen > 0) { String lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen - 1); // exception OK: likely it didn't exist IOUtils.deleteFilesIgnoringExceptions(dir, lastSaveFile); } nextWriteGen++; }
From source file:org.codelibs.elasticsearch.search.suggest.completion2x.AnalyzingCompletionLookupProvider.java
License:Apache License
@Override public FieldsConsumer consumer(final IndexOutput output) throws IOException { CodecUtil.writeHeader(output, CODEC_NAME, CODEC_VERSION_LATEST); return new FieldsConsumer() { private Map<String, Long> fieldOffsets = new HashMap<>(); @Override/*from w w w . j a v a 2 s . co m*/ public void close() throws IOException { try { /* * write the offsets per field such that we know where * we need to load the FSTs from */ long pointer = output.getFilePointer(); output.writeVInt(fieldOffsets.size()); for (Map.Entry<String, Long> entry : fieldOffsets.entrySet()) { output.writeString(entry.getKey()); output.writeVLong(entry.getValue()); } output.writeLong(pointer); CodecUtil.writeFooter(output); } finally { IOUtils.close(output); } } @Override public void write(Fields fields) throws IOException { for (String field : fields) { Terms terms = fields.terms(field); if (terms == null) { continue; } terms.iterator(); new SuggestPayload(); throw new UnsupportedOperationException("QueryBuilders does not support this operation."); // final XAnalyzingSuggester.XBuilder builder = new XAnalyzingSuggester.XBuilder( // maxSurfaceFormsPerAnalyzedForm, hasPayloads, XAnalyzingSuggester.PAYLOAD_SEP); // int docCount = 0; // while (true) { // BytesRef term = termsEnum.next(); // if (term == null) { // break; // } // docsEnum = termsEnum.postings(docsEnum, PostingsEnum.PAYLOADS); // builder.startTerm(term); // int docFreq = 0; // while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { // for (int i = 0; i < docsEnum.freq(); i++) { // final int position = docsEnum.nextPosition(); // AnalyzingCompletionLookupProvider.this.parsePayload(docsEnum.getPayload(), spare); // builder.addSurface(spare.surfaceForm.get(), spare.payload.get(), spare.weight); // // multi fields have the same surface form so we sum up here // maxAnalyzedPathsForOneInput = Math.max(maxAnalyzedPathsForOneInput, position + 1); // } // docFreq++; // docCount = Math.max(docCount, docsEnum.docID()+1); // } // 
builder.finishTerm(docFreq); // } // /* // * Here we are done processing the field and we can // * buid the FST and write it to disk. // */ // FST<Pair<Long, BytesRef>> build = builder.build(); // assert build != null || docCount == 0: "the FST is null but docCount is != 0 actual value: [" + docCount + "]"; // /* // * it's possible that the FST is null if we have 2 segments that get merged // * and all docs that have a value in this field are deleted. This will cause // * a consumer to be created but it doesn't consume any values causing the FSTBuilder // * to return null. // */ // if (build != null) { // fieldOffsets.put(field, output.getFilePointer()); // build.save(output); // /* write some more meta-info */ // output.writeVInt(maxAnalyzedPathsForOneInput); // output.writeVInt(maxSurfaceFormsPerAnalyzedForm); // output.writeInt(maxGraphExpansions); // can be negative // int options = 0; // options |= preserveSep ? SERIALIZE_PRESERVE_SEPARATORS : 0; // options |= hasPayloads ? SERIALIZE_HAS_PAYLOADS : 0; // options |= preservePositionIncrements ? SERIALIZE_PRESERVE_POSITION_INCREMENTS : 0; // output.writeVInt(options); // output.writeVInt(XAnalyzingSuggester.SEP_LABEL); // output.writeVInt(XAnalyzingSuggester.END_BYTE); // output.writeVInt(XAnalyzingSuggester.PAYLOAD_SEP); // output.writeVInt(XAnalyzingSuggester.HOLE_CHARACTER); // } } } }; }