Usage examples for the org.apache.lucene.store.RAMDirectory no-argument constructor:
public RAMDirectory()
From source file:com.orientechnologies.spatial.sandbox.LuceneGeoTest.java
License:Apache License
@Test public void geoIntersectTest() throws IOException, ParseException { RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategy( new GeohashPrefixTree(JtsSpatialContext.GEO, 11), "location"); strategy.setDistErrPct(0);// w w w .j a v a 2s . c om IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer()); final RAMDirectory directory = new RAMDirectory(); final IndexWriter writer = new IndexWriter(directory, conf); Shape point = JtsSpatialContext.GEO.getWktShapeParser().parse("POINT (9.4714708 47.6819432)"); Shape polygon = JtsSpatialContext.GEO.getWktShapeParser().parse( "POLYGON((9.481201171875 47.64885294675266,9.471416473388672 47.65128140482982,9.462661743164062 47.64781214443791,9.449443817138672 47.656947367880335,9.445838928222656 47.66110972448931,9.455795288085938 47.667352637215,9.469013214111328 47.67255449415724,9.477081298828125 47.679142768657066,9.490299224853516 47.678680460743834,9.506263732910156 47.679258344995326,9.51364517211914 47.68191653011071,9.518795013427734 47.677177931734406,9.526691436767578 47.679489496903706,9.53390121459961 47.67139857075435,9.50918197631836 47.66180341832901,9.50815200805664 47.6529003141482,9.51192855834961 47.64654002455372,9.504375457763672 47.64237650648966,9.49270248413086 47.649662445325035,9.48617935180664 47.65151268066222,9.481201171875 47.64885294675266))"); Document doc = new Document(); Assert.assertNotEquals(point.relate(polygon), SpatialRelation.INTERSECTS); for (IndexableField f : strategy.createIndexableFields(point)) { doc.add(f); } writer.addDocument(doc); writer.commit(); SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, polygon.getBoundingBox()); Filter filter = strategy.makeFilter(args); IndexReader reader = DirectoryReader.open(directory); IndexSearcher searcher = new IndexSearcher(reader); TopDocs search = searcher.search(new MatchAllDocsQuery(), filter, 1000); Assert.assertEquals(search.totalHits, 0); reader.close(); writer.close(); }
From source file:com.orientechnologies.spatial.sandbox.LuceneGeoTest.java
License:Apache License
@Test public void geoSpeedTest() throws IOException, ParseException { RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategy( new GeohashPrefixTree(JtsSpatialContext.GEO, 11), "location"); IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer()); final RAMDirectory directory = new RAMDirectory(); final IndexWriter writer = new IndexWriter(directory, conf); Shape multiPolygon = JtsSpatialContext.GEO.getWktShapeParser().parse( "MULTIPOLYGON (((15.520376 38.231155, 15.160243 37.444046, 15.309898 37.134219, 15.099988 36.619987, 14.335229 36.996631, 13.826733 37.104531, 12.431004 37.61295, 12.570944 38.126381, 13.741156 38.034966, 14.761249 38.143874, 15.520376 38.231155)), ((9.210012 41.209991, 9.809975 40.500009, 9.669519 39.177376, 9.214818 39.240473, 8.806936 38.906618, 8.428302 39.171847, 8.388253 40.378311, 8.159998 40.950007, 8.709991 40.899984, 9.210012 41.209991)), ((12.376485 46.767559, 13.806475 46.509306, 13.69811 46.016778, 13.93763 45.591016, 13.141606 45.736692, 12.328581 45.381778, 12.383875 44.885374, 12.261453 44.600482, 12.589237 44.091366, 13.526906 43.587727, 14.029821 42.761008, 15.14257 41.95514, 15.926191 41.961315, 16.169897 41.740295, 15.889346 41.541082, 16.785002 41.179606, 17.519169 40.877143, 18.376687 40.355625, 18.480247 40.168866, 18.293385 39.810774, 17.73838 40.277671, 16.869596 40.442235, 16.448743 39.795401, 17.17149 39.4247, 17.052841 38.902871, 16.635088 38.843572, 16.100961 37.985899, 15.684087 37.908849, 15.687963 38.214593, 15.891981 38.750942, 16.109332 38.964547, 15.718814 39.544072, 15.413613 40.048357, 14.998496 40.172949, 14.703268 40.60455, 14.060672 40.786348, 13.627985 41.188287, 12.888082 41.25309, 12.106683 41.704535, 11.191906 42.355425, 10.511948 42.931463, 10.200029 43.920007, 9.702488 44.036279, 8.888946 44.366336, 8.428561 44.231228, 7.850767 43.767148, 7.435185 43.693845, 7.549596 44.127901, 7.007562 44.254767, 6.749955 45.028518, 7.096652 45.333099, 6.802355 45.70858, 6.843593 
45.991147, 7.273851 45.776948, 7.755992 45.82449, 8.31663 46.163642, 8.489952 46.005151, 8.966306 46.036932, 9.182882 46.440215, 9.922837 46.314899, 10.363378 46.483571, 10.442701 46.893546, 11.048556 46.751359, 11.164828 46.941579, 12.153088 47.115393, 12.376485 46.767559)))"); Document doc = new Document(); for (IndexableField f : strategy.createIndexableFields(multiPolygon)) { doc.add(f);/*w w w. j a va 2 s . com*/ } writer.addDocument(doc); writer.commit(); writer.close(); }
From source file:com.orientechnologies.spatial.sandbox.LuceneGeoTest.java
License:Apache License
@Test public void geoSpeedTestInternal() throws IOException, ParseException { RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategy( new GeohashPrefixTree(JtsSpatialContext.GEO, 11), "location"); IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer()); final RAMDirectory directory = new RAMDirectory(); final IndexWriter writer = new IndexWriter(directory, conf); ODocument entries = loadMultiPolygon(); OMultiPolygonShapeBuilder builder = new OMultiPolygonShapeBuilder(); Shape multiPolygon = builder.fromDoc(entries); Document doc = new Document(); for (IndexableField f : strategy.createIndexableFields(multiPolygon)) { doc.add(f);//from w w w . j a v a2s.c om } writer.addDocument(doc); writer.commit(); writer.close(); }
From source file:com.ostrichemulators.semtool.rdf.engine.util.EngineConsistencyChecker.java
/** * Resolves "near" matches from the elements of the given type. If * {@link #across} is <code>true</code>, each element will be compared to all * elements of all types./*w w w . j av a 2 s .co m*/ * * @param uri the concept/relation class (not instance) to resolve * @param minDistance the minimum allowable similarity * @return map of uri-to-hits */ public MultiMap<IRI, Hit> check(IRI uri, final float minDistance) { MultiMap<IRI, Hit> hits = new MultiMap<>(); // get our universe of possible hits Map<IRI, String> possibles = getHitUniverse(uri); MultiMap<String, IRI> revpos = MultiMap.flip(possibles); Directory ramdir = new RAMDirectory(); StandardAnalyzer analyzer = null; SpellChecker speller = null; List<IRI> errors = new ArrayList<>(); try { analyzer = new StandardAnalyzer(); IndexWriterConfig config = new IndexWriterConfig(analyzer); speller = new SpellChecker(ramdir, strdist); StringBuilder names = new StringBuilder(); for (String s : possibles.values()) { names.append(s).append("\n"); } PlainTextDictionary ptd = new PlainTextDictionary(new StringReader(names.toString())); speller.indexDictionary(ptd, config, true); List<IRI> needles = typeToURILkp.get(uri); for (IRI needle : needles) { String needlelabel = labels.get(needle); try { String[] suggestions = speller.suggestSimilar(needlelabel, 20, minDistance); for (String s : suggestions) { // found a match, so figure out what we actually matched float distance = strdist.getDistance(needlelabel, s); for (IRI match : revpos.get(s)) { hits.add(needle, new Hit(match, s, uriToTypeLkp.get(match), distance)); } } } catch (Exception e) { // our fallback resolution always works; it's just a ton slower errors.add(needle); } } } catch (Exception e) { log.error(e, e); } finally { for (Closeable c : new Closeable[] { analyzer, ramdir, speller }) { if (null != c) { try { c.close(); } catch (Exception e) { log.warn(e, e); } } } } if (!errors.isEmpty()) { fallbackResolve(errors, possibles, hits, strdist, minDistance); } 
return hits; }
From source file:com.pjaol.search.test.UnitTests.TestDistance.java
License:Apache License
/** Creates a fresh in-memory index and populates it with the test fixture data. */
protected void setUp() throws IOException {
    directory = new RAMDirectory();
    // 'true' -> create a new index, replacing anything already in the directory.
    IndexWriter indexWriter = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
    addData(indexWriter);
}
From source file:com.plug.Plug_8_5_2.java
License:Apache License
private void reindexTermbase(DbServer dbServer, HashMap<String, String> companys) throws Exception { log.info("Start upgrading Lucene index for termbase"); TermbaseHandler h = new TermbaseHandler(); List<Termbase> tbs = dbServer.getDbUtil().query(TermbaseHandler.SQL, h); m_analyzer = new NgramAnalyzer(3); for (Termbase tb : tbs) { if (tb.getCOMPANYID().equals(LuceneConstants.SUPER_COMPANY_ID)) { continue; }/*from www . ja v a 2 s. c o m*/ String cname = companys.get(tb.getCOMPANYID()); File termDir = new File(fileStorageDir, cname + "/TB-" + tb.getTB_NAME()); // check re-indexed if (isIndexedBefore(termDir, tb.getTB_NAME())) { logAlreadyIndex(tb.getTB_NAME()); continue; } showMsg(cname, tb.getTB_NAME(), false); // 1 delete old term base indexes logDeleteFile(termDir.getAbsolutePath()); deleteFile(termDir.getAbsolutePath()); // 2 create new empty dir termDir.mkdirs(); Definition dif = new Definition(tb.getTB_DEFINITION()); List<Index> indexs = dif.getIndexes(); for (Index index : indexs) { // 3 write index into ram RAMDirectory ramdir = new RAMDirectory(); IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_44, m_analyzer); config.setOpenMode(OpenMode.CREATE_OR_APPEND); IndexWriter ramIndexWriter = new IndexWriter(ramdir, config); if (index != null && "fuzzy".equalsIgnoreCase(index.getType())) { String folder = index.getLanguageName() + "-" + index.getLocale() + "-TERM"; File indexFolder = new File(termDir, folder); m_directory = indexFolder.getAbsolutePath(); m_fsDir = new SimpleFSDirectory(indexFolder); String sql = TermHandler.generateSQL(tb.getTBID(), index.getLanguageName()); TermHandler termH = new TermHandler(); List<Document> docs = dbServer.getDbUtil().query(sql, termH); for (Document doc : docs) { ramIndexWriter.addDocument(doc); ramIndexWriter.commit(); } // 4 write index from ram into disk IndexWriter diskwriter = getIndexWriter(true); diskwriter.commit(); if (docs != null && docs.size() > 0) { Directory[] ds = new Directory[] { ramdir }; 
diskwriter.addIndexes(ds); diskwriter.commit(); } // 5 close index writer IOUtils.closeWhileHandlingException(ramIndexWriter); IOUtils.closeWhileHandlingException(diskwriter); ramIndexWriter = null; ramdir = null; } } writeTagFile(termDir, tb.getTB_NAME()); } log.info("End upgrading Lucene index for termbase"); }
From source file:com.pongasoft.kiwidoc.index.impl.lucene.impl.RAMDirectoryFactory.java
License:Apache License
/**
 * Opens an empty RAM directory.
 *
 * @return a freshly-initialized, empty in-memory directory
 * @throws IOException if the directory cannot be initialized
 */
private Directory openEmptyRAMDirectory() throws IOException {
    Directory ramDirectory = new RAMDirectory();
    // Open and immediately close a writer to lay down an empty index structure.
    new IndexWriter(ramDirectory, null, true, IndexWriter.MaxFieldLength.UNLIMITED).close();
    log.info("Opened empty RAM directory");
    return ramDirectory;
}
From source file:com.ponysdk.sample.client.page.addon.SelectizeAddon.java
License:Apache License
public SelectizeAddon() { super(Element.newInput()); setTerminalHandler(this); ///*w w w .ja va 2 s. co m*/ final Analyzer analyzer = new StandardAnalyzer(); final Directory directory = new RAMDirectory(); final IndexWriterConfig config = new IndexWriterConfig(analyzer); IndexWriter writer; try { writer = new IndexWriter(directory, config); final Document doc = new Document(); final String text = "Test de ouf"; final FieldType fieldType = new FieldType(); fieldType.setIndexOptions(IndexOptions.NONE); fieldType.setStored(true); fieldType.setTokenized(false); doc.add(new Field("id", "12", fieldType)); doc.add(new Field("fieldname", text, TextField.TYPE_STORED)); writer.addDocument(doc); addAssetsType(writer); addTenor(writer); addClients(writer); addSide(writer); writer.close(); } catch (final IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } try { // Now search the index: final DirectoryReader ireader = DirectoryReader.open(directory); isearcher = new IndexSearcher(ireader); // Parse a simple query that searches for "text": // final QueryParser parser = new QueryParser("fieldname", // analyzer); // parser.setFuzzyMinSim(2f); final Term term = new Term("fieldname", "indesfed"); final Query query = new FuzzyQuery(term); // final TopDocs hits = isearcher.search(query, 1000).scoreDocs; // final Query query = parser.parse("indeed"); final ScoreDoc[] hits = isearcher.search(query, 1000).scoreDocs; // Iterate through the results: for (final ScoreDoc hit : hits) { System.err.println("Score : " + hit.score); final Document hitDoc = isearcher.doc(hit.doc); System.err.println("Found document" + hitDoc.getField("fieldname").stringValue()); } // ireader.close(); // directory.close(); } catch (final Exception exception) { exception.printStackTrace(); } // <input type="text" id="input-tags3" class="demo-default" // value="science,biology,chemistry,physics"> }
From source file:com.querydsl.lucene3.LuceneQueryTest.java
License:Apache License
@Before public void setUp() throws Exception { final QDocument entityPath = new QDocument("doc"); title = entityPath.title;// ww w . jav a 2s . co m year = entityPath.year; gross = entityPath.gross; idx = new RAMDirectory(); writer = createWriter(idx); writer.addDocument(createDocument("Jurassic Park", "Michael Crichton", "It's a UNIX system! I know this!", 1990, 90.00)); writer.addDocument( createDocument("Nummisuutarit", "Aleksis Kivi", "ESKO. Ja iloitset ja riemuitset?", 1864, 10.00)); writer.addDocument(createDocument("The Lord of the Rings", "John R. R. Tolkien", "One Ring to rule them all, One Ring to find them, One Ring to bring them all and in the darkness bind them", 1954, 89.00)); writer.addDocument(createDocument("Introduction to Algorithms", "Thomas H. Cormen, Charles E. Leiserson, Ronald L. Rivest, and Clifford Stein", "Bubble sort", 1990, 30.50)); writer.close(); IndexReader reader = IndexReader.open(idx); searcher = new IndexSearcher(reader); query = new LuceneQuery(new LuceneSerializer(true, true), searcher); }
From source file:com.querydsl.lucene3.LuceneQueryTest.java
License:Apache License
@Test public void Empty_Index_Should_Return_Empty_List() throws Exception { idx = new RAMDirectory(); writer = createWriter(idx);// w w w .java 2s . c o m writer.close(); IndexReader reader = IndexReader.open(idx); searcher = new IndexSearcher(reader); query = new LuceneQuery(new LuceneSerializer(true, true), searcher); assertTrue(query.fetch().isEmpty()); }