List of usage examples for org.apache.lucene.store Directory listAll
public abstract String[] listAll() throws IOException;
From source file:collene.Freedb.java
License:Apache License
public static void DoSearch(Directory directory) throws Exception { out.println("I think these are the files:"); for (String s : directory.listAll()) { out.println(s);/*from w w w . j av a 2 s.c o m*/ } IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(directory)); Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_4_9); QueryParser parser = new QueryParser(Version.LUCENE_4_9, "any", analyzer); for (int i = 0; i < 5; i++) { long searchStart = System.currentTimeMillis(); Query query = parser.parse("morrissey"); //Query query = parser.parse("Dance"); TopDocs docs = searcher.search(query, 10); long searchEnd = System.currentTimeMillis(); out.println(String.format("%s %d total hits in %d", directory.getClass().getSimpleName(), docs.totalHits, searchEnd - searchStart)); long lookupStart = System.currentTimeMillis(); for (ScoreDoc d : docs.scoreDocs) { Document doc = searcher.doc(d.doc); out.println(String.format("%d %.2f %d %s", d.doc, d.score, d.shardIndex, doc.getField("any").stringValue())); } long lookupEnd = System.currentTimeMillis(); out.println(String.format("Document lookup took %d ms for %d documents", lookupEnd - lookupStart, docs.scoreDocs.length)); } directory.close(); }
From source file:collene.Freedb.java
License:Apache License
public static void BuildIndex(Directory directory) throws Exception { String freedbPath = "/Users/gdusbabek/Downloads/freedb-complete-20140701.tar.bz2"; if (directory == null) { System.out.println("Need to specify: { memory | file | cassandra }. Did you misspell something?"); System.exit(-1);// w w w .j av a 2 s .c o m } FreeDbReader reader = new FreeDbReader(new File(freedbPath), 50000); reader.start(); long indexStart = System.currentTimeMillis(); Collection<Document> documents = new ArrayList<Document>(BATCH_SIZE); Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_4_9); IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_4_9, analyzer); config.setOpenMode(IndexWriterConfig.OpenMode.CREATE); IndexWriter writer = new IndexWriter(directory, config); // stop after this many documents. final int maxDocuments = 400000; //Integer.MAX_VALUE; FreeDbEntry entry = reader.next(); int count = 0; while (entry != null && count < maxDocuments) { Document doc = new Document(); String any = entry.toString(); doc.add(new Field("any", any, TextField.TYPE_STORED)); doc.add(new Field("artist", entry.getArtist(), TextField.TYPE_NOT_STORED)); doc.add(new Field("album", entry.getAlbum(), TextField.TYPE_NOT_STORED)); doc.add(new Field("title", entry.getTitle(), TextField.TYPE_NOT_STORED)); doc.add(new Field("genre", entry.getGenre(), TextField.TYPE_NOT_STORED)); doc.add(new Field("year", entry.getYear(), TextField.TYPE_NOT_STORED)); for (int i = 0; i < entry.getTrackCount(); i++) { doc.add(new Field("track", entry.getTrack(i), TextField.TYPE_STORED)); } documents.add(doc); if (VERBOSE) { out.println(any); } if (documents.size() == BATCH_SIZE) { //out.println(String.format("Adding batch at count %d", count)); writer.addDocuments(documents); //out.println("done"); documents.clear(); } count += 1; if (count >= MAX_ENTRIES) { // done indexing. 
break; } entry = reader.next(); if (count % 100000 == 0) { out.println(String.format("Indexed %d documents", count)); // do a quick morrissey search for fun. // IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(ColDirectory.open( // new CassandraIO(8192, "collene", "cindex").start("127.0.0.1:9042"), // new CassandraIO(8192, "collene", "cmeta").start("127.0.0.1:9042"), // new CassandraIO(8192, "collene", "clock").start("127.0.0.1:9042") // ))); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, false)); QueryParser parser = new QueryParser(Version.LUCENE_4_9, "any", analyzer); long searchStart = System.currentTimeMillis(); Query query = parser.parse("morrissey"); TopDocs docs = searcher.search(query, 10); long searchEnd = System.currentTimeMillis(); out.println(String.format("%s %d total hits in %d", directory.getClass().getSimpleName(), docs.totalHits, searchEnd - searchStart)); for (ScoreDoc d : docs.scoreDocs) { out.println(String.format("%d %.2f %d", d.doc, d.score, d.shardIndex)); } } } if (documents.size() > 0) { out.println(String.format("Adding batch at count %d", count)); writer.addDocuments(documents); out.println("done"); documents.clear(); // do a quick morrissey search for fun. 
IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, false)); QueryParser parser = new QueryParser(Version.LUCENE_4_9, "any", analyzer); long searchStart = System.currentTimeMillis(); Query query = parser.parse("morrissey"); TopDocs docs = searcher.search(query, 10); long searchEnd = System.currentTimeMillis(); out.println(String.format("%s %d total hits in %d", directory.getClass().getSimpleName(), docs.totalHits, searchEnd - searchStart)); for (ScoreDoc d : docs.scoreDocs) { out.println(String.format("%d %.2f %d", d.doc, d.score, d.shardIndex)); } } long indexTime = System.currentTimeMillis() - indexStart; out.println(String.format("Indexed %d things in %d ms (%s)", count, indexTime, directory.toString())); // long startMerge = System.currentTimeMillis(); // writer.forceMerge(1, true); // long endMerge = System.currentTimeMillis(); // out.println(String.format("merge took %d ms", endMerge-startMerge)); out.println("I think these are the files:"); for (String s : directory.listAll()) { out.println(s); } writer.close(true); directory.close(); }
From source file:collene.TestLuceneAssumptions.java
License:Apache License
private static void dump(File fileDir, Directory indexDir) throws Exception { for (File f : fileDir.listFiles()) { //System.out.println(String.format("f %s", f.getAbsolutePath())); }/* w ww . jav a 2 s. com*/ try { for (String s : indexDir.listAll()) { //System.out.println(String.format("i %s", s)); } } catch (AlreadyClosedException ex) { //System.out.println("Cannot list closed directory"); } }
From source file:com.amalto.core.storage.hibernate.HibernateStorage.java
License:Open Source License
private void cleanFullTextIndex(List<ComplexTypeMetadata> sortedTypesToDrop) { if (dataSource.supportFullText()) { try {/*from w ww .java2 s . c o m*/ for (ComplexTypeMetadata typeMetadata : sortedTypesToDrop) { try { Class<?> clazz = storageClassLoader .loadClass(ClassCreator.getClassName(typeMetadata.getName())); File directoryFile = new File( dataSource.getIndexDirectory() + '/' + getName() + '/' + clazz.getName()); if (directoryFile.exists()) { final Directory directory = FSDirectory.open(directoryFile); final String lockName = "delete." + typeMetadata.getName(); //$NON-NLS-1$ // Default 5 sec timeout for lock Lock.With lock = new Lock.With(directory.makeLock(lockName), 5000) { @Override protected Object doBody() throws IOException { String[] files = directory.listAll(); for (String file : files) { if (!file.endsWith(lockName)) { // Don't delete our own lock directory.deleteFile(file); } } return null; } }; lock.run(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Removed full text directory for entity '" + typeMetadata.getName() //$NON-NLS-1$ + "' at '" //$NON-NLS-1$ + directoryFile.getAbsolutePath() + "'"); //$NON-NLS-1$ } } else { LOGGER.warn("Full text index directory for entity '" + typeMetadata.getName() //$NON-NLS-1$ + "' no longer exists. No need to delete it."); //$NON-NLS-1$ } } catch (Exception e) { LOGGER.warn("Could not remove full text directory for '" + typeMetadata.getName() + "'.", //$NON-NLS-1$//$NON-NLS-2$ e); } } } catch (Exception e) { LOGGER.warn("Could not correctly clean full text directory.", e); //$NON-NLS-1$ } } }
From source file:com.bah.lucene.BaseDirectoryTestSuite.java
License:Apache License
/**
 * Wraps a pair of directories so that every operation is mirrored to both the
 * test and control implementations, with pointer/length mismatches reported to
 * stderr. Reads (listAll, fileExists, openInput) are served by the test
 * directory; lock management delegates to the control directory.
 *
 * @param control reference (known-good) directory
 * @param test    directory under test
 * @return a mirroring Directory facade
 */
private Directory getControlDir(final Directory control, final Directory test) {
    return new Directory() {
        @Override
        public Lock makeLock(String name) {
            return control.makeLock(name);
        }

        @Override
        public void clearLock(String name) throws IOException {
            control.clearLock(name);
        }

        @Override
        public void setLockFactory(LockFactory lockFactory) throws IOException {
            control.setLockFactory(lockFactory);
        }

        @Override
        public LockFactory getLockFactory() {
            return control.getLockFactory();
        }

        @Override
        public String getLockID() {
            return control.getLockID();
        }

        @Override
        public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
            control.copy(to, src, dest, context);
        }

        @Override
        public IndexInputSlicer createSlicer(String name, IOContext context) throws IOException {
            return control.createSlicer(name, context);
        }

        @Override
        public IndexOutput createOutput(final String name, IOContext context) throws IOException {
            final IndexOutput testOutput = test.createOutput(name, context);
            final IndexOutput controlOutput = control.createOutput(name, context);
            // Mirror every write to both outputs, comparing positions/lengths.
            return new IndexOutput() {
                @Override
                public void flush() throws IOException {
                    testOutput.flush();
                    controlOutput.flush();
                }

                @Override
                public void close() throws IOException {
                    testOutput.close();
                    controlOutput.close();
                }

                @Override
                public long getFilePointer() {
                    long filePointer = testOutput.getFilePointer();
                    long controlFilePointer = controlOutput.getFilePointer();
                    if (controlFilePointer != filePointer) {
                        System.err.println("Output Name [" + name + "] with filePointer [" + filePointer
                                + "] and control filePointer [" + controlFilePointer + "] does not match");
                    }
                    return filePointer;
                }

                @SuppressWarnings("deprecation")
                @Override
                public void seek(long pos) throws IOException {
                    testOutput.seek(pos);
                    controlOutput.seek(pos);
                }

                @Override
                public long length() throws IOException {
                    long length = testOutput.length();
                    long controlLength = controlOutput.length();
                    if (controlLength != length) {
                        // FIX: corrected "Ouput" typo in the diagnostic message.
                        System.err.println("Output Name [" + name + "] with length [" + length
                                + "] and control length [" + controlLength + "] does not match");
                    }
                    return length;
                }

                @Override
                public void writeByte(byte b) throws IOException {
                    testOutput.writeByte(b);
                    controlOutput.writeByte(b);
                }

                @Override
                public void writeBytes(byte[] b, int offset, int length) throws IOException {
                    testOutput.writeBytes(b, offset, length);
                    controlOutput.writeBytes(b, offset, length);
                }
            };
        }

        @Override
        public IndexInput openInput(final String name, IOContext context) throws IOException {
            final IndexInput testInput = test.openInput(name, context);
            final IndexInput controlInput = control.openInput(name, context);
            return new IndexInputCompare(name, testInput, controlInput);
        }

        @Override
        public String[] listAll() throws IOException {
            return test.listAll();
        }

        @Override
        public boolean fileExists(String name) throws IOException {
            return test.fileExists(name);
        }

        @Override
        public void deleteFile(String name) throws IOException {
            test.deleteFile(name);
            control.deleteFile(name);
        }

        @Override
        public long fileLength(String name) throws IOException {
            long fileLength = test.fileLength(name);
            long controlFileLength = control.fileLength(name);
            if (controlFileLength != fileLength) {
                System.err.println("Input Name [" + name + "] with length [" + fileLength
                        + "] and control length [" + controlFileLength + "] does not match");
            }
            return fileLength;
        }

        @Override
        public void sync(Collection<String> names) throws IOException {
            test.sync(names);
            // FIX: copy-paste bug — the original called test.sync(names) twice
            // and never synced the control directory.
            control.sync(names);
        }

        @Override
        public void close() throws IOException {
            test.close();
            control.close();
        }
    };
}
From source file:com.devwebsphere.wxslucene.GridDirectory.java
License:Open Source License
public static void copy(Directory src, GridDirectory dest, boolean closeDirSrc) throws IOException { final String[] files = src.listAll(); IndexFileNameFilter filter = IndexFileNameFilter.getFilter(); byte[] buf = new byte[COPY_BUFFER_SIZE]; for (int i = 0; i < files.length; i++) { if (!filter.accept(null, files[i])) continue; IndexOutput os = null;/*from www . ja v a 2 s . c o m*/ ChecksumIndexInput is = null; try { // create file in dest directory os = dest.createOutput(files[i]); // read current file is = new ChecksumIndexInput(src.openInput(files[i])); // and copy to dest directory long len = is.length(); long readCount = 0; while (readCount < len) { int toRead = readCount + COPY_BUFFER_SIZE > len ? (int) (len - readCount) : COPY_BUFFER_SIZE; is.readBytes(buf, 0, toRead); os.writeBytes(buf, toRead); readCount += toRead; } long src_sum = is.getChecksum(); os.flush(); // this code can just compare the new file with the old one // to make sure it's copied correctly ChecksumIndexInput dst_check_stream = new ChecksumIndexInput(dest.openInput(files[i])); len = dst_check_stream.length(); readCount = 0; while (readCount < len) { int toRead = readCount + COPY_BUFFER_SIZE > len ? (int) (len - readCount) : COPY_BUFFER_SIZE; dst_check_stream.readBytes(buf, 0, toRead); readCount += toRead; } long dst_sum = dst_check_stream.getChecksum(); if (dst_sum == src_sum) { logger.log(Level.INFO, "Verify " + files[i] + " was successful"); } else { logger.log(Level.INFO, "Verify " + files[i] + " failed"); throw new IllegalStateException("File " + files[i] + " failed verification"); } } finally { // graceful cleanup try { if (os != null) os.close(); } finally { if (is != null) is.close(); } } } if (closeDirSrc) src.close(); }
From source file:com.gentics.cr.lucene.indexaccessor.IndexAccessorFactory.java
License:Apache License
private void createAccessor(final Directory dir, final Analyzer analyzer, final Query query, final Set<Sort> sortFields) throws IOException { IndexAccessor accessor = null;//w w w . ja va2 s . c o m if (query != null) { accessor = new WarmingIndexAccessor(dir, analyzer, query); } else { accessor = new DefaultIndexAccessor(dir, analyzer); } accessor.open(); if (dir.listAll().length == 0) { IndexWriter indexWriter = new IndexWriter(dir, null, true, IndexWriter.MaxFieldLength.UNLIMITED); indexWriter.close(); } IndexAccessor existingAccessor = indexAccessors.putIfAbsent(dir, accessor); if (existingAccessor != null) { accessor.close(); throw new IllegalStateException("IndexAccessor already exists: " + dir); } }
From source file:com.github.rnewson.couchdb.lucene.util.Utils.java
License:Apache License
public static long directorySize(final Directory dir) throws IOException { long result = 0; for (final String name : dir.listAll()) { result += dir.fileLength(name);//from w w w .j ava2s .c o m } return result; }
From source file:com.google.gerrit.pgm.Reindex.java
License:Apache License
private void deleteAll() throws IOException { for (String index : SCHEMA_VERSIONS.keySet()) { File file = new File(sitePaths.index_dir, index); if (file.exists()) { Directory dir = FSDirectory.open(file); try { for (String name : dir.listAll()) { dir.deleteFile(name); }// ww w. j av a2 s. c om } finally { dir.close(); } } } }
From source file:com.ibm.watson.developer_cloud.professor_languo.ingestion.indexing.LuceneIndexer.java
License:Open Source License
/** * clear all the index files in the Lucence directory * /* w w w . j a v a2 s . c o m*/ * @param indexDir - Lucene directory to store index file * @throws IngestionException */ private void clearIndexDirectory(Directory indexDir) throws IngestionException { if (indexDir != null) { try { String[] files = indexDir.listAll(); for (String file : files) indexDir.deleteFile(file); } catch (IOException e) { throw new IngestionException(e); } } }