List of usage examples for org.apache.lucene.index.IndexReader.document()
public final Document document(int docID) throws IOException
Returns the stored fields of the n-th Document in this index.
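The examples below are taken from the LIRE project. As a starting point, here is a minimal, self-contained sketch of the call itself; it is not taken from any of the sources below, and the index path and the stored field name "id" are placeholder assumptions that you would replace with your own.

import java.nio.file.Paths;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.FSDirectory;

public class ReadStoredFields {
    public static void main(String[] args) throws Exception {
        // Open a reader on an existing index; "/path/to/index" is a placeholder.
        IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get("/path/to/index")));
        try {
            // Doc IDs range over [0, maxDoc()); document(i) returns only the stored fields.
            // Note: if the index has deletions, deleted doc IDs are still in this range;
            // the examples below skip them via MultiFields.getLiveDocs(reader).
            for (int i = 0; i < reader.maxDoc(); i++) {
                Document doc = reader.document(i);
                // "id" is an assumed stored field name; adjust it to your schema.
                System.out.println(i + " -> " + doc.get("id"));
            }
        } finally {
            reader.close();
        }
    }
}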
From source file:net.semanticmetadata.lire.searchers.forevaluations.GenericFastImageSearcherForEvaluation.java
License:Open Source License
public ImageDuplicates findDuplicates(IndexReader reader) throws IOException {
    // get the first document:
    SimpleImageDuplicates simpleImageDuplicates = null;
    // try {
    //     if (!IndexReader.indexExists(reader.directory()))
    //         throw new FileNotFoundException("No index found at this specific location.");
    Document doc = reader.document(0);
    LireFeature lireFeature = extractorItem.getFeatureInstance();
    if (doc.getField(fieldName).binaryValue() != null && doc.getField(fieldName).binaryValue().length > 0)
        lireFeature.setByteArrayRepresentation(doc.getField(fieldName).binaryValue().bytes,
                doc.getField(fieldName).binaryValue().offset, doc.getField(fieldName).binaryValue().length);
    HashMap<Double, List<String>> duplicates = new HashMap<Double, List<String>>();
    // Needed for check whether the document is deleted.
    Bits liveDocs = MultiFields.getLiveDocs(reader);
    int docs = reader.numDocs();
    int numDuplicates = 0;
    for (int i = 0; i < docs; i++) {
        if (reader.hasDeletions() && !liveDocs.get(i))
            continue; // if it is deleted, just ignore it.
        Document d = reader.document(i);
        double distance = getDistance(d, lireFeature);
        if (!duplicates.containsKey(distance)) {
            duplicates.put(distance, new LinkedList<String>());
        } else {
            numDuplicates++;
        }
        duplicates.get(distance).add(d.getField(DocumentBuilder.FIELD_NAME_IDENTIFIER).stringValue());
    }
    if (numDuplicates == 0)
        return null;
    LinkedList<List<String>> results = new LinkedList<List<String>>();
    for (double d : duplicates.keySet()) {
        if (duplicates.get(d).size() > 1) {
            results.add(duplicates.get(d));
        }
    }
    simpleImageDuplicates = new SimpleImageDuplicates(results);
    // } catch (InstantiationException e) {
    //     logger.log(Level.SEVERE, "Error instantiating class for generic image searcher: " + e.getMessage());
    // } catch (IllegalAccessException e) {
    //     logger.log(Level.SEVERE, "Error instantiating class for generic image searcher: " + e.getMessage());
    // }
    return simpleImageDuplicates;
}
From source file:net.semanticmetadata.lire.searchers.GenericFastImageSearcher.java
License:Open Source License
/**
 * @param reader
 * @param lireFeature
 * @return the maximum distance found for normalizing.
 * @throws IOException
 */
protected double findSimilar(IndexReader reader, LireFeature lireFeature) throws IOException {
    maxDistance = -1d;
    // clear result set ...
    docs.clear();
    // Needed for check whether the document is deleted.
    Bits liveDocs = MultiFields.getLiveDocs(reader);
    Document d;
    double tmpDistance;
    int docs = reader.numDocs();
    if (!isCaching) {
        // we read each and every document from the index and then we compare it to the query.
        for (int i = 0; i < docs; i++) {
            if (reader.hasDeletions() && !liveDocs.get(i))
                continue; // if it is deleted, just ignore it.
            d = reader.document(i);
            tmpDistance = getDistance(d, lireFeature);
            assert (tmpDistance >= 0);
            // if the array is not full yet:
            if (this.docs.size() < maxHits) {
                this.docs.add(new SimpleResult(tmpDistance, i));
                if (tmpDistance > maxDistance)
                    maxDistance = tmpDistance;
            } else if (tmpDistance < maxDistance) {
                // if it is nearer to the sample than at least one of the current set:
                // remove the last one ...
                this.docs.remove(this.docs.last());
                // add the new one ...
                this.docs.add(new SimpleResult(tmpDistance, i));
                // and set our new distance border ...
                maxDistance = this.docs.last().getDistance();
            }
        }
    } else {
        LinkedList<Consumer> tasks = new LinkedList<Consumer>();
        LinkedList<Thread> threads = new LinkedList<Thread>();
        Consumer consumer;
        Thread thread;
        Thread p = new Thread(new Producer());
        p.start();
        for (int i = 0; i < numThreads; i++) {
            consumer = new Consumer(lireFeature);
            thread = new Thread(consumer);
            thread.start();
            tasks.add(consumer);
            threads.add(thread);
        }
        for (Thread next : threads) {
            try {
                next.join();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        TreeSet<SimpleResult> tmpDocs;
        boolean flag;
        SimpleResult simpleResult;
        for (Consumer task : tasks) {
            tmpDocs = task.getResult();
            flag = true;
            while (flag && (tmpDocs.size() > 0)) {
                simpleResult = tmpDocs.pollFirst();
                if (this.docs.size() < maxHits) {
                    this.docs.add(simpleResult);
                    if (simpleResult.getDistance() > maxDistance)
                        maxDistance = simpleResult.getDistance();
                } else if (simpleResult.getDistance() < maxDistance) {
                    // this.docs.remove(this.docs.last());
                    this.docs.pollLast();
                    this.docs.add(simpleResult);
                    maxDistance = this.docs.last().getDistance();
                } else
                    flag = false;
            }
        }
    }
    return maxDistance;
}
From source file:net.semanticmetadata.lire.searchers.LshImageSearcher.java
License:Open Source License
private ImageSearchHits search(String[] hashes, GlobalFeature queryFeature, IndexReader reader)
        throws IOException {
    // first search by text:
    IndexSearcher searcher = new IndexSearcher(reader);
    searcher.setSimilarity(new ClassicSimilarity() {
        @Override
        public float tf(float freq) {
            return 1;
        }

        @Override
        public float idf(long docFreq, long numDocs) {
            return 1;
        }

        @Override
        public float coord(int overlap, int maxOverlap) {
            return 1;
        }

        @Override
        public float queryNorm(float sumOfSquaredWeights) {
            return 1;
        }

        @Override
        public float sloppyFreq(int distance) {
            return 1;
        }

        @Override
        public float lengthNorm(FieldInvertState state) {
            return 1;
        }
    });
    BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
    for (int i = 0; i < hashes.length; i++) {
        // be aware that the hashFunctionsFileName of the field must match the one you put the hashes in before.
        queryBuilder.add(new BooleanClause(new TermQuery(new Term(hashesFieldName, hashes[i] + "")),
                BooleanClause.Occur.SHOULD));
    }
    TopDocs docs = searcher.search(queryBuilder.build(), maxResultsHashBased);
    // then re-rank
    TreeSet<SimpleResult> resultScoreDocs = new TreeSet<SimpleResult>();
    double maxDistance = 0d;
    double tmpScore = 0d;
    for (int i = 0; i < docs.scoreDocs.length; i++) {
        feature.setByteArrayRepresentation(
                reader.document(docs.scoreDocs[i].doc).getBinaryValue(featureFieldName).bytes,
                reader.document(docs.scoreDocs[i].doc).getBinaryValue(featureFieldName).offset,
                reader.document(docs.scoreDocs[i].doc).getBinaryValue(featureFieldName).length);
        tmpScore = queryFeature.getDistance(feature);
        if (resultScoreDocs.size() < maximumHits) {
            resultScoreDocs.add(new SimpleResult(tmpScore, docs.scoreDocs[i].doc));
            maxDistance = Math.max(maxDistance, tmpScore);
        } else if (tmpScore < maxDistance) {
            resultScoreDocs.add(new SimpleResult(tmpScore, docs.scoreDocs[i].doc));
        }
        while (resultScoreDocs.size() > maximumHits) {
            resultScoreDocs.remove(resultScoreDocs.last());
            maxDistance = resultScoreDocs.last().getDistance();
        }
        // resultScoreDocs.add(new SimpleResult(tmpScore, reader.document(docs.scoreDocs[i].doc), docs.scoreDocs[i].doc));
    }
    return new SimpleImageSearchHits(resultScoreDocs, maxDistance);
}
From source file:net.semanticmetadata.lire.searchers.MetricSpacesImageSearcher.java
License:Open Source License
private ImageSearchHits search(String hashes, GlobalFeature queryFeature, IndexReader reader)
        throws IOException {
    // first search by text:
    IndexSearcher searcher = new IndexSearcher(reader);
    searcher.setSimilarity(new BaseSimilarity());
    QueryParser qp = new QueryParser(hashesFieldName, new WhitespaceAnalyzer());
    Query query = null;
    try {
        query = qp.parse(hashes);
    } catch (ParseException e) {
        e.printStackTrace();
    }
    if (query == null)
        return null;
    TopDocs docs = searcher.search(query, maxResultsHashBased);
    // System.out.println(docs.totalHits);
    // then re-rank
    TreeSet<SimpleResult> resultScoreDocs = new TreeSet<SimpleResult>();
    double maxDistance = -1d;
    double tmpScore;
    for (int i = 0; i < docs.scoreDocs.length; i++) {
        feature.setByteArrayRepresentation(
                reader.document(docs.scoreDocs[i].doc).getBinaryValue(featureFieldName).bytes,
                reader.document(docs.scoreDocs[i].doc).getBinaryValue(featureFieldName).offset,
                reader.document(docs.scoreDocs[i].doc).getBinaryValue(featureFieldName).length);
        tmpScore = queryFeature.getDistance(feature);
        assert (tmpScore >= 0);
        if (resultScoreDocs.size() < maximumHits) {
            resultScoreDocs.add(new SimpleResult(tmpScore, docs.scoreDocs[i].doc));
            maxDistance = Math.max(maxDistance, tmpScore);
        } else if (tmpScore < maxDistance) {
            // if it is nearer to the sample than at least one of the current set:
            // remove the last one ...
            resultScoreDocs.remove(resultScoreDocs.last());
            // add the new one ...
            resultScoreDocs.add(new SimpleResult(tmpScore, docs.scoreDocs[i].doc));
            // and set our new distance border ...
            maxDistance = resultScoreDocs.last().getDistance();
        }
    }
    assert (resultScoreDocs.size() <= maximumHits);
    return new SimpleImageSearchHits(resultScoreDocs, maxDistance);
}
From source file:net.semanticmetadata.lire.searchers.TestSearching.java
License:Open Source License
public void testSearch() throws IOException, IllegalAccessException, InstantiationException {
    Cluster[] cvsurf512 = Cluster.readClusters(codebookPath + "CvSURF512");
    Cluster[] simpleceddcvsurf512 = Cluster.readClusters(codebookPath + "SIMPLEdetCVSURFCEDD512");

    ParallelIndexer parallelIndexer = new ParallelIndexer(DocumentBuilder.NUM_OF_THREADS, indexPath,
            testExtensive, numOfClusters, numOfDocsForVocabulary, aggregatorClass);
    parallelIndexer.addExtractor(globalFeatureClass);
    parallelIndexer.addExtractor(localFeatureClass, cvsurf512);
    parallelIndexer.addExtractor(globalFeatureClass, keypointDetector, simpleceddcvsurf512);
    parallelIndexer.run();

    BufferedImage image = ImageIO.read(new FileInputStream(imageToSearch));

    IndexReader reader = DirectoryReader
            .open(new RAMDirectory(FSDirectory.open(Paths.get(indexPath)), IOContext.READONCE));
    System.out.println("Documents in the reader: " + reader.maxDoc());

    GenericFastImageSearcher ceddSearcher = new GenericFastImageSearcher(10, globalFeatureClass, true, reader);
    ImageSearchHits ceddhits = ceddSearcher.search(image, reader);
    String hitFile;
    for (int y = 0; y < ceddhits.length(); y++) {
        hitFile = reader.document(ceddhits.documentID(y)).getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
        System.out.println(y + ". " + hitFile + " " + ceddhits.score(y));
    }
    System.out.println();

    GenericFastImageSearcher cvsurfsearcher = new GenericFastImageSearcher(10, localFeatureClass,
            aggregatorClass.newInstance(), 512, true, reader, indexPath + ".config");
    ImageSearchHits cvsurfhits = cvsurfsearcher.search(image, reader);
    for (int y = 0; y < cvsurfhits.length(); y++) {
        hitFile = reader.document(cvsurfhits.documentID(y)).getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
        System.out.println(y + ". " + hitFile + " " + cvsurfhits.score(y));
    }
    System.out.println();

    GenericFastImageSearcher simpleceddcvsurfsearcher = new GenericFastImageSearcher(10, globalFeatureClass,
            keypointDetector, aggregatorClass.newInstance(), 512, true, reader, indexPath + ".config");
    ImageSearchHits simpleceddcvsurfhits = simpleceddcvsurfsearcher.search(image, reader);
    for (int y = 0; y < simpleceddcvsurfhits.length(); y++) {
        hitFile = reader.document(simpleceddcvsurfhits.documentID(y))
                .getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
        System.out.println(y + ". " + hitFile + " " + simpleceddcvsurfhits.score(y));
    }
    System.out.println();
}
From source file:net.semanticmetadata.lire.searchers.TestSearching.java
License:Open Source License
public void testSearchMulImages() throws IOException, IllegalAccessException, InstantiationException {
    Cluster[] cvsurf512 = Cluster.readClusters(codebookPath + "CvSURF512");
    Cluster[] simpleceddcvsurf512 = Cluster.readClusters(codebookPath + "SIMPLEdetCVSURFCEDD512");

    ParallelIndexer parallelIndexer = new ParallelIndexer(DocumentBuilder.NUM_OF_THREADS, indexPath,
            testExtensive, numOfClusters, numOfDocsForVocabulary, aggregatorClass);
    parallelIndexer.addExtractor(globalFeatureClass);
    parallelIndexer.addExtractor(localFeatureClass, cvsurf512);
    parallelIndexer.addExtractor(globalFeatureClass, keypointDetector, simpleceddcvsurf512);
    parallelIndexer.run();

    ArrayList<String> images = FileUtils.readFileLines(new File(testExtensive), true);

    IndexReader reader = DirectoryReader
            .open(new RAMDirectory(FSDirectory.open(Paths.get(indexPath)), IOContext.READONCE));
    System.out.println("Documents in the reader: " + reader.maxDoc());

    GenericFastImageSearcher ceddSearcher = new GenericFastImageSearcher(1, globalFeatureClass, true, reader);
    GenericFastImageSearcher cvsurfsearcher = new GenericFastImageSearcher(1, localFeatureClass,
            aggregatorClass.newInstance(), 512, true, reader, indexPath + ".config");
    GenericFastImageSearcher simpleceddcvsurfsearcher = new GenericFastImageSearcher(1, globalFeatureClass,
            keypointDetector, aggregatorClass.newInstance(), 512, true, reader, indexPath + ".config");

    BufferedImage image;
    ImageSearchHits ceddhits, cvsurfhits, simpleceddcvsurfhits;
    String hitFile;
    int counter = 0;
    for (String next : images) {
        image = ImageIO.read(new FileInputStream(next));
        next = next.substring(next.lastIndexOf('\\') + 1);
        System.out.println(counter + " ~ " + next);

        ceddhits = ceddSearcher.search(image, reader);
        hitFile = reader.document(ceddhits.documentID(0)).getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
        if (next.equals(hitFile))
            System.out.println(0 + ". " + hitFile + " " + ceddhits.score(0));
        else
            System.err.println("ERROR " + hitFile + " " + ceddhits.score(0) + " ERROR");

        cvsurfhits = cvsurfsearcher.search(image, reader);
        hitFile = reader.document(cvsurfhits.documentID(0)).getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
        if (next.equals(hitFile))
            System.out.println(0 + ". " + hitFile + " " + cvsurfhits.score(0));
        else
            System.err.println("ERROR " + hitFile + " " + cvsurfhits.score(0) + " ERROR");

        simpleceddcvsurfhits = simpleceddcvsurfsearcher.search(image, reader);
        hitFile = reader.document(simpleceddcvsurfhits.documentID(0))
                .getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
        if (next.equals(hitFile))
            System.out.println(0 + ". " + hitFile + " " + simpleceddcvsurfhits.score(0));
        else
            System.err.println("ERROR " + hitFile + " " + simpleceddcvsurfhits.score(0) + " ERROR");

        counter++;
        System.out.println();
    }
}
From source file:net.semanticmetadata.lire.searchers.TestSearching.java
License:Open Source License
public void testSeparateIndex() throws IOException, IllegalAccessException, InstantiationException {
    Cluster[] cvsurf512 = Cluster.readClusters(codebookPath + "CvSURF512");
    Cluster[] simpleceddcvsurf512 = Cluster.readClusters(codebookPath + "SIMPLEdetCVSURFCEDD512");

    ParallelIndexer parallelIndexer = new ParallelIndexer(DocumentBuilder.NUM_OF_THREADS, indexPath,
            testExtensiveRed, numOfClusters, numOfDocsForVocabulary, aggregatorClass);
    parallelIndexer.addExtractor(globalFeatureClass);
    parallelIndexer.addExtractor(localFeatureClass, cvsurf512);
    parallelIndexer.addExtractor(globalFeatureClass, keypointDetector, simpleceddcvsurf512);
    parallelIndexer.run();

    ParallelIndexer parallelIndexerSeparate = new ParallelIndexer(DocumentBuilder.NUM_OF_THREADS,
            indexPathSeparate, testExtensiveBlack, indexPath);
    parallelIndexerSeparate.run();

    IndexReader readerIndex = DirectoryReader
            .open(new RAMDirectory(FSDirectory.open(Paths.get(indexPath)), IOContext.READONCE));
    System.out.println("Documents in the reader: " + readerIndex.maxDoc());

    IndexReader readerQueries = DirectoryReader
            .open(new RAMDirectory(FSDirectory.open(Paths.get(indexPathSeparate)), IOContext.READONCE));
    System.out.println("Documents in the reader: " + readerQueries.maxDoc());

    GenericFastImageSearcher ceddSearcher = new GenericFastImageSearcher(5, globalFeatureClass, true,
            readerIndex);
    GenericFastImageSearcher cvsurfsearcher = new GenericFastImageSearcher(5, localFeatureClass,
            aggregatorClass.newInstance(), 512, true, readerIndex, indexPath + ".config");
    GenericFastImageSearcher simpleceddcvsurfsearcher = new GenericFastImageSearcher(5, globalFeatureClass,
            keypointDetector, aggregatorClass.newInstance(), 512, true, readerIndex, indexPath + ".config");

    Bits liveDocs = MultiFields.getLiveDocs(readerQueries);
    ImageSearchHits ceddhits, cvsurfhits, simpleceddcvsurfhits;
    Document queryDoc;
    String queryfile, hitFile;
    int counter = 0;
    for (int i = 0; i < readerQueries.maxDoc(); i++) {
        if (readerQueries.hasDeletions() && !liveDocs.get(i))
            continue;
        queryDoc = readerQueries.document(i);
        queryfile = queryDoc.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        queryfile = queryfile.substring(queryfile.lastIndexOf('\\') + 1);
        System.out.println(counter + ". Query image: " + queryfile);

        ceddhits = ceddSearcher.search(queryDoc, readerIndex);
        cvsurfhits = cvsurfsearcher.search(queryDoc, readerIndex);
        simpleceddcvsurfhits = simpleceddcvsurfsearcher.search(queryDoc, readerIndex);

        System.out.println("Global:");
        for (int y = 0; y < ceddhits.length(); y++) {
            hitFile = readerIndex.document(ceddhits.documentID(y))
                    .getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
            hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
            System.out.println(y + ". " + hitFile + " " + ceddhits.score(y));
        }
        System.out.println("Local:");
        for (int y = 0; y < cvsurfhits.length(); y++) {
            hitFile = readerIndex.document(cvsurfhits.documentID(y))
                    .getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
            hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
            System.out.println(y + ". " + hitFile + " " + cvsurfhits.score(y));
        }
        System.out.println("Simple:");
        for (int y = 0; y < simpleceddcvsurfhits.length(); y++) {
            hitFile = readerIndex.document(simpleceddcvsurfhits.documentID(y))
                    .getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
            hitFile = hitFile.substring(hitFile.lastIndexOf('\\') + 1);
            System.out.println(y + ". " + hitFile + " " + simpleceddcvsurfhits.score(y));
        }
        System.out.println();
        counter++;
    }
}
From source file:net.semanticmetadata.lire.TestImageSearcher.java
License:Open Source License
public void testCachingSearcher() throws IOException {
    IndexReader ir = DirectoryReader.open(FSDirectory.open(new File("C:\\Temp\\test-100k-cedd-idx")));
    GenericFastImageSearcher is = new GenericFastImageSearcher(1, CEDD.class, true, ir);
    SingleNddCeddImageSearcher nis = new SingleNddCeddImageSearcher(ir);
    LinkedList<Document> q = new LinkedList<Document>();
    for (int i = 0; i < Math.min(1000, ir.maxDoc()); i++) {
        q.add(ir.document(i));
    }
    long time = System.currentTimeMillis();
    int count = 0;
    for (Iterator<Document> iterator = q.iterator(); iterator.hasNext();) {
        Document next = iterator.next();
        String id = is.search(next, ir).doc(0).getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        CEDD cedd = new CEDD();
        BytesRef binaryValue = next.getBinaryValue(cedd.getFieldName());
        cedd.setByteArrayRepresentation(binaryValue.bytes, binaryValue.offset, binaryValue.length);
        String s = nis.findMostSimilar(cedd).getDocument().getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        String qID = next.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0];
        System.out.println(s.equals(id) + " " + id.equals(qID) + " " + qID.equals(s));
        count++;
        if (count > 100)
            break;
    }
    long l = System.currentTimeMillis() - time;
    System.out.printf("Tested %d search requests on %d documents: overall time of %d:%02d, %.2f ms per search",
            count, ir.maxDoc(), l / (1000 * 60), (l / 1000) % 60, ((float) l / (float) count));
}
From source file:net.semanticmetadata.lire.TestImageSearcher.java
License:Open Source License
public void testCustomCachingSearcher() throws IOException {
    IndexReader ir = DirectoryReader.open(FSDirectory.open(new File("C:\\Temp\\test-100k-cedd-idx")));
    SingleNddCeddImageSearcher is = new SingleNddCeddImageSearcher(ir);
    LinkedList<LireFeature> q = new LinkedList<LireFeature>();
    for (int i = 0; i < ir.maxDoc(); i++) {
        Document d = ir.document(i);
        CEDD cedd = new CEDD();
        BytesRef binaryValue = d.getBinaryValue(cedd.getFieldName());
        cedd.setByteArrayRepresentation(binaryValue.bytes, binaryValue.offset, binaryValue.length);
        q.add(cedd);
    }
    long time = System.currentTimeMillis();
    int count = 0;
    for (Iterator<LireFeature> iterator = q.iterator(); iterator.hasNext();) {
        LireFeature next = iterator.next();
        is.findMostSimilar(next);
        count++;
        if (count > 100)
            break;
    }
    long l = System.currentTimeMillis() - time;
    System.out.printf("Tested %d search requests on %d documents: overall time of %d:%02d, %.2f ms per search",
            count, ir.maxDoc(), l / (1000 * 60), (l / 1000) % 60, ((float) l / (float) count));
}
From source file:net.semanticmetadata.lire.TestImageSearcher.java
License:Open Source License
public void testCachingSearcherParallel() throws IOException, InterruptedException {
    final IndexReader ir = DirectoryReader.open(FSDirectory.open(new File("C:\\Temp\\test-100k-cedd-idx")));
    SingleNddCeddImageSearcher is = new SingleNddCeddImageSearcher(ir);
    LinkedList<LireFeature> q = new LinkedList<LireFeature>();
    for (int i = 0; i < ir.maxDoc(); i++) {
        Document d = ir.document(i);
        CEDD cedd = new CEDD();
        BytesRef binaryValue = d.getBinaryValue(cedd.getFieldName());
        cedd.setByteArrayRepresentation(binaryValue.bytes, binaryValue.offset, binaryValue.length);
        q.add(cedd);
    }
    int count = 0;
    Thread[] searchers = new Thread[3];
    final LinkedBlockingQueue<LireFeature> queryQueue = new LinkedBlockingQueue<LireFeature>(1000);
    for (int i = 0; i < searchers.length; i++) {
        searchers[i] = new Thread(new Runnable() {
            @Override
            public void run() {
                SingleNddCeddImageSearcher is = new SingleNddCeddImageSearcher(ir);
                LireFeature remove;
                while ((remove = queryQueue.remove()) instanceof CEDD) {
                    try {
                        is.findMostSimilar(remove);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        });
        searchers[i].start();
    }
    long time = System.currentTimeMillis();
    for (Iterator<LireFeature> iterator = q.iterator(); iterator.hasNext() && count < 1000;) {
        LireFeature next = iterator.next();
        try {
            queryQueue.put(next);
            count++;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    for (int i = 0; i < 8; i++) {
        queryQueue.put(new ScalableColor());
    }
    for (int i = 0; i < searchers.length; i++) {
        searchers[i].join();
    }
    long l = System.currentTimeMillis() - time;
    System.out.printf("Tested %d search requests on %d documents: overall time of %d:%02d, %.2f ms per search",
            count, ir.maxDoc(), l / (1000 * 60), (l / 1000) % 60, ((float) l / (float) count));
}