List of usage examples for org.apache.lucene.index IndexReader numDocs
public abstract int numDocs();
From source file:nl.uva.mlc.eurovoc.analyzer.PropagationAnalyzer.java
/**
 * Iterates over every document in the test index, rebuilds an {@code EuroVocDoc}
 * from its stored fields, and feeds it to {@code Quering}.
 *
 * Fixes over the original: the reader is now closed in a finally block (it leaked
 * before), the document count is read once instead of on every loop test, and each
 * document's stored fields are loaded once instead of via six separate
 * {@code document(i)} calls per iteration.
 */
private void testIndexDocReader() {
    IndexReader testIreader = null;
    try {
        testIreader = IndexReader
                .open(new SimpleFSDirectory(new File(configFile.getProperty("TEST_INDEX_PATH"))));
        // numDocs() is constant for the lifetime of this reader; hoist it out of the loop.
        int numDocs = testIreader.numDocs();
        for (int i = 0; i < numDocs; i++) {
            // Load the stored document once and read all fields from it.
            org.apache.lucene.document.Document luceneDoc = testIreader.document(i);
            String id = luceneDoc.get("ID");
            String title = luceneDoc.get("TITLE");
            String text = luceneDoc.get("TEXT");
            String namedEntities = luceneDoc.get("NAMEDENTITIES");
            String[] classes = luceneDoc.get("CLASSES").split("\\s+");
            EuroVocDoc doc = new EuroVocDoc(id, title, text, namedEntities,
                    new ArrayList<String>(Arrays.asList(classes)));
            Quering(doc);
            log.info(i + " from " + numDocs);
        }
    } catch (IOException ex) {
        log.error(ex);
    } finally {
        // The original leaked the reader; always release the underlying index files.
        if (testIreader != null) {
            try {
                testIreader.close();
            } catch (IOException ex) {
                log.error(ex);
            }
        }
    }
}
From source file:org.abondar.experimental.eventsearch.SearchData.java
public String[] getEventData(String evCategory) throws IOException, org.apache.lucene.queryparser.classic.ParseException { Analyzer an = new StandardAnalyzer(Version.LUCENE_44); Query q = new QueryParser(Version.LUCENE_44, "category", an).parse(evCategory); Directory dir = FSDirectory.open(new File(indexPath)); IndexReader reader = DirectoryReader.open(dir); Integer numDocs = reader.numDocs(); IndexSearcher searcher = new IndexSearcher(reader); ScoreDoc[] hits = searcher.search(q, numDocs).scoreDocs; String[] res = new String[hits.length]; for (int i = 0; i < hits.length; i++) { int docId = hits[i].doc; res[i] = searcher.doc(docId).get("path"); }//from ww w. ja v a 2 s . c o m return res; }
From source file:org.alfresco.repo.search.impl.lucene.index.IndexInfo.java
License:Open Source License
/**
 * Returns the number of live (non-deleted) documents in the main index.
 *
 * A reference-counting read-only reader is acquired for the duration of the
 * count and released unconditionally, even if {@code numDocs()} fails.
 *
 * @return the live document count of the main index
 * @throws IOException if the main index cannot be opened or read
 */
public int getNumberOfDocuments() throws IOException {
    final IndexReader mainIndexReader = getMainIndexReferenceCountingReadOnlyIndexReader();
    try {
        final int documentCount = mainIndexReader.numDocs();
        return documentCount;
    } finally {
        mainIndexReader.close();
    }
}
From source file:org.alfresco.repo.search.impl.lucene.index.IndexInfoTest.java
License:Open Source License
/**
 * Adds one document per WORD_LIST entry through the full IndexInfo transaction
 * lifecycle (ACTIVE -> PREPARING -> PREPARED -> COMMITTING -> COMMITTED) and
 * verifies at each stage which documents are visible:
 *  - main reader before commit: only previously committed docs (count == i)
 *  - delta-merged reader at PREPARED: committed docs plus the new one (i + 1)
 *  - main reader after COMMITTED: the new doc is now globally visible (i + 1)
 */
public void testCreateAndSearch() throws IOException {
    System.setProperty("disableLuceneLocks", "true");

    // no deletions - create only (the loop bound of 0 intentionally leaves the set empty)
    HashSet<String> deletions = new HashSet<String>();
    for (int i = 0; i < 0; i++) {
        deletions.add(new NodeRef(new StoreRef("woof", "bingle"), GUID.generate()).toString());
    }
    // Fresh index directory per run, keyed by timestamp.
    File tempLocation = TempFileProvider.getTempDir();
    File testArea = new File(tempLocation, "IndexInfoTest");
    File testDir = new File(testArea, "" + System.currentTimeMillis());
    final IndexInfo ii = IndexInfo.getIndexInfo(testDir, null);
    for (int i = 0; i < WORD_LIST.length; i++) {
        // Before this transaction: exactly i documents committed so far.
        IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i);
        reader.close();
        // Open a transaction and write one document into its delta index.
        String guid = GUID.generate();
        ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
        IndexWriter writer = ii.getDeltaIndexWriter(guid, new AlfrescoStandardAnalyser());
        Document doc = new Document();
        for (int k = 0; k < 15; k++) {
            doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED,
                    Field.TermVector.NO));
        }
        doc.add(new Field("TEXT", WORD_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED,
                Field.TermVector.NO));
        writer.addDocument(doc);
        ii.closeDeltaIndexWriter(guid);
        ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
        ii.setPreparedState(guid, deletions, Collections.<String>emptySet(), 1, false);
        ii.getDeletions(guid);
        ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
        // PREPARED but not committed: the main reader must NOT see the new doc yet.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i);
        for (int j = 0; j < WORD_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", WORD_LIST[j]));
            if (j < i) {
                // Previously committed words are findable, in insertion order.
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // A reader that merges in this transaction's delta DOES see the new doc.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, deletions,
                Collections.<String>emptySet(), false);
        assertEquals(reader.numDocs(), i + 1);
        for (int j = 0; j < WORD_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", WORD_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Commit: the new doc becomes visible to plain main-index readers.
        ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
        ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i + 1);
        for (int j = 0; j < WORD_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", WORD_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
    }
}
From source file:org.alfresco.repo.search.impl.lucene.index.IndexInfoTest.java
License:Open Source License
/**
 * Two-phase IndexInfo test: first commits one document per CREATE_LIST entry
 * (verifying delta/main visibility at each transaction state, as in
 * testCreateAndSearch), then deletes the documents one at a time by NodeRef
 * and verifies the visible count and searchable terms shrink accordingly:
 *  - main reader before a delete commits: CREATE_LIST.length - i docs
 *  - delta-merged reader at PREPARED: one fewer (the pending delete applied)
 *  - main reader after COMMITTED: the deletion is globally visible
 */
public void testCreateDeleteAndSearch() throws IOException {
    assertEquals(CREATE_LIST.length, UPDATE_LIST.length);
    StoreRef storeRef = new StoreRef("woof", "bingle");
    System.setProperty("disableLuceneLocks", "true");

    // no deletions - create only (phase 1); NodeRefs are remembered for phase 2 deletes.
    ArrayList<NodeRef> nodeRefs = new ArrayList<NodeRef>();
    File tempLocation = TempFileProvider.getTempDir();
    File testArea = new File(tempLocation, "IndexInfoTest");
    File testDir = new File(testArea, "" + System.currentTimeMillis());
    final IndexInfo ii = IndexInfo.getIndexInfo(testDir, null);

    // ---- Phase 1: create one committed document per CREATE_LIST entry ----
    for (int i = 0; i < CREATE_LIST.length; i++) {
        IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i);
        reader.close();
        String guid = GUID.generate();
        ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
        IndexWriter writer = ii.getDeltaIndexWriter(guid, new AlfrescoStandardAnalyser());
        Document doc = new Document();
        for (int k = 0; k < 15; k++) {
            doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED,
                    Field.TermVector.NO));
        }
        doc.add(new Field("TEXT", CREATE_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED,
                Field.TermVector.NO));
        // The "ID" field carries the NodeRef used later to delete this document.
        NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
        nodeRefs.add(nodeRef);
        doc.add(new Field("ID", nodeRef.toString(), Field.Store.NO, Field.Index.UN_TOKENIZED,
                Field.TermVector.NO));
        writer.addDocument(doc);
        ii.closeDeltaIndexWriter(guid);
        ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
        ii.setPreparedState(guid, Collections.<String>emptySet(), Collections.<String>emptySet(), 1,
                false);
        ii.getDeletions(guid);
        ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
        // PREPARED: main reader still excludes the new doc.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i);
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j < i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Delta-merged reader sees the pending doc.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid,
                Collections.<String>emptySet(), Collections.<String>emptySet(), false);
        assertEquals(reader.numDocs(), i + 1);
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Commit makes it globally visible.
        ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
        ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i + 1);
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
    }

    // ---- Phase 2: delete the documents one by one, by NodeRef ----
    for (int i = 0; i < CREATE_LIST.length; i++) {
        HashSet<String> deletions = new HashSet<String>();
        deletions.add(nodeRefs.get(i).toString());
        IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), CREATE_LIST.length - i);
        reader.close();
        // Delete-only transaction: no delta writer content, just a prepared deletion set.
        String guid = GUID.generate();
        ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
        ii.closeDeltaIndexWriter(guid);
        ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
        ii.setPreparedState(guid, deletions, Collections.<String>emptySet(), 1, false);
        ii.getDeletions(guid);
        ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
        // PREPARED: main reader still shows the doc as present.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), CREATE_LIST.length - i);
        // Doc ids shift as segments merge, so only monotonic ordering is asserted.
        int lastDoc = -1;
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j >= i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Delta-merged reader already applies the pending delete.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, deletions,
                Collections.<String>emptySet(), false);
        assertEquals(reader.numDocs(), UPDATE_LIST.length - i - 1);
        lastDoc = -1;
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j > i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Commit the deletion.
        ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
        ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), UPDATE_LIST.length - i - 1);
        lastDoc = -1;
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j > i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Reference-counting sanity check: multiple concurrent readers close cleanly.
        IndexReader reader1 = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        IndexReader reader2 = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        IndexReader reader3 = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        reader3.close();
        reader2.close();
        reader1.close();
    }
}
From source file:org.alfresco.repo.search.impl.lucene.index.IndexInfoTest.java
License:Open Source License
/**
 * Two-phase IndexInfo test: first commits one document per CREATE_LIST entry,
 * then "updates" each one — a single transaction that deletes the old document
 * (by NodeRef) and adds a replacement carrying the corresponding UPDATE_LIST
 * term. The total document count therefore stays at UPDATE_LIST.length
 * throughout phase 2, while searchable terms migrate from CREATE_LIST to
 * UPDATE_LIST as each update commits.
 */
public void testCreateUpdateAndSearch() throws IOException {
    assertEquals(CREATE_LIST.length, UPDATE_LIST.length);
    StoreRef storeRef = new StoreRef("woof", "bingle");
    System.setProperty("disableLuceneLocks", "true");

    // no deletions - create only (phase 1); NodeRefs are remembered for phase 2 updates.
    ArrayList<NodeRef> nodeRefs = new ArrayList<NodeRef>();
    File tempLocation = TempFileProvider.getTempDir();
    File testArea = new File(tempLocation, "IndexInfoTest");
    File testDir = new File(testArea, "" + System.currentTimeMillis());
    final IndexInfo ii = IndexInfo.getIndexInfo(testDir, null);

    // ---- Phase 1: create one committed document per CREATE_LIST entry ----
    for (int i = 0; i < CREATE_LIST.length; i++) {
        IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i);
        reader.close();
        String guid = GUID.generate();
        ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
        IndexWriter writer = ii.getDeltaIndexWriter(guid, new AlfrescoStandardAnalyser());
        Document doc = new Document();
        for (int k = 0; k < 15; k++) {
            doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED,
                    Field.TermVector.NO));
        }
        doc.add(new Field("TEXT", CREATE_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED,
                Field.TermVector.NO));
        // The "ID" field carries the NodeRef used in phase 2 to delete-and-replace.
        NodeRef nodeRef = new NodeRef(storeRef, GUID.generate());
        nodeRefs.add(nodeRef);
        doc.add(new Field("ID", nodeRef.toString(), Field.Store.NO, Field.Index.UN_TOKENIZED,
                Field.TermVector.NO));
        writer.addDocument(doc);
        ii.closeDeltaIndexWriter(guid);
        ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
        ii.setPreparedState(guid, Collections.<String>emptySet(), Collections.<String>emptySet(), 1,
                false);
        ii.getDeletions(guid);
        ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
        // PREPARED: main reader still excludes the new doc.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i);
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j < i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Delta-merged reader sees the pending doc.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid,
                Collections.<String>emptySet(), Collections.<String>emptySet(), false);
        assertEquals(reader.numDocs(), i + 1);
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Commit makes it globally visible.
        ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
        ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), i + 1);
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertEquals(tds.doc(), j);
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
    }

    // ---- Phase 2: update each document (delete old NodeRef + add UPDATE_LIST doc) ----
    for (int i = 0; i < UPDATE_LIST.length; i++) {
        HashSet<String> deletions = new HashSet<String>();
        deletions.add(nodeRefs.get(i).toString());
        // Count is stable across updates: one doc removed, one added per transaction.
        IndexReader reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), UPDATE_LIST.length);
        reader.close();
        String guid = GUID.generate();
        ii.setStatus(guid, TransactionStatus.ACTIVE, null, null);
        IndexWriter writer = ii.getDeltaIndexWriter(guid, new AlfrescoStandardAnalyser());
        Document doc = new Document();
        for (int k = 0; k < 15; k++) {
            doc.add(new Field("ID" + k, guid, Field.Store.NO, Field.Index.UN_TOKENIZED,
                    Field.TermVector.NO));
        }
        doc.add(new Field("TEXT", UPDATE_LIST[i], Field.Store.NO, Field.Index.UN_TOKENIZED,
                Field.TermVector.NO));
        writer.addDocument(doc);
        ii.closeDeltaIndexWriter(guid);
        ii.setStatus(guid, TransactionStatus.PREPARING, null, null);
        ii.setPreparedState(guid, deletions, Collections.<String>emptySet(), 1, false);
        ii.getDeletions(guid);
        ii.setStatus(guid, TransactionStatus.PREPARED, null, null);
        // PREPARED: main reader still shows the pre-update state.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), UPDATE_LIST.length);
        // Doc ids shift as segments merge, so only monotonic ordering is asserted.
        int lastDoc = -1;
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j >= i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        for (int j = 0; j < UPDATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", UPDATE_LIST[j]));
            if (j < i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Delta-merged reader already applies this update.
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader(guid, deletions,
                Collections.<String>emptySet(), false);
        assertEquals(reader.numDocs(), UPDATE_LIST.length);
        lastDoc = -1;
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j > i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        for (int j = 0; j < UPDATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", UPDATE_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
        // Commit the update.
        ii.setStatus(guid, TransactionStatus.COMMITTING, null, null);
        ii.setStatus(guid, TransactionStatus.COMMITTED, null, null);
        reader = ii.getMainIndexReferenceCountingReadOnlyIndexReader();
        assertEquals(reader.numDocs(), UPDATE_LIST.length);
        lastDoc = -1;
        for (int j = 0; j < CREATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", CREATE_LIST[j]));
            if (j > i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        for (int j = 0; j < UPDATE_LIST.length; j++) {
            TermDocs tds = reader.termDocs(new Term("TEXT", UPDATE_LIST[j]));
            if (j <= i) {
                assertTrue(tds.next());
                assertTrue(tds.doc() > lastDoc);
                lastDoc = tds.doc();
            } else {
                assertFalse(tds.next());
            }
            tds.close();
        }
        reader.close();
    }
}
From source file:org.apache.blur.manager.writer.BlurIndexSimpleWriterTest.java
License:Apache License
@Test public void testRollbackAndReopen() throws IOException { setupWriter(_configuration);/*from ww w . j a va2 s . co m*/ { IndexSearcherCloseable searcher = _writer.getIndexSearcher(); IndexReader reader = searcher.getIndexReader(); assertEquals(0, reader.numDocs()); searcher.close(); } MutatableAction action = new MutatableAction(_writer.getShardContext()); action.replaceRow(new Row()); try { _writer.process(action); fail("should throw exception"); } catch (IOException e) { // do nothing } { IndexSearcherCloseable searcher = _writer.getIndexSearcher(); IndexReader reader = searcher.getIndexReader(); assertEquals(0, reader.numDocs()); searcher.close(); } action.replaceRow(genRow()); _writer.process(action); { IndexSearcherCloseable searcher = _writer.getIndexSearcher(); IndexReader reader = searcher.getIndexReader(); assertEquals(1, reader.numDocs()); searcher.close(); } }
From source file:org.apache.blur.manager.writer.BlurIndexSimpleWriterTest.java
License:Apache License
/**
 * Indexes TEST_NUMBER_WAIT_VISIBLE rows one mutation at a time, asserting after
 * each that the row is immediately visible to a fresh searcher, then prints the
 * achieved indexing rate. Trace storage is swapped for a no-op implementation
 * for the duration of the run and restored at the end.
 */
@Test
public void testBlurIndexWriter() throws IOException {
    setupWriter(_configuration);
    long s = System.nanoTime();
    int total = 0;
    // Install a no-op trace sink so tracing overhead/output doesn't affect the test.
    TraceStorage oldStorage = Trace.getStorage();
    Trace.setStorage(new BaseTraceStorage(new BlurConfiguration()) {
        @Override
        public void close() throws IOException {
        }

        @Override
        public void store(TraceCollector collector) {
            // Intentionally discard collected traces.
        }
    });
    Trace.setupTrace("test");
    for (int i = 0; i < TEST_NUMBER_WAIT_VISIBLE; i++) {
        MutatableAction action = new MutatableAction(_writer.getShardContext());
        action.replaceRow(genRow());
        _writer.process(action);
        // Each processed row must be visible to a searcher opened right after.
        IndexSearcherCloseable searcher = _writer.getIndexSearcher();
        IndexReader reader = searcher.getIndexReader();
        assertEquals(i + 1, reader.numDocs());
        searcher.close();
        total++;
    }
    Trace.tearDownTrace();
    long e = System.nanoTime();
    double seconds = (e - s) / 1000000000.0;
    double rate = total / seconds;
    System.out.println("Rate " + rate);
    // Final sanity check on the total visible document count.
    IndexSearcherCloseable searcher = _writer.getIndexSearcher();
    IndexReader reader = searcher.getIndexReader();
    assertEquals(TEST_NUMBER_WAIT_VISIBLE, reader.numDocs());
    searcher.close();
    // Restore the original trace storage for subsequent tests.
    Trace.setStorage(oldStorage);
}
From source file:org.apache.blur.manager.writer.BlurIndexSimpleWriterTest.java
License:Apache License
@Test public void testBlurIndexWriterFaster() throws IOException, InterruptedException { setupWriter(_configuration);// w ww.j ava 2 s . co m IndexSearcherCloseable searcher1 = _writer.getIndexSearcher(); IndexReader reader1 = searcher1.getIndexReader(); assertEquals(0, reader1.numDocs()); searcher1.close(); long s = System.nanoTime(); int total = 0; MutatableAction action = new MutatableAction(_writer.getShardContext()); for (int i = 0; i < TEST_NUMBER; i++) { action.replaceRow(genRow()); total++; } _writer.process(action); long e = System.nanoTime(); double seconds = (e - s) / 1000000000.0; double rate = total / seconds; System.out.println("Rate " + rate); // //wait one second for the data to become visible the test is set to // refresh once every 25 ms Thread.sleep(1000);// Hack for now _writer.refresh(); IndexSearcherCloseable searcher2 = _writer.getIndexSearcher(); IndexReader reader2 = searcher2.getIndexReader(); assertEquals(TEST_NUMBER, reader2.numDocs()); searcher2.close(); }
From source file:org.apache.ctakes.dictionary.lookup.ae.UmlsToSnomedLuceneConsumerImpl.java
License:Apache License
public UmlsToSnomedLuceneConsumerImpl(UimaContext aCtx, Properties properties, int maxListSize) throws Exception { super(aCtx, properties); iv_maxListSize = maxListSize;//from w w w. j a v a 2 s . c om IndexReader indexReader; String indexDirAbsPath = null; try { // ohnlp Bugs tracker ID: 3425014 SNOMED lucene dictionary lookup hardcodes resource path FileResource fResrc = (FileResource) aCtx.getResourceObject(SNOMED_CODE_LIST_CONFIG_PARM); if (fResrc == null) { logger.error("Unable to find config parm " + SNOMED_CODE_LIST_CONFIG_PARM + "."); } File indexDir = fResrc.getFile(); indexDirAbsPath = indexDir.getAbsolutePath(); try { logger.info("Using lucene index: " + indexDir.getAbsolutePath()); } catch (Exception e) { throw new AnnotatorConfigurationException(e); } // For the sample dictionary, we use the following lucene index. //indexPath = "lookup/snomed-like_codes_sample"; indexReader = IndexReader.open(FSDirectory.open(indexDir)); IndexSearcher indexSearcher = new IndexSearcher(indexReader); String lookupFieldName = props.getProperty(CUI_MAPPING_PRP_KEY); // We will lookup entries based on lookupFieldName snomedLikeCodesIndex = new LuceneDictionaryImpl(indexSearcher, lookupFieldName, iv_maxListSize); logger.info("Loaded Lucene index with " + indexReader.numDocs() + " entries."); } catch (IOException ioe) { logger.info("Lucene index: " + indexDirAbsPath); throw new DictionaryException(ioe); } }