Example usage for org.apache.lucene.store RAMDirectory close

Introduction

This page collects example usages of org.apache.lucene.store.RAMDirectory#close().

Prototype

@Override
public void close() 

Document

Closes the store to future operations, releasing associated memory.
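
Because Directory implements Closeable, the memory can also be released with try-with-resources instead of an explicit call. A minimal sketch, assuming a Lucene 4.x-style API (the demo method name is illustrative):

import java.io.IOException;

import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

static void demo() throws IOException {
    // try-with-resources calls directory.close() automatically on exit,
    // releasing the memory held by the directory's in-memory files.
    try (RAMDirectory directory = new RAMDirectory()) {
        IndexOutput out = directory.createOutput("demo", IOContext.DEFAULT);
        byte[] data = "hello world".getBytes();
        out.writeBytes(data, data.length);
        out.close();
    } // directory.close() runs here even if an exception was thrown
}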

Usage

From source file:com.appspot.socialinquirer.server.service.impl.AnalysisServiceImpl.java

License:Apache License

/**
 * Closes the RAM directory.
 *
 * @param directory the directory
 */
private void closeRAMDirectory(RAMDirectory directory) {
    if (directory != null) {
        try {
            directory.close();
        } catch (Exception e) {
            // Intentionally ignored: nothing useful can be done if close() fails.
        }
    }
}
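
A null-safe, exception-suppressing close like the helper above also ships with Lucene itself; a sketch, assuming org.apache.lucene.util.IOUtils is available on the classpath:

import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.IOUtils;

RAMDirectory directory = new RAMDirectory();
// ... use the directory ...
// Closes the argument while suppressing any exception thrown by close();
// null arguments are skipped, so no explicit null check is needed.
IOUtils.closeWhileHandlingException(directory);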

From source file:com.bah.lucene.blockcache_v2.CacheIndexInputTest.java

License:Apache License

@Test
public void test1() throws IOException {
    RAMDirectory directory = new RAMDirectory();

    String name = "test";

    IndexOutput output = directory.createOutput(name, IOContext.DEFAULT);
    byte[] bs = "hello world".getBytes();
    output.writeBytes(bs, bs.length);
    output.close();

    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
    Cache cache = getCache();
    CacheIndexInput cacheInput = new CacheIndexInput(null, name, input, cache);
    byte[] buf = new byte[bs.length];
    cacheInput.readBytes(buf, 0, buf.length);
    cacheInput.close();

    assertArrayEquals(bs, buf);
    directory.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheIndexInputTest.java

License:Apache License

@Test
public void test2() throws IOException {
    Cache cache = getCache();
    RAMDirectory directory = new RAMDirectory();
    Random random = new Random(seed);

    String name = "test2";
    long size = (10 * 1024 * 1024) + 13;

    IndexOutput output = directory.createOutput(name, IOContext.DEFAULT);
    writeRandomData(size, random, output);
    output.close();

    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
    IndexInput testInput = new CacheIndexInput(null, name, input.clone(), cache);
    readRandomData(input, testInput, random, sampleSize, maxBufSize, maxOffset);
    readRandomDataShort(input, testInput, random, sampleSize);
    readRandomDataInt(input, testInput, random, sampleSize);
    readRandomDataLong(input, testInput, random, sampleSize);
    testInput.close();
    input.close();
    directory.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheIndexOutputTest.java

License:Apache License

@Test
public void test1() throws IOException {
    Random random = new Random(seed);
    RAMDirectory directory = new RAMDirectory();
    IndexOutput output = directory.createOutput("test", IOContext.DEFAULT);

    Cache cache = CacheIndexInputTest.getCache();
    CacheIndexOutput indexOutput = new CacheIndexOutput(null, "test", output, cache);
    indexOutput.writeByte((byte) 1);
    indexOutput.writeByte((byte) 2);
    byte[] b = new byte[16000];
    random.nextBytes(b);
    indexOutput.writeBytes(b, 16000);
    indexOutput.close();

    IndexInput input = directory.openInput("test", IOContext.DEFAULT);
    assertEquals(16002, input.length());
    assertEquals(1, input.readByte());
    assertEquals(2, input.readByte());

    byte[] buf = new byte[16000];
    input.readBytes(buf, 0, 16000);
    input.close();
    assertArrayEquals(b, buf);
    directory.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheIndexOutputTest.java

License:Apache License

@Test
public void test2() throws IOException {
    Cache cache = CacheIndexInputTest.getCache();
    RAMDirectory directory = new RAMDirectory();
    RAMDirectory directory2 = new RAMDirectory();

    Random random = new Random(seed);

    String name = "test2";
    long size = (10 * 1024 * 1024) + 13;

    IndexOutput output = directory.createOutput(name, IOContext.DEFAULT);
    IndexOutput output2 = directory2.createOutput(name, IOContext.DEFAULT);
    CacheIndexOutput cacheIndexOutput = new CacheIndexOutput(null, name, output2, cache);
    CacheIndexInputTest.writeRandomData(size, random, output, cacheIndexOutput);
    output.close();
    cacheIndexOutput.close();

    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
    IndexInput testInput = directory2.openInput(name, IOContext.DEFAULT);
    CacheIndexInputTest.readRandomData(input, testInput, random, sampleSize, maxBufSize, maxOffset);
    testInput.close();
    input.close();
    directory.close();
    directory2.close();
}

From source file:com.duroty.service.analyzer.LuceneFiltersAnalysis.java

License:Open Source License

public void service(String repositoryName, String messageName, MimeMessage mime)
        throws Exception, Throwable, OutOfMemoryError {
    Session hsession = null;
    RAMDirectory auxDir = null;

    try {
        hsession = hfactory.openSession();

        auxDir = new RAMDirectory();

        IndexWriter auxWriter = new IndexWriter(auxDir, analyzer, true);
        auxWriter.addDocument(luceneMessage.getDocPrincipal());
        auxWriter.optimize();
        auxWriter.close();

        Vector filters = getFilters(hsession, repositoryName);

        boolean setbox = true;

        String box = message.getMesBox();

        if (box.equals("SPAM")) {
            setbox = false;
        } else if (box.equals("DRAFT")) {
            setbox = false;
        }

        if (filters != null) {
            while (filters.size() > 0) {
                Filter filter = (Filter) filters.remove(0);
                IndexSearcher auxSearcher = new IndexSearcher(auxDir);

                org.apache.lucene.search.Query query = FilterQueryParser.parse(filter, analyzer);

                Hits hits = auxSearcher.search(query);

                if (hits.length() > 0) {
                    // A filter matched, so carry out the actions associated with it.
                    if (filter.isFilArchive() && setbox) {
                        // Mark the message so it is filed into the archived folder.
                        message.setMesBox("HIDDEN");
                    } else if (filter.isFilTrash() && setbox) {
                        message.setMesBox("TRASH");
                    }

                    if (filter.isFilImportant()) {
                        message.setMesFlagged(Boolean.TRUE);
                    }

                    if (filter.getLabel() != null) {
                        LabMes labMes = new LabMes(new LabMesId(message, filter.getLabel()));
                        message.addLabMeses(labMes);
                    }

                    if ((filter.getFilForwardTo() != null) && !filter.getFilForwardTo().trim().equals("")) {
                        InternetAddress forwardTo = null;

                        try {
                            forwardTo = new InternetAddress(filter.getFilForwardTo());
                        } catch (Exception e) {
                            forwardTo = null;
                        }

                        if (forwardTo != null) {
                            try {
                                InternetAddress recipient = (InternetAddress) mime.getFrom()[0];
                                forwardMailFromLabel(recipient, forwardTo, "FW: ", mime);
                            } catch (Exception ex) {
                                ex.printStackTrace();
                            }
                        }
                    }
                }
            }
        }
    } catch (Exception ex) {
        DLog.log(DLog.DEBUG, this.getClass(), ex);
    } finally {
        GeneralOperations.closeHibernateSession(hsession);

        if (auxDir != null) {
            auxDir.close();
        }
    }
}

From source file:com.shmsoft.dmass.main.FileProcessor.java

License:Apache License

/**
 * Search metadata and file contents.
 *
 * @param metadata
 * @return true if match is found else false
 */
private boolean isResponsive(Metadata metadata) {
    // set true if search finds a match
    boolean isResponsive = false;

    // get culling parameters
    String queryString = Project.getProject().getCullingAsTextBlock();

    // TODO parse important parameters to mappers and reducers individually, not globally
    IndexWriter writer = null;
    RAMDirectory idx = null;
    try {
        // construct a RAMDirectory to hold the in-memory representation of the index.
        idx = new RAMDirectory();

        // make a writer to create the index
        writer = new IndexWriter(idx, new StandardAnalyzer(Version.LUCENE_30), true,
                IndexWriter.MaxFieldLength.UNLIMITED);

        writer.addDocument(createDocument(metadata));

        // optimize and close the writer to finish building the index
        writer.optimize();
        writer.close();

        //adding the build index to FS
        if (Project.getProject().isLuceneFSIndexEnabled() && luceneIndex != null) {
            luceneIndex.addToIndex(idx);
        }

        SolrIndex.getInstance().addBatchData(metadata);

        if (queryString == null || queryString.trim().isEmpty()) {
            return true;
        }

        // build an IndexSearcher using the in-memory index
        Searcher searcher = new IndexSearcher(idx);
        // search directory
        isResponsive = search(searcher, queryString);

        searcher.close();
    } catch (Exception e) {
        // TODO handle this better
        // if anything happens - don't stop processing
        e.printStackTrace(System.out);
    } finally {
        try {
            if (writer != null) {
                writer.close();
            }
            if (idx != null) {
                idx.close();
            }
        } catch (Exception e) {
            // swallow exception, what else can you do now?
        }
    }
    return isResponsive;
}
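
For this build-one-document-then-query pattern, Lucene also offers a Directory-free alternative in the contrib lucene-memory module. A sketch, assuming Lucene 3.x on the classpath (the method name and field name are illustrative):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Version;

static boolean isResponsiveInMemory(String text, String queryString) throws ParseException {
    // Index a single document entirely in memory; no Directory, no close() needed.
    MemoryIndex index = new MemoryIndex();
    StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);
    index.addField("content", text, analyzer);
    Query query = new QueryParser(Version.LUCENE_30, "content", analyzer).parse(queryString);
    // search() returns a relevance score; greater than zero means the query matched.
    return index.search(query) > 0.0f;
}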

From source file:fi.passiba.services.bibledata.sword.index.lucene.LuceneIndex.java

License:Open Source License

/**
 * Generate an index to use, telling the job about progress as you go.
 * @throws BookException If we fail to read the index files
 */
public LuceneIndex(Book book, URI storage, boolean create) throws BookException {
    assert create;

    this.book = book;
    File finalPath = null;
    try {
        finalPath = NetUtil.getAsFile(storage);
        this.path = finalPath.getCanonicalPath();
    } catch (IOException ex) {
        throw new BookException(UserMsg.LUCENE_INIT, ex);
    }
    System.out.println("index path " + finalPath.getAbsolutePath());
    // Indexing the book is a good way to police data errors.
    DataPolice.setBook(book.getBookMetaData());

    IndexStatus finalStatus = IndexStatus.UNDONE;

    Analyzer analyzer = new LuceneAnalyzer(book);

    List errors = new ArrayList();
    File tempPath = new File(path + '.' + IndexStatus.CREATING.toString());

    try {
        synchronized (CREATING) {

            book.setIndexStatus(IndexStatus.CREATING);

            // An index is created by opening an IndexWriter with the create argument set to true.
            //IndexWriter writer = new IndexWriter(tempPath.getCanonicalPath(), analyzer, true);

            // Create the index in core.
            RAMDirectory ramDir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(ramDir, analyzer, true);

            generateSearchIndexImpl(errors, writer, book.getGlobalKeyList(), 0);

            // Consolidate the index into the minimum number of files.
            // writer.optimize(); /* Optimize is done by addIndexes */
            writer.close();

            // Write the core index to disk.
            IndexWriter fsWriter = new IndexWriter(tempPath.getCanonicalPath(), analyzer, true);
            fsWriter.addIndexes(new Directory[] { ramDir });
            fsWriter.close();

            // Free up the space used by the ram directory
            ramDir.close();

            tempPath.renameTo(finalPath);

            if (finalPath.exists()) {
                finalStatus = IndexStatus.DONE;
            }

            if (errors.size() > 0) {
                StringBuffer buf = new StringBuffer();
                Iterator iter = errors.iterator();
                while (iter.hasNext()) {
                    buf.append(iter.next());
                    buf.append('\n');
                }
                Reporter.informUser(this, UserMsg.BAD_VERSE, buf);
            }

        }
    } catch (IOException ex) {

        throw new BookException(UserMsg.LUCENE_INIT, ex);
    } finally {
        book.setIndexStatus(finalStatus);

    }
}

From source file:fr.ericlab.sondy.algo.eventdetection.ET.java

License:Open Source License

public static LinkedList<String> getFrequentBigrams(String tweets, HashSet<String> bigrams) {
    try {
        LinkedList<String> FCB = new LinkedList<String>();
        WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_36);
        RAMDirectory temporaryIndex = new RAMDirectory();
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_36, analyzer);
        IndexWriter temporaryWriter = new IndexWriter(temporaryIndex, config);
        Document doc = new Document();
        doc.add(new Field("content", tweets, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES));
        temporaryWriter.addDocument(doc);
        temporaryWriter.commit();
        IndexReader temporaryReader = IndexReader.open(temporaryWriter, true);
        TermEnum allTerms = temporaryReader.terms();
        while (allTerms.next()) {
            String term = allTerms.term().text();
            if (bigrams.contains(term)) {
                FCB.add(term);
            }
        }
        temporaryWriter.close();
        temporaryReader.close();
        temporaryIndex.close();
        return FCB;
    } catch (LockObtainFailedException ex) {
        Logger.getLogger(ET.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(ET.class.getName()).log(Level.SEVERE, null, ex);
    }
    return new LinkedList<>();
}

From source file:fr.ericlab.sondy.algo.eventdetection.MABED.java

License:Open Source License

MABEDTopic getRefinedTopic(MABEDTopic simpleTopic, int nbrelatedTerms) {
    MABEDTopic refinedTopic = new MABEDTopic();
    String[] frequentTerms = new String[nbrelatedTerms];
    try {
        StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_36);
        RAMDirectory temporaryIndex = new RAMDirectory();
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_36, analyzer);
        IndexWriter temporaryWriter = new IndexWriter(temporaryIndex, config);
        Document doc = new Document();
        doc.add(new Field("content",
                dbAccess.getMessagesAsString(appVariables, simpleTopic.mainTerm, simpleTopic.I.timeSliceA,
                        simpleTopic.I.timeSliceB),
                Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
        temporaryWriter.addDocument(doc);
        temporaryWriter.commit();
        IndexReader temporaryReader = IndexReader.open(temporaryWriter, true);
        TermEnum allTerms = temporaryReader.terms();
        int minFreq = 0;
        TermInfoList termList = new TermInfoList();
        while (allTerms.next()) {
            String term = allTerms.term().text();
            if (!term.equals(simpleTopic.mainTerm) && term.length() > 1 && !appVariables.isStopWord(term)) {
                int cf = IndexAccess.getTermOccurenceCount(temporaryReader, term);
                if (cf > minFreq) {
                    termList.addTermInfo(new TermInfo(term, (int) cf));
                    termList.sortList();
                    if (termList.size() > nbrelatedTerms) {
                        termList.removeLast();
                    }
                    minFreq = termList.get(termList.size() - 1).occurence;
                }
            }
        }
        for (int i = 0; i < termList.size() && i < nbrelatedTerms; i++) {
            frequentTerms[i] = termList.get(i).text;
        }
        temporaryWriter.close();
        temporaryReader.close();
        temporaryIndex.close();

        float ref[] = indexAccess.getTermFrequency(appVariables, simpleTopic.mainTerm);
        float comp[];
        refinedTopic = new MABEDTopic(simpleTopic.mainTerm, simpleTopic.I, simpleTopic.score,
                simpleTopic.anomaly);
        for (int j = 0; j < nbrelatedTerms && frequentTerms[j] != null; j++) {
            comp = indexAccess.getTermFrequency(appVariables, frequentTerms[j]);
            double w = getErdemCoefficient(ref, comp, simpleTopic.I.timeSliceA, simpleTopic.I.timeSliceB);
            if (w >= _THETA_) {
                refinedTopic.relatedTerms.add(new MABEDWeightedTerm(frequentTerms[j], w));
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(MABED.class.getName()).log(Level.SEVERE, null, ex);
    }
    return refinedTopic;
}