Example usage for org.apache.lucene.store IndexInput close

List of usage examples for org.apache.lucene.store IndexInput close

Introduction

On this page you can find example usages of org.apache.lucene.store IndexInput close.

Prototype

@Override
public abstract void close() throws IOException;

Source Link

Document

Closes the stream to further operations.

Usage

From source file:axiom.objectmodel.dom.LuceneManager.java

License:Open Source License

/**
 * Reads the contents of the {@code segments.new} file from {@code dir} and
 * commits it to the relational {@code Lucene} table: existing "valid" rows for
 * this application's db-home are invalidated, then a new valid row carrying the
 * segment bytes is inserted. The {@code segments.new} file is deleted afterwards
 * regardless of success.
 *
 * @param segmentsNew name of the segments file to commit; when {@code null},
 *                    falls back to {@link TransFSDirectory#SEGMENTS_NEW}
 * @param conn        open JDBC connection used for the UPDATE/INSERT
 * @param app         application used for logging and for its db-home name
 * @param dir         Lucene directory containing the segments file
 * @throws TransactionException if the file cannot be read, is empty, or the
 *                              database update fails
 */
public static void commitSegments(String segmentsNew, Connection conn, Application app, Directory dir) {
    byte[] segmentContents = null;
    if (segmentsNew == null) {
        // Default file name; TODO: switch to IndexFileNames.getSegmentsNewFileName().
        segmentsNew = TransFSDirectory.SEGMENTS_NEW;
    }
    IndexInput input = null;
    try {
        // Slurp the whole segments file into memory.
        input = dir.openInput(segmentsNew);
        int length = (int) input.length();
        segmentContents = new byte[length];
        try {
            input.readBytes(segmentContents, 0, length);
        } catch (IOException ioe) {
            // A failed read leaves segmentContents null; caught by the emptiness
            // check below rather than propagated here.
            segmentContents = null;
        }
    } catch (Exception ex) {
        app.logError(ErrorReporter.errorMsg(LuceneManager.class, "commitSegments"), ex);
        throw new TransactionException("LuceneTransaction.executeSubTransaction(): " + ex.getMessage());
    } finally {
        if (input != null) {
            try {
                input.close();
            } catch (Exception ignore) {
                // Best-effort close; nothing useful to do on failure.
            }
            input = null;
        }
    }

    if (segmentContents == null || segmentContents.length == 0) {
        throw new TransactionException("LuceneTransaction.executeSubTransaction(): "
                + "The segments.new file does not contain any data to save.");
    }

    PreparedStatement pstmt = null;
    ByteArrayInputStream bais = null;
    // Tracks whether the try-block already threw, so the finally-block does not
    // mask the original failure with a close() exception.
    boolean exceptionOccured = false;

    try {
        // Invalidate the previously-valid row(s) for this db-home.
        String sql = "UPDATE Lucene SET valid = ?, version = ? " + "WHERE valid = ? AND db_home = ?";
        pstmt = conn.prepareStatement(sql);
        int count = 1;
        pstmt.setBoolean(count++, false);
        pstmt.setInt(count++, getLuceneVersion());
        pstmt.setBoolean(count++, true);
        pstmt.setString(count++, app.getDbDir().getName());
        pstmt.executeUpdate();
        pstmt.close();
        pstmt = null;

        // Insert the new valid row carrying the segment bytes.
        sql = "INSERT INTO Lucene (valid, db_home, segments, version) " + "VALUES (?,?,?,?)";
        pstmt = conn.prepareStatement(sql);
        count = 1;
        pstmt.setBoolean(count++, true);
        pstmt.setString(count++, app.getDbDir().getName());
        bais = new ByteArrayInputStream(segmentContents);
        pstmt.setBinaryStream(count++, bais, segmentContents.length);
        pstmt.setInt(count++, getLuceneVersion());
        int rows = pstmt.executeUpdate();
        if (rows < 1) {
            throw new Exception(
                    "LuceneTransactionManager.executeTransaction(): update didn't affect any rows in the database");
        }
    } catch (Exception ex) {
        exceptionOccured = true;
        throw new TransactionException(ex.getMessage());
    } finally {
        try {
            dir.deleteFile(segmentsNew);
        } catch (IOException ioex) {
            // A leftover segments.new file is harmless: it will be overwritten
            // by the next Lucene write operation, so only log a warning.
            app.logEvent(ErrorReporter.warningMsg(LuceneManager.class, "commitSegments") + "Could not delete "
                    + segmentsNew);
        }

        if (bais != null) {
            try {
                bais.close();
            } catch (Exception ignoreit) {
                // ByteArrayInputStream.close() is a no-op; ignore defensively.
            }
            bais = null;
        }
        segmentContents = null;

        if (pstmt != null) {
            try {
                pstmt.close();
            } catch (SQLException sqle) {
                if (!exceptionOccured) {
                    throw new TransactionException(sqle.getMessage());
                }
            }
            pstmt = null;
        }
    }
}

From source file:axiom.objectmodel.dom.LuceneManager.java

License:Open Source License

/**
 * Reads the contents of the {@code segments.new} file from {@code dir} and
 * commits it to the relational {@code Lucene} table: existing "valid" rows for
 * the given db-home are invalidated, then a new valid row carrying the segment
 * bytes is inserted. The {@code segments.new} file is deleted afterwards
 * regardless of success.
 *
 * @param segmentsNew name of the segments file to commit; when {@code null},
 *                    falls back to {@link TransFSDirectory#SEGMENTS_NEW}
 * @param conn        open JDBC connection used for the UPDATE/INSERT
 * @param dbhome      database home directory whose name keys the Lucene table
 * @param dir         Lucene directory containing the segments file
 * @throws TransactionException if the file cannot be read, is empty, or the
 *                              database update fails
 */
public static void commitSegments(String segmentsNew, Connection conn, File dbhome, Directory dir) {
    byte[] segmentContents = null;
    if (segmentsNew == null) {
        // Default file name; TODO: switch to IndexFileNames.getSegmentsNewFileName().
        segmentsNew = TransFSDirectory.SEGMENTS_NEW;
    }
    IndexInput input = null;
    try {
        // Slurp the whole segments file into memory.
        input = dir.openInput(segmentsNew);
        int length = (int) input.length();
        segmentContents = new byte[length];
        try {
            input.readBytes(segmentContents, 0, length);
        } catch (IOException ioe) {
            // A failed read leaves segmentContents null; caught by the
            // emptiness check below rather than propagated here.
            segmentContents = null;
        }
    } catch (Exception ex) {
        throw new TransactionException("LuceneTransaction.executeSubTransaction(): " + ex.getMessage());
    } finally {
        if (input != null) {
            try {
                input.close();
            } catch (Exception ignore) {
                // Best-effort close; nothing useful to do on failure.
            }
            input = null;
        }
    }

    if (segmentContents == null || segmentContents.length == 0) {
        throw new TransactionException("LuceneTransaction.executeSubTransaction(): "
                + "The segments.new file does not contain any data to save.");
    }

    PreparedStatement pstmt = null;
    ByteArrayInputStream bais = null;
    // Tracks whether the try-block already threw, so the finally-block does not
    // mask the original failure with a close() exception.
    boolean exceptionOccured = false;

    try {
        // Invalidate the previously-valid row(s) for this db-home.
        String sql = "UPDATE Lucene SET valid = ?, version = ? " + "WHERE valid = ? AND db_home = ?";
        pstmt = conn.prepareStatement(sql);
        int count = 1;
        pstmt.setBoolean(count++, false);
        pstmt.setInt(count++, getLuceneVersion());
        pstmt.setBoolean(count++, true);
        pstmt.setString(count++, dbhome.getName());
        pstmt.executeUpdate();
        pstmt.close();
        pstmt = null;

        // Insert the new valid row carrying the segment bytes.
        sql = "INSERT INTO Lucene (valid, db_home, segments, version) " + "VALUES (?,?,?,?)";
        pstmt = conn.prepareStatement(sql);
        count = 1;
        pstmt.setBoolean(count++, true);
        pstmt.setString(count++, dbhome.getName());
        bais = new ByteArrayInputStream(segmentContents);
        pstmt.setBinaryStream(count++, bais, segmentContents.length);
        pstmt.setInt(count++, getLuceneVersion());
        // BUGFIX: removed leftover debug println ("EXECUTE update was a SUCCESS!!")
        // that fired before the row count was even checked.
        int rows = pstmt.executeUpdate();
        if (rows < 1) {
            throw new Exception(
                    "LuceneTransactionManager.executeTransaction(): update didn't affect any rows in the database");
        }
    } catch (Exception ex) {
        exceptionOccured = true;
        // NOTE(review): only the message survives here — consider a
        // cause-preserving TransactionException constructor if one exists.
        throw new TransactionException(ex.getMessage());
    } finally {
        try {
            dir.deleteFile(segmentsNew);
        } catch (IOException ioex) {
            // A leftover segments.new file is harmless: it will be overwritten
            // by the next Lucene write operation, so the failure is ignored.
        }

        if (bais != null) {
            try {
                bais.close();
            } catch (Exception ignoreit) {
                // ByteArrayInputStream.close() is a no-op; ignore defensively.
            }
            bais = null;
        }
        segmentContents = null;

        if (pstmt != null) {
            try {
                pstmt.close();
            } catch (SQLException sqle) {
                if (!exceptionOccured) {
                    throw new TransactionException(sqle.getMessage());
                }
            }
            pstmt = null;
        }
    }
}

From source file:com.bah.lucene.BaseDirectoryTestSuite.java

License:Apache License

@Test
public void testWritingAndReadingAFile() throws IOException {
    final String fileName = "testing.test";

    // Write a single int and close the output.
    IndexOutput output = directory.createOutput(fileName, IOContext.DEFAULT);
    output.writeInt(12345);
    output.flush();
    output.close();

    // Read the value back.
    IndexInput input = directory.openInput(fileName, IOContext.DEFAULT);
    assertEquals(12345, input.readInt());
    input.close();

    // The directory listing contains exactly the one file.
    String[] files = directory.listAll();
    assertEquals(1, files.length);
    assertEquals(fileName, files[0]);

    // One int occupies four bytes on disk.
    assertEquals(4, directory.fileLength(fileName));

    // A clone shares content but maintains its own read position.
    IndexInput original = directory.openInput(fileName, IOContext.DEFAULT);
    IndexInput cloned = (IndexInput) original.clone();
    assertEquals(12345, cloned.readInt());
    cloned.close();
    assertEquals(12345, original.readInt());
    original.close();

    // Existence checks before and after deletion.
    assertFalse(directory.fileExists("testing.test.other"));
    assertTrue(directory.fileExists(fileName));
    directory.deleteFile(fileName);
    assertFalse(directory.fileExists(fileName));
}

From source file:com.bah.lucene.BaseDirectoryTestSuite.java

License:Apache License

/**
 * Asserts that file {@code name} has identical contents in both directories by
 * replaying a random number of positioned reads against each input and
 * comparing the bytes returned.
 *
 * @param name  file name to open in both directories
 * @param fsDir reference directory
 * @param hdfs  directory under test
 * @throws IOException on any read failure
 */
private void assertInputsEquals(String name, Directory fsDir, Directory hdfs) throws IOException {
    int reads = random.nextInt(MAX_NUMBER_OF_READS);
    IndexInput fsInput = fsDir.openInput(name, IOContext.DEFAULT);
    IndexInput hdfsInput = hdfs.openInput(name, IOContext.DEFAULT);
    assertEquals(fsInput.length(), hdfsInput.length());
    int fileLength = (int) fsInput.length();
    // Guard against Random.nextInt(0) throwing on an empty file
    // (matches the sibling helper in BlockDirectoryTest).
    if (fileLength != 0) {
        for (int i = 0; i < reads; i++) {
            byte[] fsBuf = new byte[random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength))
                    + MIN_BUFFER_SIZE];
            byte[] hdfsBuf = new byte[fsBuf.length];
            int offset = random.nextInt(fsBuf.length);
            int length = random.nextInt(fsBuf.length - offset);
            int pos = random.nextInt(fileLength - length);
            fsInput.seek(pos);
            fsInput.readBytes(fsBuf, offset, length);
            hdfsInput.seek(pos);
            hdfsInput.readBytes(hdfsBuf, offset, length);
            // BUGFIX: compare the full [offset, offset + length) window. The old
            // bound (f < length) skipped the tail whenever offset > 0 and
            // compared nothing at all when offset >= length.
            for (int f = offset; f < offset + length; f++) {
                if (fsBuf[f] != hdfsBuf[f]) {
                    fail();
                }
            }
        }
    }
    fsInput.close();
    hdfsInput.close();
}

From source file:com.bah.lucene.blockcache.BlockDirectoryTest.java

License:Apache License

/**
 * Asserts that file {@code name} has identical contents in both directories by
 * replaying a random number of positioned reads against each input and
 * comparing the bytes returned. Empty files are considered equal once their
 * lengths match.
 *
 * @param name  file name to open in both directories
 * @param fsDir reference directory
 * @param hdfs  directory under test
 * @throws IOException on any read failure
 */
private void assertInputsEquals(String name, Directory fsDir, Directory hdfs) throws IOException {
    int reads = random.nextInt(MAX_NUMBER_OF_READS);
    IndexInput fsInput = fsDir.openInput(name, IOContext.DEFAULT);
    IndexInput hdfsInput = hdfs.openInput(name, IOContext.DEFAULT);
    assertEquals(fsInput.length(), hdfsInput.length());
    int fileLength = (int) fsInput.length();
    if (fileLength != 0) {
        for (int i = 0; i < reads; i++) {
            byte[] fsBuf = new byte[random.nextInt(Math.min(MAX_BUFFER_SIZE - MIN_BUFFER_SIZE, fileLength))
                    + MIN_BUFFER_SIZE];
            byte[] hdfsBuf = new byte[fsBuf.length];
            int offset = random.nextInt(fsBuf.length);
            int length = random.nextInt(fsBuf.length - offset);
            int pos = random.nextInt(fileLength - length);
            fsInput.seek(pos);
            fsInput.readBytes(fsBuf, offset, length);
            hdfsInput.seek(pos);
            hdfsInput.readBytes(hdfsBuf, offset, length);
            // BUGFIX: compare the full [offset, offset + length) window. The old
            // bound (f < length) skipped the tail whenever offset > 0 and
            // compared nothing at all when offset >= length.
            for (int f = offset; f < offset + length; f++) {
                if (fsBuf[f] != hdfsBuf[f]) {
                    fail(Long.toString(seed) + " read [" + i + "]");
                }
            }
        }
    }
    fsInput.close();
    hdfsInput.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheDirectoryTest.java

License:Apache License

@Test
public void test1() throws IOException {
    // Write three consecutive longs, then read them back in order.
    IndexOutput out = _cacheDirectory.createOutput("test.file", IOContext.DEFAULT);
    for (long value = 0; value < 3; value++) {
        out.writeLong(value);
    }
    out.close();

    IndexInput in = _cacheDirectory.openInput("test.file", IOContext.DEFAULT);
    for (long expected = 0; expected < 3; expected++) {
        assertEquals(expected, in.readLong());
    }
    in.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheDirectoryTest.java

License:Apache License

@Test
public void test2() throws IOException {
    // Write a 9000-byte patterned payload and verify the reported length.
    final int size = 9000;
    byte[] data = new byte[size];
    for (int i = 0; i < data.length; i++) {
        data[i] = (byte) i;
    }

    IndexOutput out = _cacheDirectory.createOutput("test.file", IOContext.DEFAULT);
    out.writeBytes(data, data.length);
    out.close();

    IndexInput in = _cacheDirectory.openInput("test.file", IOContext.DEFAULT);
    assertEquals(size, in.length());
    in.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheIndexInputTest.java

License:Apache License

@Test
public void test2() throws IOException {
    // Compare a CacheIndexInput wrapping a clone of the raw input against the
    // raw input itself across random bulk, short, int, and long reads.
    Cache cache = getCache();
    RAMDirectory dir = new RAMDirectory();
    Random rnd = new Random(seed);

    final String name = "test2";
    final long size = (10 * 1024 * 1024) + 13;

    IndexOutput out = dir.createOutput(name, IOContext.DEFAULT);
    writeRandomData(size, rnd, out);
    out.close();

    IndexInput raw = dir.openInput(name, IOContext.DEFAULT);
    IndexInput cached = new CacheIndexInput(null, name, raw.clone(), cache);
    readRandomData(raw, cached, rnd, sampleSize, maxBufSize, maxOffset);
    readRandomDataShort(raw, cached, rnd, sampleSize);
    readRandomDataInt(raw, cached, rnd, sampleSize);
    readRandomDataLong(raw, cached, rnd, sampleSize);
    cached.close();
    raw.close();
    dir.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheIndexOutputTest.java

License:Apache License

@Test
public void test1() throws IOException {
    // Write two marker bytes plus a 16000-byte random payload through a
    // CacheIndexOutput, then verify everything on read-back.
    Random rnd = new Random(seed);
    RAMDirectory dir = new RAMDirectory();
    IndexOutput rawOutput = dir.createOutput("test", IOContext.DEFAULT);

    Cache cache = CacheIndexInputTest.getCache();
    CacheIndexOutput cacheOutput = new CacheIndexOutput(null, "test", rawOutput, cache);
    cacheOutput.writeByte((byte) 1);
    cacheOutput.writeByte((byte) 2);
    byte[] payload = new byte[16000];
    rnd.nextBytes(payload);
    cacheOutput.writeBytes(payload, 16000);
    cacheOutput.close();

    IndexInput in = dir.openInput("test", IOContext.DEFAULT);
    assertEquals(16002, in.length());
    assertEquals(1, in.readByte());
    assertEquals(2, in.readByte());

    byte[] readBack = new byte[16000];
    in.readBytes(readBack, 0, 16000);
    in.close();
    assertArrayEquals(payload, readBack);
    dir.close();
}

From source file:com.bah.lucene.blockcache_v2.CacheIndexOutputTest.java

License:Apache License

@Test
public void test2() throws IOException {
    // Write identical random data through a plain IndexOutput and through a
    // CacheIndexOutput, then verify both files read back identically.
    Cache cache = CacheIndexInputTest.getCache();
    RAMDirectory plainDir = new RAMDirectory();
    RAMDirectory cachedDir = new RAMDirectory();

    Random rnd = new Random(seed);
    final String name = "test2";
    final long size = (10 * 1024 * 1024) + 13;

    IndexOutput plainOut = plainDir.createOutput(name, IOContext.DEFAULT);
    IndexOutput rawOut = cachedDir.createOutput(name, IOContext.DEFAULT);
    CacheIndexOutput cacheOut = new CacheIndexOutput(null, name, rawOut, cache);
    CacheIndexInputTest.writeRandomData(size, rnd, plainOut, cacheOut);
    plainOut.close();
    cacheOut.close();

    IndexInput expected = plainDir.openInput(name, IOContext.DEFAULT);
    IndexInput actual = cachedDir.openInput(name, IOContext.DEFAULT);
    CacheIndexInputTest.readRandomData(expected, actual, rnd, sampleSize, maxBufSize, maxOffset);
    actual.close();
    expected.close();
    plainDir.close();
    cachedDir.close();
}