List of usage examples for the org.apache.lucene.store.ChecksumIndexInput constructor
protected ChecksumIndexInput(String resourceDescription)
From source file:com.devwebsphere.wxslucene.GridDirectory.java
License:Open Source License
public static void copy(Directory src, GridDirectory dest, boolean closeDirSrc) throws IOException { final String[] files = src.listAll(); IndexFileNameFilter filter = IndexFileNameFilter.getFilter(); byte[] buf = new byte[COPY_BUFFER_SIZE]; for (int i = 0; i < files.length; i++) { if (!filter.accept(null, files[i])) continue; IndexOutput os = null;/*from ww w. jav a2 s . c om*/ ChecksumIndexInput is = null; try { // create file in dest directory os = dest.createOutput(files[i]); // read current file is = new ChecksumIndexInput(src.openInput(files[i])); // and copy to dest directory long len = is.length(); long readCount = 0; while (readCount < len) { int toRead = readCount + COPY_BUFFER_SIZE > len ? (int) (len - readCount) : COPY_BUFFER_SIZE; is.readBytes(buf, 0, toRead); os.writeBytes(buf, toRead); readCount += toRead; } long src_sum = is.getChecksum(); os.flush(); // this code can just compare the new file with the old one // to make sure it's copied correctly ChecksumIndexInput dst_check_stream = new ChecksumIndexInput(dest.openInput(files[i])); len = dst_check_stream.length(); readCount = 0; while (readCount < len) { int toRead = readCount + COPY_BUFFER_SIZE > len ? (int) (len - readCount) : COPY_BUFFER_SIZE; dst_check_stream.readBytes(buf, 0, toRead); readCount += toRead; } long dst_sum = dst_check_stream.getChecksum(); if (dst_sum == src_sum) { logger.log(Level.INFO, "Verify " + files[i] + " was successful"); } else { logger.log(Level.INFO, "Verify " + files[i] + " failed"); throw new IllegalStateException("File " + files[i] + " failed verification"); } } finally { // graceful cleanup try { if (os != null) os.close(); } finally { if (is != null) is.close(); } } } if (closeDirSrc) src.close(); }
From source file:com.zimbra.cs.index.LuceneIndexRepair.java
License:Open Source License
/**
 * Repair the index data.
 *
 * <p>Reads the current segments file, writes a converted copy under the next
 * generation's filename, and commits the new file only if repairs were actually
 * made; the original segments file is then kept as a {@code REPAIR_}-prefixed,
 * timestamped backup.
 *
 * @return number of repairs conducted, or 0 if nothing was repaired
 * @throws IOException error on accessing the index data
 */
int repair() throws IOException {
    String segsFilename = SegmentInfos.getCurrentSegmentFileName(directory);
    long gen = SegmentInfos.generationFromSegmentsFileName(segsFilename);
    // next generation's segments_N filename — written first, committed later
    String nextSegsFilename = getSegmentsFilename(++gen);
    ChecksumIndexInput input = new ChecksumIndexInput(directory.openInput(segsFilename));
    try {
        ChecksumIndexOutput output = new ChecksumIndexOutput(directory.createOutput(nextSegsFilename));
        try {
            convert(input, output);
        } finally {
            output.close();
        }
    } finally {
        input.close();
    }
    if (repaired == 0) {
        // nothing was repaired — discard the copy we just wrote
        directory.deleteFile(nextSegsFilename);
        return repaired;
    }
    // make the new segments file durable before pointing the index at it
    directory.sync(Collections.singleton(nextSegsFilename));
    try {
        commit(gen);
    } catch (IOException e) {
        // commit failed — remove the orphaned segments file and propagate the error
        directory.deleteFile(nextSegsFilename);
        throw e;
    }
    // keep the old segments file as a timestamped backup rather than deleting it
    String backupFilename = "REPAIR_" + DateTools.dateToString(new Date(), DateTools.Resolution.SECOND) + "."
            + segsFilename;
    rename(segsFilename, backupFilename);
    return repaired;
}
From source file:org.apache.blur.mapreduce.lib.GenericRecordReader.java
License:Apache License
private SegmentInfoPerCommit segmentInfosRead(Directory directory, String segmentFileName, String segmentInfoName) throws IOException { boolean success = false; ChecksumIndexInput input = new ChecksumIndexInput(directory.openInput(segmentFileName, IOContext.READ)); try {//from ww w.ja v a 2 s . co m final int format = input.readInt(); if (format == CodecUtil.CODEC_MAGIC) { // 4.0+ CodecUtil.checkHeaderNoMagic(input, "segments", SegmentInfos.VERSION_40, SegmentInfos.VERSION_40); input.readLong();// read version input.readInt(); // read counter int numSegments = input.readInt(); if (numSegments < 0) { throw new CorruptIndexException( "invalid segment count: " + numSegments + " (resource: " + input + ")"); } for (int seg = 0; seg < numSegments; seg++) { String segName = input.readString(); Codec codec = Codec.forName(input.readString()); SegmentInfo info = codec.segmentInfoFormat().getSegmentInfoReader().read(directory, segName, IOContext.READ); info.setCodec(codec); long delGen = input.readLong(); int delCount = input.readInt(); if (delCount < 0 || delCount > info.getDocCount()) { throw new CorruptIndexException( "invalid deletion count: " + delCount + " (resource: " + input + ")"); } if (segName.equals(segmentInfoName)) { success = true; return new SegmentInfoPerCommit(info, delCount, delGen); } } } else { throw new IOException("Legacy Infos not supported for dir [" + directory + "]."); } throw new IOException("Segment [" + segmentInfoName + "] nout found in dir [" + directory + "]"); } finally { if (!success) { IOUtils.closeWhileHandlingException(input); } else { input.close(); } } }
From source file:org.elasticsearch.util.lucene.Directories.java
License:Apache License
/**
 * Computes the checksum of the content represented by the provided index input.
 *
 * <p>Closes the index input once the checksum is computed, even if reading fails.
 *
 * @param indexInput the input to read fully; it is consumed and closed by this call
 * @return the checksum over all bytes of the input
 * @throws IOException if reading the input fails
 */
public static long checksum(IndexInput indexInput) throws IOException {
    final int BUFFER_SIZE = 16384;
    byte[] buf = new byte[BUFFER_SIZE];
    ChecksumIndexInput cii = new ChecksumIndexInput(indexInput);
    try {
        long len = cii.length();
        long readCount = 0;
        while (readCount < len) {
            int toRead = readCount + BUFFER_SIZE > len ? (int) (len - readCount) : BUFFER_SIZE;
            cii.readBytes(buf, 0, toRead);
            readCount += toRead;
        }
        // FIX: read the checksum before closing (was called after close()), and
        // guarantee close() even when readBytes throws (was leaked on error)
        return cii.getChecksum();
    } finally {
        cii.close();
    }
}