Example usage for java.util.zip CRC32 CRC32

Introduction

On this page you can find example usages of the java.util.zip CRC32() constructor.

Prototype

public CRC32() 

Document

Creates a new CRC32 object.
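
For orientation before the real-world examples below, here is a minimal, self-contained sketch (class name and input string are illustrative) of the constructor together with the usual update/getValue/reset cycle:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class Crc32Demo {
    public static void main(String[] args) {
        CRC32 crc = new CRC32(); // starts at the initial checksum value of 0
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
        crc.update(data, 0, data.length); // feed bytes into the checksum
        System.out.printf("CRC-32: %08x%n", crc.getValue());
        crc.reset(); // return to the initial state so the object can be reused
    }
}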

Usage

From source file:org.exist.xquery.modules.compression.AbstractCompressFunction.java

/**
 * Adds a document to an archive.
 * 
 * @param os
 *            The Output Stream to add the document to
 * @param doc
 *            The document to add to the archive
 * @param useHierarchy
 *            Whether to use a folder hierarchy in the archive file that
 *            reflects the collection hierarchy
 */
private void compressResource(OutputStream os, DocumentImpl doc, boolean useHierarchy, String stripOffset,
        String method, String name) throws IOException, SAXException {
    // create an entry in the archive for the document
    Object entry = null;
    byte[] value = new byte[0];
    CRC32 chksum = new CRC32();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();

    if (name != null) {
        entry = newEntry(name);
    } else if (useHierarchy) {
        String docCollection = doc.getCollection().getURI().toString();
        XmldbURI collection = XmldbURI.create(removeLeadingOffset(docCollection, stripOffset));
        entry = newEntry(collection.append(doc.getFileURI()).toString());
    } else {
        entry = newEntry(doc.getFileURI().toString());
    }

    if (doc.getResourceType() == DocumentImpl.XML_FILE) {
        // xml file
        Serializer serializer = context.getBroker().getSerializer();
        serializer.setUser(context.getUser());
        serializer.setProperty("omit-xml-declaration", "no");
        getDynamicSerializerOptions(serializer);
        String strDoc = serializer.serialize(doc);
        value = strDoc.getBytes();
    } else if (doc.getResourceType() == DocumentImpl.BINARY_FILE) {
        // binary file
        InputStream is = context.getBroker().getBinaryResource((BinaryDocument) doc);
        byte[] data = new byte[16384];
        int len = 0;
        while ((len = is.read(data, 0, data.length)) > 0) {
            baos.write(data, 0, len);
        }
        is.close();
        value = baos.toByteArray();
    }
    // close the entry
    if (entry instanceof ZipEntry && "store".equals(method)) {
        ((ZipEntry) entry).setMethod(ZipOutputStream.STORED);
        chksum.update(value);
        ((ZipEntry) entry).setCrc(chksum.getValue());
        ((ZipEntry) entry).setSize(value.length);
    }

    putEntry(os, entry);
    os.write(value);
    closeEntry(os);
}
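
A note on the STORED branch above: ZipOutputStream cannot derive the size and CRC-32 of a STORED (uncompressed) entry on its own, so both must be set on the ZipEntry before the entry is written; that is why the resource is fully buffered and run through CRC32 first. A minimal standalone sketch of the same pattern (file and entry names are illustrative):

import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class StoredEntryDemo {
    public static void main(String[] args) throws Exception {
        byte[] value = "payload".getBytes(StandardCharsets.UTF_8);
        CRC32 chksum = new CRC32();
        chksum.update(value);

        ZipEntry entry = new ZipEntry("payload.txt");
        entry.setMethod(ZipEntry.STORED); // no compression
        entry.setSize(value.length);      // required before putNextEntry
        entry.setCrc(chksum.getValue());  // required before putNextEntry

        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("demo.zip"))) {
            zos.putNextEntry(entry);
            zos.write(value);
            zos.closeEntry();
        }
    }
}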

From source file:org.kuali.kfs.module.ar.document.service.impl.DunningLetterServiceImpl.java

/**
 * Packages the generated PDF report into a ZIP archive for printing.
 *
 * @param report the PDF report content to add to the archive
 * @param baos the output stream the ZIP archive is written to
 * @return true once the archive has been written
 * @throws IOException if reading or writing the archive fails
 */
@Override
public boolean createZipOfPDFs(byte[] report, ByteArrayOutputStream baos) throws IOException {

    ZipOutputStream zos = new ZipOutputStream(baos);
    int bytesRead;
    byte[] buffer = new byte[1024];
    CRC32 crc = new CRC32();

    if (ObjectUtils.isNotNull(report)) {
        BufferedInputStream bis = new BufferedInputStream(new ByteArrayInputStream(report));
        crc.reset();
        while ((bytesRead = bis.read(buffer)) != -1) {
            crc.update(buffer, 0, bytesRead);
        }
        bis.close();
        // Reset to beginning of input stream
        bis = new BufferedInputStream(new ByteArrayInputStream(report));
        ZipEntry entry = new ZipEntry("DunningLetters&Invoices-"
                + getDateTimeService().toDateStringForFilename(getDateTimeService().getCurrentDate()) + ".pdf");
        entry.setMethod(ZipEntry.STORED);
        entry.setCompressedSize(report.length);
        entry.setSize(report.length);
        entry.setCrc(crc.getValue());
        zos.putNextEntry(entry);
        while ((bytesRead = bis.read(buffer)) != -1) {
            zos.write(buffer, 0, bytesRead);
        }
        bis.close();
    }

    zos.close();
    return true;
}

From source file:org.apache.hadoop.raid.Decoder.java

/**
 * Having buffers of the right size is extremely important. If the
 * buffer size is not a divisor of the block size, we may end up reading
 * across block boundaries.
 *
 * If codec's simulateBlockFix is true, we use the old code to fix blocks
 * and verify the new code's result is the same as the old one.
 */
CRC32 fixErasedBlock(FileSystem srcFs, FileStatus srcStat, FileSystem parityFs, Path parityFile,
        boolean fixSource, long blockSize, long errorOffset, long limit, boolean partial, OutputStream out,
        StripeInfo si, Context context, boolean skipVerify) throws IOException, InterruptedException {
    configureBuffers(blockSize);
    Progressable reporter = context;
    if (reporter == null) {
        reporter = RaidUtils.NULL_PROGRESSABLE;
    }

    Path srcFile = srcStat.getPath();
    LOG.info("Code: " + this.codec.id + " simulation: " + this.codec.simulateBlockFix);
    if (this.codec.simulateBlockFix) {
        String oldId = getOldCodeId(srcStat);
        if (oldId == null) {
            // Couldn't find old codec for block fixing, throw exception instead
            throw new IOException("Couldn't find old parity files for " + srcFile
                    + ". Won't reconstruct the block since code " + this.codec.id + " is still under test");
        }
        if (partial) {
            throw new IOException(
                    "Couldn't reconstruct the partial data because " + "old decoders don't support it");
        }
        Decoder decoder = (oldId.equals("xor")) ? new XORDecoder(conf) : new ReedSolomonDecoder(conf);
        CRC32 newCRC = null;
        long newLen = 0;
        if (!skipVerify) {
            newCRC = new CRC32();
            newLen = this.fixErasedBlockImpl(srcFs, srcFile, parityFs, parityFile, fixSource, blockSize,
                    errorOffset, limit, partial, null, context, newCRC, null, false, null);
        }
        CRC32 oldCRC = (skipVerify && checksumStore == null) ? null : new CRC32();
        long oldLen = decoder.fixErasedBlockImpl(srcFs, srcFile, parityFs, parityFile, fixSource, blockSize,
                errorOffset, limit, partial, out, context, oldCRC, si, false, null);

        if (!skipVerify) {
            if (newCRC.getValue() != oldCRC.getValue() || newLen != oldLen) {
                LOG.error(" New code " + codec.id + " produces different data from old code " + oldId
                        + " during fixing " + (fixSource ? srcFile.toString() : parityFile.toString())
                        + " (offset=" + errorOffset + ", limit=" + limit + ")" + " checksum:"
                        + newCRC.getValue() + ", " + oldCRC.getValue() + " len:" + newLen + ", " + oldLen);
                LogUtils.logRaidReconstructionMetrics(LOGRESULTS.FAILURE, 0, codec, -1, -1, -1, numReadBytes,
                        numReadBytesRemoteRack, (fixSource ? srcFile : parityFile), errorOffset,
                        LOGTYPES.OFFLINE_RECONSTRUCTION_SIMULATION, (fixSource ? srcFs : parityFs), null,
                        context, -1);

                if (context != null) {
                    context.getCounter(RaidCounter.BLOCK_FIX_SIMULATION_FAILED).increment(1L);
                    // The key includes the file path and simulation failure state
                    String outkey = DistBlockIntegrityMonitor.SIMULATION_FAILED_FILE + ",";
                    if (fixSource) {
                        outkey += srcFile.toUri().getPath();
                    } else {
                        outkey += parityFile.toUri().getPath();
                    }
                    // The value is the task id
                    String outval = context.getConfiguration().get("mapred.task.id");
                    context.write(new Text(outkey), new Text(outval));
                }
            } else {
                LOG.info(" New code " + codec.id + " produces the same data with old code " + oldId
                        + " during fixing " + (fixSource ? srcFile.toString() : parityFile.toString())
                        + " (offset=" + errorOffset + ", limit=" + limit + ")");
                if (context != null) {
                    context.getCounter(RaidCounter.BLOCK_FIX_SIMULATION_SUCCEEDED).increment(1L);
                }
            }
        }
        return oldCRC;
    } else {
        CRC32 crc = null;
        if (checksumStore != null) {
            crc = new CRC32();
        }
        fixErasedBlockImpl(srcFs, srcFile, parityFs, parityFile, fixSource, blockSize, errorOffset, limit,
                partial, out, context, crc, si, false, null);
        return crc;
    }
}
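
Note that CRC32 does not override equals, so two checksums are compared through their getValue() longs, which is exactly what the simulation check above does with newCRC.getValue() != oldCRC.getValue().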

From source file:net.sourceforge.subsonic.controller.DownloadController.java

/**
 * Computes the CRC checksum for the given file.
 *
 * @param file The file to compute the checksum for.
 * @return A CRC32 checksum.
 * @throws IOException If an I/O error occurs.
 */
private long computeCrc(File file) throws IOException {
    CRC32 crc = new CRC32();
    InputStream in = new FileInputStream(file);

    try {

        byte[] buf = new byte[8192];
        int n = in.read(buf);
        while (n != -1) {
            crc.update(buf, 0, n);
            n = in.read(buf);
        }

    } finally {
        in.close();
    }

    return crc.getValue();
}
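
The same computation can be expressed with java.util.zip.CheckedInputStream, which updates a supplied Checksum as a side effect of reading; a sketch of an equivalent helper (the method name is illustrative):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;

// Equivalent to computeCrc above: the wrapping stream updates the CRC32 while reading.
private static long computeCrcChecked(File file) throws IOException {
    try (CheckedInputStream in = new CheckedInputStream(new FileInputStream(file), new CRC32())) {
        byte[] buf = new byte[8192];
        while (in.read(buf) != -1) {
            // nothing to do: reading alone advances the checksum
        }
        return in.getChecksum().getValue();
    }
}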

From source file:org.commoncrawl.service.listcrawler.CrawlHistoryManager.java

private void cacheCrawlHistoryLog(File localCacheDir, long timestamp) throws IOException {

    SequenceFile.Reader reader = null;
    Path mapFilePath = new Path(_remoteDataDirectory, CRAWL_HISTORY_HDFS_LOGFILE_PREFIX + timestamp);
    Path indexFilePath = new Path(mapFilePath, "index");
    Path dataFilePath = new Path(mapFilePath, "data");
    File cacheFilePath = new File(localCacheDir, CRAWL_HISTORY_HDFS_LOGFILE_PREFIX + timestamp);

    SequenceFile.Reader indexReader = new SequenceFile.Reader(_remoteFileSystem, dataFilePath,
            CrawlEnvironment.getHadoopConfig());

    ValueBytes valueBytes = indexReader.createValueBytes();
    DataOutputBuffer keyBytes = new DataOutputBuffer();
    DataInputBuffer keyBuffer = new DataInputBuffer();
    DataOutputBuffer finalOutputStream = new DataOutputBuffer();
    DataOutputBuffer uncompressedValueBytes = new DataOutputBuffer();
    URLFP fp = new URLFP();

    try {
        while (indexReader.nextRaw(keyBytes, valueBytes) != -1) {

            keyBuffer.reset(keyBytes.getData(), 0, keyBytes.getLength());
            // read fingerprint ...
            fp.readFields(keyBuffer);
            // write hash only
            finalOutputStream.writeLong(fp.getUrlHash());
            uncompressedValueBytes.reset();
            // write value bytes to intermediate buffer ...
            valueBytes.writeUncompressedBytes(uncompressedValueBytes);
            // write out uncompressed length
            WritableUtils.writeVInt(finalOutputStream, uncompressedValueBytes.getLength());
            // write out bytes
            finalOutputStream.write(uncompressedValueBytes.getData(), 0, uncompressedValueBytes.getLength());
        }
        // delete existing ...
        cacheFilePath.delete();
        // compute crc ...
        CRC32 crc = new CRC32();
        crc.update(finalOutputStream.getData(), 0, finalOutputStream.getLength());
        // open final output stream
        DataOutputStream fileOutputStream = new DataOutputStream(
                new BufferedOutputStream(new FileOutputStream(cacheFilePath)));

        try {
            fileOutputStream.writeLong(crc.getValue());
            fileOutputStream.write(finalOutputStream.getData(), 0, finalOutputStream.getLength());
            fileOutputStream.flush();
        } catch (IOException e) {
            LOG.error(CCStringUtils.stringifyException(e));
            fileOutputStream.close();
            fileOutputStream = null;
            cacheFilePath.delete();
            throw e;
        } finally {
            if (fileOutputStream != null) {
                fileOutputStream.close();
            }
        }
    } finally {
        if (indexReader != null) {
            indexReader.close();
        }
    }
}
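
The cache file written above is laid out as an eight-byte CRC-32 header followed by the payload the CRC was computed over. A sketch of how a reader could validate such a file (the method name is illustrative, and readAllBytes assumes Java 9+):

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.CRC32;

// Illustrative reader: pull the CRC header, re-checksum the payload, compare.
static byte[] readAndValidate(File cacheFile) throws IOException {
    try (DataInputStream in = new DataInputStream(new FileInputStream(cacheFile))) {
        long expected = in.readLong();
        byte[] payload = in.readAllBytes();
        CRC32 crc = new CRC32();
        crc.update(payload, 0, payload.length);
        if (crc.getValue() != expected) {
            throw new IOException("CRC mismatch in " + cacheFile);
        }
        return payload;
    }
}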

From source file:org.apache.hadoop.raid.TestBlockCopier.java

private long[] createRandomFileDispersed(Path file, int numBlocks, DatanodeDescriptor primaryNode,
        DatanodeDescriptor altNode) throws IOException, InterruptedException {

    BlockPlacementPolicyFakeData bp = BlockPlacementPolicyFakeData.lastInstance;
    DatanodeDescriptor tmp = bp.overridingDatanode;

    final int repl = 1;
    long[] crcs = new long[numBlocks];
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(file, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, BLOCK_SIZE);

    // Create the first block on the alt node
    bp.overridingDatanode = altNode;

    // fill random data into file
    final byte[] b = new byte[(int) BLOCK_SIZE];
    LOG.info("Writing first block (alt. host)");
    rand.nextBytes(b);
    stm.write(b);
    crc.update(b);
    crcs[0] = crc.getValue();

    stm.flush();
    Thread.sleep(1000); // What a hack. Le sigh.

    // Now we want to write on the primary node
    bp.overridingDatanode = primaryNode;

    // Write the rest of the blocks on primaryNode
    for (int i = 1; i < numBlocks; i++) {
        LOG.info("Writing block number " + i + " (primary host)");

        rand.nextBytes(b);
        stm.write(b);
        crc.reset();
        crc.update(b);
        crcs[i] = crc.getValue();
    }
    stm.close();
    Thread.sleep(1000);

    // Reset this guy
    bp.overridingDatanode = tmp;

    return crcs;
}
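
Note the crc.reset() at the top of the loop: a CRC32 accumulates state across update calls, so without the reset each crcs[i] would cover every block written so far rather than block i alone. The first block needs no reset because a freshly constructed CRC32 already starts from the initial value.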

From source file:org.apache.hadoop.raid.tools.FastFileCheck.java

/**
 * Verify the data of a file at a given block offset.
 */
private static boolean verifyFile(Configuration conf, FileSystem srcFs, FileSystem parityFs, FileStatus stat,
        Path parityPath, Codec codec, long blockOffset, Progressable reporter)
        throws IOException, InterruptedException {
    Path srcPath = stat.getPath();
    LOG.info("Verify file: " + srcPath + " at offset: " + blockOffset);
    int limit = (int) Math.min(stat.getBlockSize(), DEFAULT_VERIFY_LEN);
    if (reporter == null) {
        reporter = RaidUtils.NULL_PROGRESSABLE;
    }

    // try to decode.
    Decoder decoder = new Decoder(conf, codec);
    if (codec.isDirRaid) {
        decoder.connectToStore(srcPath);
    }

    List<Long> errorOffsets = new ArrayList<Long>();
    // first limit bytes
    errorOffsets.add(blockOffset);
    long left = Math.min(stat.getBlockSize(), stat.getLen() - blockOffset);
    if (left > limit) {
        // last limit bytes
        errorOffsets.add(blockOffset + left - limit);
        // random limit bytes.
        errorOffsets.add(blockOffset + rand.nextInt((int) (left - limit)));
    }

    byte[] buffer = new byte[limit];
    FSDataInputStream is = srcFs.open(srcPath);
    try {
        for (long errorOffset : errorOffsets) {
            is.seek(errorOffset);
            is.read(buffer);
            // calculate the oldCRC.
            CRC32 oldCrc = new CRC32();
            oldCrc.update(buffer);

            CRC32 newCrc = new CRC32();
            DecoderInputStream stream = decoder.new DecoderInputStream(RaidUtils.NULL_PROGRESSABLE, limit,
                    stat.getBlockSize(), errorOffset, srcFs, srcPath, parityFs, parityPath, null, null, false);
            try {
                stream.read(buffer);
                newCrc.update(buffer);
                if (oldCrc.getValue() != newCrc.getValue()) {
                    LogUtils.logFileCheckMetrics(LOGRESULTS.FAILURE, codec, srcPath, srcFs, errorOffset, limit,
                            null, reporter);
                    LOG.error("mismatch crc, old " + oldCrc.getValue() + ", new " + newCrc.getValue()
                            + ", for file: " + srcPath + " at offset " + errorOffset + ", read limit " + limit);
                    return false;
                }
            } finally {
                reporter.progress();
                if (stream != null) {
                    stream.close();
                }
            }
        }
        return true;
    } finally {
        is.close();
    }
}
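
One caveat in the example above: InputStream.read(byte[]) may return fewer bytes than the buffer holds, in which case the CRC would also cover stale bytes left over from a previous iteration. FSDataInputStream offers an exact-length positioned read, so the first read could instead be written as follows (same variables as above; the positioned readFully also makes the preceding seek unnecessary):

// Exact-length positioned read; throws EOFException rather than reading short.
is.readFully(errorOffset, buffer);
CRC32 oldCrc = new CRC32();
oldCrc.update(buffer);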

From source file:com.blackducksoftware.tools.commonframework.core.config.ConfigurationFileTest.java

/**
 * Test handling of legacy passwords in plain text with
 * password.isplaintext=true. Does not verify that non-password-related
 * lines survive as-is, but testLegacyPasswordPlainTextIsplaintextNotSet()
 * does that.
 *
 * @throws Exception
 */
@Test
public void testLegacyPasswordPlainTextIsplaintextTrue() throws Exception {
    final File sourceConfigFile = new File("src/test/resources/psw_encryption/legacy_plain_set.properties");
    final File configFile = File.createTempFile(
            "com.blackducksoftware.tools.commonframework.core.config.ConfigurationFileTest", "test2");
    filesToDelete.add(configFile);
    configFile.deleteOnExit();
    FileUtils.copyFile(sourceConfigFile, configFile);
    final ConfigurationFile cf = new ConfigurationFile(configFile.getAbsolutePath());

    List<String> updatedLines = null;
    if (cf.isInNeedOfUpdate()) {
        updatedLines = cf.saveWithEncryptedPasswords();
    }

    assertTrue(updatedLines.size() > 0);
    final Iterator<String> updatedLinesIter = updatedLines.iterator();
    while (updatedLinesIter.hasNext()) {
        String updatedLine = updatedLinesIter.next();

        // make sure obsolete properties have been removed
        assertFalse(updatedLine.matches("^.*\\.password\\.isplaintext=.*$"));

        // If this is a password, verify that it was encoded, and that the
        // isencrypted=true was inserted after it
        if (updatedLine.startsWith("cc.password=")) {
            assertEquals("cc.password=cc_password", updatedLine);
            updatedLine = updatedLinesIter.next();
            assertEquals("cc.password.isencrypted=false", updatedLine);
        } else if (updatedLine.startsWith("protex.password=")) {
            assertEquals("protex.password=protex_password", updatedLine);
            updatedLine = updatedLinesIter.next();
            assertEquals("protex.password.isencrypted=false", updatedLine);
        } else if (updatedLine.startsWith("connector.0.password=")) {
            assertEquals("connector.0.password=connector_password", updatedLine);
            updatedLine = updatedLinesIter.next();
            assertEquals("connector.0.password.isencrypted=false", updatedLine);
        }
    }

    final File testGeneratedUpdatedFile = File.createTempFile(
            "com.blackducksoftware.tools.commonframework.core.config.ConfigurationFileTest",
            "test2_testGeneratedUpdatedFile");
    filesToDelete.add(testGeneratedUpdatedFile);
    testGeneratedUpdatedFile.deleteOnExit();
    FileUtils.writeLines(testGeneratedUpdatedFile, updatedLines);
    final long csumTestGeneratedFile = FileUtils.checksum(testGeneratedUpdatedFile, new CRC32()).getValue();
    final long csumActualFile = FileUtils.checksum(configFile, new CRC32()).getValue();
    assertEquals(csumTestGeneratedFile, csumActualFile);
}
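
Commons IO's FileUtils.checksum(File, Checksum) feeds the file's contents through the supplied checksum and returns that same object, which is what lets the test compare the two files by value in one expression each. A minimal sketch of the pattern (the method name is illustrative):

import java.io.File;
import java.io.IOException;
import java.util.zip.CRC32;
import org.apache.commons.io.FileUtils;

// Compare two files by the CRC-32 of their contents.
static boolean sameContents(File a, File b) throws IOException {
    return FileUtils.checksum(a, new CRC32()).getValue() == FileUtils.checksum(b, new CRC32()).getValue();
}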

From source file:com.jkoolcloud.tnt4j.streams.configure.state.AbstractFileStreamStateHandler.java

/**
 * Save the current file access state. Takes the currently streamed file line and calculates the CRC of that line.
 *
 * @param line
 *            line currently streamed
 * @param streamName
 *            stream name
 */
public void saveState(AbstractFileLineStream.Line line, String streamName) {
    AbstractFileLineStream.Line procLine = prevLine;
    prevLine = line;
    if (procLine == null) {
        return;
    }

    String lineStr = procLine.getText();
    int lineNr = procLine.getLineNumber();

    try {
        fileAccessState.currentLineNumber = lineNr;
        fileAccessState.lastReadTime = System.currentTimeMillis();

        CRC32 crc = new CRC32();
        final byte[] bytes4Line = lineStr.getBytes(Utils.UTF8);
        crc.update(bytes4Line, 0, bytes4Line.length);
        fileAccessState.currentLineCrc = crc.getValue();
    } catch (IOException exc) {
        logger().log(OpLevel.ERROR, StreamsResources.getString(StreamsResources.RESOURCE_BUNDLE_NAME,
                "FileStreamStateHandler.file.error"), exc);
    }
}

From source file:io.hops.erasure_coding.Encoder.java

/**
 * Wraps around encodeStripeImpl in order to configure buffers.
 * Having buffers of the right size is extremely important. If the
 * buffer size is not a divisor of the block size, we may end up reading
 * across block boundaries.
 */
void encodeStripe(FileSystem fs, Path sourceFile, Path parityFile, InputStream[] blocks, long blockSize,
        OutputStream[] outs, Progressable reporter, boolean computeBlockChecksum, int stripe, Path copyPath,
        OutputStream[] copyOuts) throws IOException {
    configureBuffers(blockSize);
    int boundedBufferCapacity = 1;
    ParallelStreamReader parallelReader = new ParallelStreamReader(reporter, blocks, bufSize, parallelism,
            boundedBufferCapacity, blockSize);
    parallelReader.start();

    Checksum[] sourceChecksums = null;
    Checksum[] parityChecksums = null;
    if (computeBlockChecksum) {
        sourceChecksums = new Checksum[codec.stripeLength];
        for (int i = 0; i < sourceChecksums.length; i++) {
            sourceChecksums[i] = new CRC32();
        }
        parityChecksums = new Checksum[codec.parityLength];
        for (int i = 0; i < parityChecksums.length; i++) {
            parityChecksums[i] = new CRC32();
        }
    }
    try {
        for (long encoded = 0; encoded < blockSize; encoded += bufSize) {
            ParallelStreamReader.ReadResult readResult = null;
            try {
                readResult = parallelReader.getReadResult();
            } catch (InterruptedException e) {
                throw new IOException("Interrupted while waiting for read result");
            }
            // Cannot tolerate any IO errors.
            IOException readEx = readResult.getException();
            if (readEx != null) {
                throw readEx;
            }

            if (computeBlockChecksum) {
                updateChecksums(sourceChecksums, readResult.readBufs);
            }
            if (copyOuts != null) {
                for (int i = 0; i < readResult.readBufs.length; i++) {
                    copyOuts[i].write(readResult.readBufs[i], 0, readResult.numRead[i]);
                }
            }
            code.encodeBulk(readResult.readBufs, writeBufs);
            reporter.progress();

            // Now that we have some data to write, send it to the temp files.
            for (int i = 0; i < codec.parityLength; i++) {
                outs[i].write(writeBufs[i], 0, bufSize);
                if (computeBlockChecksum) {
                    parityChecksums[i].update(writeBufs[i], 0, bufSize);
                }
                reporter.progress();
            }
        }
        DistributedFileSystem dfs = (DistributedFileSystem) (fs instanceof ErasureCodingFileSystem
                ? ((ErasureCodingFileSystem) fs).getFileSystem()
                : fs);
        sendChecksums(dfs, copyPath == null ? sourceFile : copyPath, sourceChecksums, stripe,
                codec.stripeLength);
        sendChecksums(dfs, parityFile, parityChecksums, stripe, codec.parityLength);
    } finally {
        parallelReader.shutdown();
    }
}
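
updateChecksums is defined elsewhere in the class and not shown in this excerpt; presumably it feeds each freshly read source buffer into the matching per-block checksum. A hypothetical sketch of such a helper, with the signature assumed from the call site (note the arrays are typed as the Checksum interface, so CRC32 could be swapped for Adler32 without touching this loop):

// Hypothetical helper matching the call above: one checksum per source block.
// Assumes each read buffer is filled to its full length.
private void updateChecksums(Checksum[] checksums, byte[][] readBufs) {
    for (int i = 0; i < checksums.length; i++) {
        checksums[i].update(readBufs[i], 0, readBufs[i].length);
    }
}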