Example usage for the java.util.zip.CRC32() constructor

Introduction

This page collects usage examples for the java.util.zip.CRC32() constructor.

Prototype

public CRC32() 

Document

Creates a new CRC32 object.
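
As a quick orientation before the project examples, here is a minimal sketch (not taken from the examples below) that computes a CRC-32 over a byte array:

import java.util.zip.CRC32;

public class Crc32Demo {
    public static void main(String[] args) {
        CRC32 crc = new CRC32();
        byte[] data = "hello".getBytes();
        crc.update(data, 0, data.length);
        // getValue() returns the 32-bit checksum widened to an unsigned long
        System.out.println(Long.toHexString(crc.getValue()));
    }
}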

Usage

From source file: de.alpharogroup.file.checksum.ChecksumUtils.java

/**
 * Gets the checksum from the given file. If the flag crc is true, the CheckedInputStream is
 * constructed with an instance of <code>java.util.zip.CRC32</code>, otherwise with an instance
 * of <code>java.util.zip.Adler32</code>.
 *
 * @param file
 *            The file from which to get the checksum.
 * @param crc
 *            If true, the CheckedInputStream is constructed with an instance of
 *            {@link java.util.zip.CRC32}, otherwise with an instance of
 *            {@link java.util.zip.Adler32}.
 * @return The checksum of the given file as a long.
 * @throws FileNotFoundException
 *             Is thrown if the file is not found.
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
public static long getChecksum(final File file, final boolean crc) throws FileNotFoundException, IOException {
    CheckedInputStream cis = null;
    if (crc) {
        cis = new CheckedInputStream(new FileInputStream(file), new CRC32());
    } else {
        cis = new CheckedInputStream(new FileInputStream(file), new Adler32());
    }
    // Read the whole file through the checked stream; the wrapped Checksum
    // is updated as a side effect of every read.
    final int length = (int) file.length();
    final byte[] buffer = new byte[length];
    long checksum = 0;
    while (cis.read(buffer) >= 0) {
        checksum = cis.getChecksum().getValue();
    }
    checksum = cis.getChecksum().getValue();
    StreamUtils.closeInputStream(cis);
    return checksum;
}
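
A hypothetical call site for this helper (the file name is illustrative):

long crc32Sum = ChecksumUtils.getChecksum(new File("data.bin"), true);   // CRC-32
long adlerSum = ChecksumUtils.getChecksum(new File("data.bin"), false);  // Adler-32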

From source file: VASSAL.tools.io.ZipArchive.java

/**
 * Gets an {@link OutputStream} to write to the given file.
 *
 * <b>Note:</b> It is imperative that the calling code ensures that this
 * stream is eventually closed, since the returned stream holds a write
 * lock on the archive.
 *
 * @param path the path to the file in the archive
 * @param compress whether to compress the file
 * @return an <code>OutputStream</code> for the requested file
 * @throws IOException if an I/O error occurs
 */
public OutputStream getOutputStream(String path, boolean compress) throws IOException {
    w.lock();
    try {
        openIfClosed();

        modified = true;

        // update the entries map
        Entry e = entries.get(path);
        if (e == null) {
            e = new Entry(null, null);
            entries.put(path, e);
        }

        // set up new ZipEntry
        final ZipEntry ze = new ZipEntry(path);
        ze.setMethod(compress ? ZipEntry.DEFLATED : ZipEntry.STORED);
        e.ze = ze;

        // clean up old temp file
        if (e.file != null) {
            e.file.delete();
        }

        // create new temp file
        e.file = TempFileManager.getInstance().createTempFile("zip", ".tmp");

        return new ZipArchiveOutputStream(new FileOutputStream(e.file), new CRC32(), e.ze);
    } catch (IOException ex) {
        w.unlock();
        throw ex;
    }
}
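
The JDK ships a similar building block out of the box: java.util.zip.CheckedOutputStream pairs any OutputStream with a Checksum. A minimal sketch (independent of VASSAL's ZipArchiveOutputStream; the file name is illustrative):

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.CheckedOutputStream;

public class CheckedWriteDemo {
    public static void main(String[] args) throws IOException {
        CRC32 crc = new CRC32();
        try (CheckedOutputStream out = new CheckedOutputStream(new FileOutputStream("entry.tmp"), crc)) {
            out.write(new byte[] { 1, 2, 3 });
        }
        // crc now reflects every byte that passed through the stream
        System.out.println(crc.getValue());
    }
}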

From source file: de.alpharogroup.file.checksum.ChecksumExtensions.java

/**
 * Gets the checksum from the given file. If the flag crc is true, the CheckedInputStream is
 * constructed with an instance of <code>java.util.zip.CRC32</code>, otherwise with an instance
 * of <code>java.util.zip.Adler32</code>.
 *
 * @param file
 *            The file from which to get the checksum.
 * @param crc
 *            If true, the CheckedInputStream is constructed with an instance of
 *            {@link java.util.zip.CRC32}, otherwise with an instance of
 *            {@link java.util.zip.Adler32}.
 * @return The checksum of the given file as a long.
 * @throws FileNotFoundException
 *             Is thrown if the file is not found.
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
 */
public static long getChecksum(final File file, final boolean crc) throws FileNotFoundException, IOException {
    CheckedInputStream cis = null;
    if (crc) {
        cis = new CheckedInputStream(new FileInputStream(file), new CRC32());
    } else {
        cis = new CheckedInputStream(new FileInputStream(file), new Adler32());
    }
    final int length = (int) file.length();
    final byte[] buffer = new byte[length];
    long checksum = 0;
    while (cis.read(buffer) >= 0) {
        checksum = cis.getChecksum().getValue();
    }
    checksum = cis.getChecksum().getValue();
    StreamExtensions.closeInputStream(cis);
    return checksum;
}
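
Both helpers above size the read buffer to the whole file, which does not scale to large inputs. A minimal sketch of the same idea with a fixed-size buffer (the class and method names are illustrative, not from either library):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.Adler32;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;
import java.util.zip.Checksum;

public final class StreamingChecksum {
    public static long checksumOf(Path file, boolean crc) throws IOException {
        Checksum sum = crc ? new CRC32() : new Adler32();
        try (InputStream in = new CheckedInputStream(Files.newInputStream(file), sum)) {
            byte[] buffer = new byte[8192];
            // Draining the stream is enough; the wrapped Checksum sees every byte.
            while (in.read(buffer) >= 0) {
                // keep reading
            }
        }
        return sum.getValue();
    }
}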

From source file: org.apache.hadoop.raid.SmokeTestThread.java

@Override
public Boolean call() throws Exception {
    Path testPath = null;
    try {
        fileSys = FileSystem.get(distRaidNode.getConf());
        // Create a small file with 3 blocks
        String testFile = testFileBase + rand.nextLong();
        testPath = new Path(testFile);
        if (fileSys.exists(testPath)) {
            fileSys.delete(testPath, true);
        }
        long blockSize = BLOCK_SIZE;
        FSDataOutputStream stm = fileSys.create(testPath, true,
                fileSys.getConf().getInt("io.file.buffer.size", 4096), (short) 3, blockSize);
        // Write 3 blocks.
        byte[] b = new byte[(int) blockSize];
        for (int i = 0; i < NUM_SOURCE_BLOCKS; i++) {
            rand.nextBytes(b);
            stm.write(b);
            checksum.update(b);
        }
        stm.close();
        LOG.info(
                "[SMOKETEST] Created a test file: " + testFile + " with CRC32 checksum " + checksum.getValue());
        PolicyInfo info = new PolicyInfo(testFile, distRaidNode.getConf());
        info.setCodecId(TEST_CODEC);
        info.setSrcPath(testFileDirectory);
        info.setShouldRaid(true);
        info.setProperty("modTimePeriod", "0");
        info.setProperty("targetReplication", "1");
        info.setProperty("metaReplication", "1");
        FileStatus stat = fileSys.getFileStatus(testPath);
        ArrayList<FileStatus> fstats = new ArrayList<FileStatus>();
        fstats.add(stat);
        // Raid it using rs
        DistRaid dr = DistRaidNode.raidFiles(distRaidNode.getConf(), distRaidNode.jobMonitor, fstats, info);
        LOG.info("[SMOKETEST] RS Raid test file: " + testFile);
        if (dr == null) {
            throw new IOException("Failed to sart a raiding job");
        }
        long startTime = System.currentTimeMillis();
        while (!dr.checkComplete() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        if (!dr.checkComplete()) {
            throw new IOException("Failed to finish the raiding job in " + (timeOut / 1000) + " seconds");
        }
        if (!dr.successful()) {
            throw new IOException("Failed to raid the file " + testFile);
        }
        LOG.info("[SMOKETEST] Finish raiding test file: " + testFile);
        // Verify parity file exists
        Codec codec = Codec.getCodec(TEST_CODEC);
        Path parityPath = new Path(codec.getParityPrefix(), RaidNode.makeRelative(testPath));
        FileStatus parityStat = fileSys.getFileStatus(parityPath);
        long numParityBlocks = RaidNode.numBlocks(parityStat);
        long expectedNumParityBlocks = RaidNode.numStripes(NUM_SOURCE_BLOCKS, codec.stripeLength)
                * codec.parityLength;
        if (numParityBlocks != expectedNumParityBlocks
                || parityStat.getLen() != expectedNumParityBlocks * BLOCK_SIZE) {
            throw new IOException("[SMOKETEST] Parity file " + parityPath + " has " + numParityBlocks
                    + " blocks and " + parityStat.getLen() + " bytes, but we expect " + expectedNumParityBlocks
                    + " blocks and " + (expectedNumParityBlocks * BLOCK_SIZE) + " bytes");
        }
        LOG.info("[SMOKETEST] Verification of parity file " + parityPath + " succeeded");
        LocatedBlock[] blocks = new LocatedBlock[1];
        LocatedBlocks lbs = ((DistributedFileSystem) fileSys).getLocatedBlocks(testPath, 0, Integer.MAX_VALUE);
        // Corrupt the first block
        blocks[0] = lbs.get(0);
        ((DistributedFileSystem) fileSys).getClient().reportBadBlocks(blocks);
        LOG.info("[SMOKETEST] Finish corrupting the first block " + lbs.get(0).getBlock());
        // submit a job to "fix" it
        Set<String> jobFiles = new HashSet<String>();
        jobFiles.add(testFile);
        Job job = DistBlockIntegrityMonitor.startOneJob(
                (DistBlockIntegrityMonitor.Worker) distRaidNode.blockIntegrityMonitor.getCorruptionMonitor(),
                Priority.HIGH, jobFiles, System.currentTimeMillis(), new AtomicLong(0),
                new AtomicLong(System.currentTimeMillis()), Integer.MAX_VALUE);
        startTime = System.currentTimeMillis();
        while (!job.isComplete() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        if (!job.isComplete()) {
            throw new IOException("Failed to finish the blockfixing job in " + (timeOut / 1000) + " seconds");
        }
        if (!job.isSuccessful()) {
            throw new IOException("Failed to fix the file " + testFile);
        }
        LOG.info("[SMOKETEST] Finish blockfixing test file: " + testFile);
        // wait for block is reported
        startTime = System.currentTimeMillis();
        while (((DistributedFileSystem) fileSys).getLocatedBlocks(testPath, 0, Integer.MAX_VALUE).get(0)
                .isCorrupt() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        CRC32 newChk = new CRC32();
        FSDataInputStream readStm = fileSys.open(testPath);
        int num = 0;
        while (num >= 0) {
            num = readStm.read(b);
            if (num < 0) {
                break;
            }
            newChk.update(b, 0, num);
        }
        readStm.close();
        if (newChk.getValue() != checksum.getValue()) {
            throw new IOException(
                    "Fixed file's checksum " + newChk.getValue() + " != original one " + checksum.getValue());
        }
        LOG.info("[SMOKETEST] Verification of fixed test file: " + testFile);
        return true;
    } catch (IOException ex) {
        LOG.error("Get IOException in SmokeTestThread", ex);
        ioe = ex;
        return false;
    } catch (Throwable ex) {
        LOG.error("Get Error in SmokeTestThread", ex);
        ioe = new IOException(ex);
        return false;
    } finally {
        try {
            if (fileSys != null) {
                fileSys.delete(testPath, true);
            }
        } catch (IOException ioe) {
            LOG.error("Get error during deletion", ioe);
        }
    }
}

From source file: uk.ac.cam.cl.dtg.isaac.dos.eventbookings.PgEventBookings.java

/**
 * Release a globally unique database lock.
 * This method will release a previously acquired lock.
 *
 * @param resourceId - the unique id of the object whose lock should be released.
 * @throws SegueDatabaseException if the lock cannot be released due to a database error.
 */
@Override
public void releaseDistributedLock(final String resourceId) throws SegueDatabaseException {

    // generate a 32-bit CRC from the table id and resource id so that it is more likely to be globally unique.
    CRC32 crc = new CRC32();
    crc.update((TABLE_NAME + resourceId).getBytes());

    // release lock
    try (Connection conn = ds.getDatabaseConnection()) {
        PreparedStatement pst;
        pst = conn.prepareStatement("SELECT pg_advisory_unlock(?)");
        pst.setLong(1, crc.getValue());
        log.debug(String.format("Releasing advisory lock on %s (%s)", TABLE_NAME + resourceId, crc.getValue()));
        pst.executeQuery();
    } catch (SQLException e) {
        String msg = String.format("Unable to release lock for event (%s).", resourceId);
        log.error(msg, e);
        throw new SegueDatabaseException(msg);
    }
    log.debug(String.format("Released advisory lock on %s (%s)", TABLE_NAME + resourceId, crc.getValue()));
}
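
The pattern above, deriving a stable long key for pg_advisory_lock/pg_advisory_unlock from a string, reduces to a few lines. A minimal sketch (the class and method names are illustrative; the charset is made explicit here, whereas the original relies on the platform default):

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public final class AdvisoryLockKeys {
    public static long keyFor(String tableName, String resourceId) {
        CRC32 crc = new CRC32();
        crc.update((tableName + resourceId).getBytes(StandardCharsets.UTF_8));
        return crc.getValue();
    }
}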

From source file: org.exist.xquery.modules.compression.AbstractCompressFunction.java

/**
 * Adds an element to an archive.
 *
 * @param os
 *            The output stream to add the element to
 * @param file
 *            The file to add to the archive
 * @param useHierarchy
 *            Whether to use a folder hierarchy in the archive file that
 *            reflects the collection hierarchy
 * @param stripOffset
 *            A leading path prefix to strip from entry names
 * @param method
 *            The compression method; "store" writes ZIP entries uncompressed
 * @param name
 *            An explicit entry name, or null to derive one from the file
 */
private void compressFile(OutputStream os, File file, boolean useHierarchy, String stripOffset, String method,
        String name) throws IOException {

    if (file.isFile()) {

        // create an entry in the archive for the document
        Object entry = null;
        byte[] value = new byte[0];
        CRC32 chksum = new CRC32();
        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        if (name != null) {
            entry = newEntry(name);
        } else if (useHierarchy) {
            entry = newEntry(removeLeadingOffset(file.getPath(), stripOffset));
        } else {
            entry = newEntry(file.getName());
        }

        InputStream is = new FileInputStream(file);
        byte[] data = new byte[16384];
        int len = 0;
        while ((len = is.read(data, 0, data.length)) > 0) {
            baos.write(data, 0, len);
        }
        is.close();
        value = baos.toByteArray();
        // close the entry
        if (entry instanceof ZipEntry && "store".equals(method)) {
            ((ZipEntry) entry).setMethod(ZipOutputStream.STORED);
            chksum.update(value);
            ((ZipEntry) entry).setCrc(chksum.getValue());
            ((ZipEntry) entry).setSize(value.length);
        }

        putEntry(os, entry);
        os.write(value);
        closeEntry(os);

    } else {

        for (String i : file.list()) {
            compressFile(os, new File(file, i), useHierarchy, stripOffset, method, null);
        }

    }

}
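
The STORED branch above reflects a ZIP format rule: entries written uncompressed must have their size and CRC-32 set before the entry is written. A minimal self-contained sketch with the plain JDK ZipOutputStream (file and entry names are illustrative):

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class StoredEntryDemo {
    public static void main(String[] args) throws IOException {
        byte[] data = "uncompressed payload".getBytes();
        CRC32 crc = new CRC32();
        crc.update(data);
        ZipEntry entry = new ZipEntry("payload.txt");
        entry.setMethod(ZipEntry.STORED);
        entry.setSize(data.length);     // required for STORED entries
        entry.setCrc(crc.getValue());   // required for STORED entries
        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("out.zip"))) {
            zos.putNextEntry(entry);
            zos.write(data);
            zos.closeEntry();
        }
    }
}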

From source file: io.hops.erasure_coding.Decoder.java

/**
 * Having buffers of the right size is extremely important. If the
 * buffer size is not a divisor of the block size, we may end up reading
 * across block boundaries.
 */
void fixErasedBlock(FileSystem srcFs, Path srcFile, FileSystem parityFs, Path parityFile, boolean fixSource,
        long blockSize, long errorOffset, long limit, boolean partial, OutputStream out, Mapper.Context context,
        boolean skipVerify, long oldCrc) throws IOException, InterruptedException {

    // TODO This causes a NullPointerException and it didn't seem to be required
    //    configureBuffers(blockSize);
    Progressable reporter = context;
    if (reporter == null) {
        reporter = RaidUtils.NULL_PROGRESSABLE;
    }

    CRC32 crc = new CRC32();
    fixErasedBlockImpl(srcFs, srcFile, parityFs, parityFile, fixSource, blockSize, errorOffset, limit, partial,
            out, reporter, crc);
    if (crc.getValue() != oldCrc) {
        throw new BlockChecksumException(
                String.format("Repair of %s at offset %d failed. Checksum differs from stored checksum",
                        fixSource ? srcFile : parityFile, errorOffset));
    }
}
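
When many blocks are checked in sequence, a single CRC32 instance can also be reused via reset() instead of allocating one per block. A minimal sketch (the class, method, and block list are illustrative, not from the Decoder):

import java.util.List;
import java.util.zip.CRC32;

public final class BlockCrcs {
    public static void printBlockCrcs(List<byte[]> blocks) {
        CRC32 crc = new CRC32();
        for (byte[] block : blocks) {
            crc.reset();                        // start fresh for each block
            crc.update(block, 0, block.length);
            System.out.println(Long.toHexString(crc.getValue()));
        }
    }
}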

From source file: org.pentaho.di.trans.steps.checksum.CheckSum.java

private Long calculCheckSum(Object[] r) throws Exception {
    Long retval;
    StringBuffer Buff = new StringBuffer();

    // Loop through fields
    for (int i = 0; i < data.fieldnr; i++) {
        String fieldvalue = getInputRowMeta().getString(r, data.fieldnrs[i]);
        Buff.append(fieldvalue);
    }

    if (meta.getCheckSumType().equals(CheckSumMeta.TYPE_CRC32)) {
        CRC32 crc32 = new CRC32();
        crc32.update(Buff.toString().getBytes());
        retval = Long.valueOf(crc32.getValue());
    } else {
        Adler32 adler32 = new Adler32();
        adler32.update(Buff.toString().getBytes());
        retval = Long.valueOf(adler32.getValue());
    }

    return retval;
}
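
Since CRC32 and Adler32 both implement java.util.zip.Checksum, the branch above can also be written once against the interface. A minimal sketch (a hypothetical helper, not Pentaho API):

import java.util.zip.Adler32;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

public final class RowChecksums {
    public static long checksum(byte[] bytes, boolean useCrc32) {
        Checksum sum = useCrc32 ? new CRC32() : new Adler32();
        sum.update(bytes, 0, bytes.length);
        return sum.getValue();
    }
}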

From source file: com.webpagebytes.cms.local.WPBLocalFileStorage.java

public void storeFile(InputStream is, WPBFilePath file) throws IOException {
    String fullFilePath = getLocalFullDataPath(file);
    OutputStream fos = createStorageOutputStream(fullFilePath);
    byte[] buffer = new byte[4096];
    CRC32 crc = new CRC32();
    MessageDigest md = null;
    try {
        md = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        IOUtils.closeQuietly(fos);
        throw new IOException("Cannot calculate md5 to store the file", e);
    }

    int count = 0;
    int size = 0;
    while ((count = is.read(buffer)) != -1) {
        size += count;
        fos.write(buffer, 0, count);
        crc.update(buffer, 0, count);
        md.update(buffer, 0, count);
    }

    IOUtils.closeQuietly(fos);

    Properties props = new Properties();
    props.put("path", file.getPath());
    props.put("contentType", "application/octet-stream");
    props.put("crc32", String.valueOf(crc.getValue()));
    props.put("md5", DatatypeConverter.printBase64Binary(md.digest()));
    props.put("creationTime",
            String.valueOf(Calendar.getInstance(TimeZone.getTimeZone("GMT")).getTime().getTime()));
    props.put("size", String.valueOf(size));

    String metaPath = getLocalFullMetaPath(file);
    storeFileProperties(props, metaPath);

}
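
The single-pass CRC-32 plus MD5 computation above can also be expressed with the JDK's stream decorators, chaining CheckedInputStream and DigestInputStream so that one read updates both. A minimal sketch (the file name is illustrative):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;

public class CrcAndMd5Demo {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        CRC32 crc = new CRC32();
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        try (InputStream in = new DigestInputStream(
                new CheckedInputStream(new FileInputStream("upload.bin"), crc), md5)) {
            byte[] buffer = new byte[4096];
            while (in.read(buffer) != -1) {
                // both crc and md5 are updated as bytes flow through
            }
        }
        System.out.println("crc32 = " + crc.getValue());
        System.out.println("md5 length = " + md5.digest().length);
    }
}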

From source file: org.apache.hadoop.hdfs.TestLookasideCache.java

private static long createTestFile(FileSystem fileSys, Path name, int repl, int numBlocks, long blocksize)
        throws IOException {
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blocksize);
    // fill random data into file
    final byte[] b = new byte[(int) blocksize];
    for (int i = 0; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    stm.close();
    return crc.getValue();
}