Example usage for java.util.zip CRC32 CRC32

Introduction

On this page you can find usage examples for the java.util.zip CRC32 constructor, CRC32().

Prototype

public CRC32() 

Document

Creates a new CRC32 object.
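
As a quick orientation before the usage examples below, here is a minimal, self-contained sketch of the constructor in isolation: create the checksum object, feed it bytes, and read back the 32-bit value. The class name and the input string are arbitrary illustrations, not taken from any of the projects listed below.

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class Crc32Demo {
    public static void main(String[] args) {
        CRC32 crc = new CRC32(); // freshly constructed checksum, initial value 0
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
        crc.update(data, 0, data.length); // feed bytes into the checksum
        System.out.println(Long.toHexString(crc.getValue())); // 32-bit CRC as hex
    }
}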

Usage

From source file:org.apache.hadoop.hdfs.TestRaidDfs.java

public static long createTestFile(FileSystem fileSys, Path name, int repl, long fileSize, long blockSize,
        int seed) throws IOException {
    CRC32 crc = new CRC32();
    Random rand = new Random(seed);
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blockSize);
    LOG.info("create file " + name + " size: " + fileSize + " blockSize: " + blockSize + " repl: " + repl);
    // fill random data into file
    byte[] b = new byte[(int) blockSize];
    long numBlocks = fileSize / blockSize;
    for (int i = 0; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    long lastBlock = fileSize - numBlocks * blockSize;
    if (lastBlock > 0) {
        b = new byte[(int) lastBlock];
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    stm.close();
    return crc.getValue();
}

From source file:org.apache.hadoop.raid.TestRaidNode.java

static long createOldFile(FileSystem fileSys, Path name, int repl, int numBlocks, long blocksize)
        throws IOException {
    CRC32 crc = new CRC32();
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blocksize);
    // fill random data into file
    byte[] b = new byte[(int) blocksize];
    for (int i = 0; i < numBlocks; i++) {
        if (i == (numBlocks - 1)) {
            b = new byte[(int) blocksize / 2];
        }
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }

    stm.close();
    return crc.getValue();
}

From source file:org.cloudata.core.commitlog.CommitLogServer.java

public long getChecksum(String dirName) throws IOException {
    File logFile = getLogFile(dirName);

    long fileLength = logFile.length();

    CheckedInputStream cksumIn = new CheckedInputStream(new FileInputStream(logFile), new CRC32());
    BufferedInputStream in = new BufferedInputStream(cksumIn, 8192);

    for (long i = 0; i < fileLength; i++) {
        in.read();
    }

    return cksumIn.getChecksum().getValue();
}
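
For comparison, the same checksum can be computed without the byte-at-a-time loop by reading in bulk and letting CheckedInputStream accumulate the CRC as data flows through it. This is only a sketch that assumes a plain file path argument; it is not part of the original CommitLogServer code.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;

public class FileCrc {
    static long crcOf(String path) throws IOException {
        try (CheckedInputStream in = new CheckedInputStream(
                new BufferedInputStream(new FileInputStream(path), 8192), new CRC32())) {
            byte[] buf = new byte[8192];
            while (in.read(buf) != -1) {
                // each read updates the CRC32 wrapped by the CheckedInputStream
            }
            return in.getChecksum().getValue();
        }
    }
}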

From source file:org.apache.hadoop.hdfs.TestRaidDfs.java

public static long createTestFilePartialLastBlock(FileSystem fileSys, Path name, int repl, int numBlocks,
        long blocksize) throws IOException {
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blocksize);
    // Write whole blocks.
    byte[] b = new byte[(int) blocksize];
    for (int i = 1; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    // Write partial block.
    b = new byte[(int) blocksize / 2 - 1];
    rand.nextBytes(b);
    stm.write(b);
    crc.update(b);

    stm.close();
    return crc.getValue();
}

From source file:cn.sinobest.jzpt.framework.utils.string.StringUtils.java

/**
 * Computes the CRC-32 of the given input stream and returns it as a hex string (up to 8 characters).
 */
public static String getCRC(InputStream in) {
    CRC32 crc32 = new CRC32();
    byte[] b = new byte[4096];
    int len = 0;
    try {
        while ((len = in.read(b)) != -1) {
            crc32.update(b, 0, len);
        }
        return Long.toHexString(crc32.getValue());
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(in);
    }
}

From source file:org.apache.hadoop.hdfs.TestRaidDfs.java

static long bufferCRC(byte[] buf) {
    CRC32 crc = new CRC32();
    crc.update(buf, 0, buf.length);
    return crc.getValue();
}

From source file:org.apache.hadoop.raid.TestRaidNode.java

private void validateFile(FileSystem fileSys, Path name1, Path name2, long crc) throws IOException {

    FileStatus stat1 = fileSys.getFileStatus(name1);
    FileStatus stat2 = fileSys.getFileStatus(name2);
    assertTrue(" Length of file " + name1 + " is " + stat1.getLen() + " is different from length of file "
            + name1 + " " + stat2.getLen(), stat1.getLen() == stat2.getLen());

    CRC32 newcrc = new CRC32();
    FSDataInputStream stm = fileSys.open(name2);
    final byte[] b = new byte[4192];
    int num = 0;
    while (num >= 0) {
        num = stm.read(b);
        if (num < 0) {
            break;
        }
        newcrc.update(b, 0, num);
    }
    stm.close();
    if (newcrc.getValue() != crc) {
        fail("CRC mismatch of files " + name1 + " with file " + name2);
    }
}

From source file:eu.europa.esig.dss.asic.signature.ASiCService.java

private ZipEntry getZipEntryMimeType(final byte[] mimeTypeBytes) {

    final ZipEntry entryMimetype = new ZipEntry(ZIP_ENTRY_MIMETYPE);
    entryMimetype.setMethod(ZipEntry.STORED);
    entryMimetype.setSize(mimeTypeBytes.length);
    entryMimetype.setCompressedSize(mimeTypeBytes.length);
    final CRC32 crc = new CRC32();
    crc.update(mimeTypeBytes);
    entryMimetype.setCrc(crc.getValue());
    return entryMimetype;
}
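
A note on this example: entries written with ZipEntry.STORED must have their size, compressed size, and CRC-32 set before being written, which is exactly what the CRC32 object provides here. The following sketch shows how such an entry could be written to a ZipOutputStream; the output file name and the mimetype string are illustrative assumptions, not taken from the ASiCService source.

import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class StoredEntryDemo {
    public static void main(String[] args) throws Exception {
        byte[] mimeTypeBytes = "application/vnd.etsi.asic-e+zip".getBytes(StandardCharsets.US_ASCII);

        ZipEntry entry = new ZipEntry("mimetype");
        entry.setMethod(ZipEntry.STORED); // stored entries need size, compressed size and CRC up front
        entry.setSize(mimeTypeBytes.length);
        entry.setCompressedSize(mimeTypeBytes.length);
        CRC32 crc = new CRC32();
        crc.update(mimeTypeBytes);
        entry.setCrc(crc.getValue());

        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("out.zip"))) {
            zos.putNextEntry(entry); // would throw ZipException if size/CRC were missing for a STORED entry
            zos.write(mimeTypeBytes);
            zos.closeEntry();
        }
    }
}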

From source file:org.apache.hadoop.hdfs.TestRaidDfs.java

public static boolean validateFile(FileSystem fileSys, Path name, long length, long crc) throws IOException {

    long numRead = 0;
    CRC32 newcrc = new CRC32();
    FSDataInputStream stm = fileSys.open(name);
    final byte[] b = new byte[4192];
    int num = 0;
    while (num >= 0) {
        num = stm.read(b);
        if (num < 0) {
            break;
        }
        numRead += num;
        newcrc.update(b, 0, num);
    }
    stm.close();

    if (numRead != length) {
        LOG.info("Number of bytes read " + numRead + " does not match file size " + length);
        return false;
    }

    LOG.info(" Newcrc " + newcrc.getValue() + " old crc " + crc);
    if (newcrc.getValue() != crc) {
        LOG.info("CRC mismatch of file " + name + ": " + newcrc.getValue() + " vs. " + crc);
        return false;
    }
    return true;
}

From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java

/**
 * Loads the optional pre-compiled graph entry from the given tar file.
 *
 * @return graph buffer, or {@code null} if one was not found
 * @throws IOException if the tar file could not be read
 */
private ByteBuffer loadGraph() throws IOException {
    // read the graph metadata just before the tar index entry
    int pos = access.length() - 2 * BLOCK_SIZE - getEntrySize(index.remaining());
    ByteBuffer meta = access.read(pos - 16, 16);
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != GRAPH_MAGIC) {
        return null; // magic byte mismatch
    }

    if (count < 0 || bytes < count * 16 + 16 || BLOCK_SIZE + bytes > pos) {
        log.warn("Invalid graph metadata in tar file {}", file);
        return null; // impossible uuid and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer graph = access.read(pos - bytes, bytes);

    byte[] b = new byte[bytes - 16];
    graph.mark();
    graph.get(b);
    graph.reset();

    CRC32 checksum = new CRC32();
    checksum.update(b);
    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid graph checksum in tar file {}", file);
        return null; // checksum mismatch
    }

    return graph;
}
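
Note that the comparison above casts the long returned by getValue() to an int. The value always fits in 32 bits, so the cast only changes the sign interpretation, and masking with 0xFFFFFFFFL recovers the original unsigned value. A small illustration, independent of the TarReader code:

import java.util.zip.CRC32;

public class CrcAsInt {
    public static void main(String[] args) {
        CRC32 crc = new CRC32();
        crc.update(new byte[] { 1, 2, 3 });
        long value = crc.getValue(); // unsigned 32-bit value held in a long
        int asInt = (int) value;     // same low 32 bits, possibly negative
        System.out.println(value == (asInt & 0xFFFFFFFFL)); // prints true
    }
}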