Example usage for java.util.zip CRC32 CRC32

Introduction

On this page you can find example usage for the java.util.zip CRC32 constructor, CRC32().

Prototype

public CRC32() 

Document

Creates a new CRC32 object.
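
Before the scraped examples, a minimal self-contained sketch of the constructor in use; the class and variable names here are just for illustration:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class Crc32Demo {
    public static void main(String[] args) {
        CRC32 crc = new CRC32(); // fresh checksum, initial value 0
        byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
        crc.update(data);
        // getValue() returns the 32-bit CRC as an unsigned value in a long
        System.out.println(Long.toHexString(crc.getValue())); // prints 3610a686
    }
}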

Usage

From source file:odml.core.Value.java

/**
 * Base64 encodes the content if it represents either a File, URL, URI, or String that can be converted to a file.
 *
 * @param content - the content that should be encoded.
 * @return encoded content as {@link String}
 */
private String encodeContent(Object content) {
    if (content == null) {
        return null;
    }
    System.out.println("Encoding content: " + content.toString());
    String encoded = null;
    File file;
    if (content instanceof String) {
        try {
            URI uri = new URI((String) content);
            file = new File(uri);
        } catch (Exception e) {
            return (String) content;
        }
    } else if (content instanceof URL) {
        try {
            file = new File(((URL) content).toURI());
        } catch (Exception e) {
            System.out.println("Could not create a file from the specified URL: " + content.toString());
            file = null;
        }
    } else if (content instanceof URI) {
        try {
            file = new File((URI) content);
        } catch (Exception e) {
            System.out.println("Could not create a file from the specified URI: " + content.toString());
            file = null;
        }
    } else if (content instanceof File) {
        file = (File) content;
    } else {
        System.out.println("Could not create a File from input! Class: " + content.getClass().getSimpleName()
                + " Content: " + content.toString());
        file = null;
    }
    if (file == null) {
        return "";
    }
    Base64 enc = new Base64();
    //encode the file contents and record checksum, filename and encoder metadata
    try {
        byte[] bytes = enc.encode(getBytesFromFile(file));
        CRC32 crc = new CRC32();
        crc.update(bytes);
        this.setChecksum("CRC32$" + crc.getValue());
        this.setFilename(file.getName());
        this.setEncoder("Base64");
        encoded = new String(bytes, "UTF-8");
    } catch (Exception e) {
        System.out.println("An error occurred during encoding: " + e.getLocalizedMessage());
    }
    return encoded;
}
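
For context, a hedged sketch of what the matching verification step could look like on the decode side; verifyChecksum is an invented name, not part of odml.core.Value:

//Hypothetical counterpart to encodeContent: recompute the CRC32 over the
//same UTF-8 bytes that were checksummed during encoding and compare.
private boolean verifyChecksum(String encoded, String checksum) throws UnsupportedEncodingException {
    byte[] bytes = encoded.getBytes("UTF-8");
    CRC32 crc = new CRC32();
    crc.update(bytes);
    return ("CRC32$" + crc.getValue()).equals(checksum);
}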

From source file:org.apache.mnemonic.collections.DurableHashMapNGTest.java

@Test(enabled = true)
public void testMapValueBuffer() {
    DurableType gtypes[] = { DurableType.STRING, DurableType.BUFFER };
    DurableHashMap<String, DurableBuffer> map = DurableHashMapFactory.create(m_act, null, gtypes, 1, false);
    long bufVal;

    Checksum bufferCheckSum = new CRC32();
    bufferCheckSum.reset();

    Long handler = map.getHandler();
    for (int i = 0; i < 10; i++) {
        map.put("buffer" + i, genuptBuffer(m_act, bufferCheckSum, genRandSize()));
    }

    bufVal = bufferCheckSum.getValue();

    bufferCheckSum.reset();
    for (int i = 0; i < 10; i++) {
        DurableBuffer<NonVolatileMemAllocator> db = map.get("buffer" + i);
        Assert.assertNotNull(db);
        byte buf[] = new byte[db.get().capacity()];
        db.get().get(buf);
        bufferCheckSum.update(buf, 0, buf.length);
    }
    Assert.assertEquals(bufferCheckSum.getValue(), bufVal);

    bufferCheckSum.reset();
    DurableHashMap<String, DurableBuffer> restoredMap = DurableHashMapFactory.restore(m_act, null, gtypes,
            handler, false);
    for (int i = 0; i < 10; i++) {
        DurableBuffer<NonVolatileMemAllocator> db = restoredMap.get("buffer" + i);
        Assert.assertNotNull(db);
        byte buf[] = new byte[db.get().capacity()];
        db.get().get(buf);
        bufferCheckSum.update(buf, 0, buf.length);
    }
    Assert.assertEquals(bufferCheckSum.getValue(), bufVal);

    restoredMap.destroy();
}
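
Note that the test types the checksum as java.util.zip.Checksum rather than CRC32, so the algorithm could be swapped without touching any call sites; a minimal illustration (data is a placeholder):

Checksum sum = new CRC32();       // could be swapped for new Adler32()
sum.update(data, 0, data.length); // only Checksum interface methods are used
long value = sum.getValue();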

From source file:com.jivesoftware.os.amza.service.storage.WALStorage.java

private static long[] buildEndOfMergeMarker(long deltaWALId, long highestTxId, long oldestTimestamp,
        long oldestVersion, long oldestTombstonedTimestamp, long oldestTombstonedVersion, long keyCount,
        long clobberCount, long fpOfLastLeap, long updatesSinceLeap, long[] stripedKeyHighwaterTimestamps,
        int offset) {
    final long[] marker = new long[EOM_HIGHWATER_STRIPES_OFFSET + numKeyHighwaterStripes];
    marker[EOM_VERSION_INDEX] = 1; // version
    marker[EOM_CHECKSUM_INDEX] = 0; // placeholder checksum
    marker[EOM_DELTA_WAL_ID_INDEX] = deltaWALId;
    marker[EOM_HIGHEST_TX_ID_INDEX] = highestTxId;
    marker[EOM_OLDEST_TIMESTAMP_INDEX] = oldestTimestamp;
    marker[EOM_OLDEST_VERSION_INDEX] = oldestVersion;
    marker[EOM_OLDEST_TOMBSTONED_TIMESTAMP_INDEX] = oldestTombstonedTimestamp;
    marker[EOM_OLDEST_TOMBSTONED_VERSION_INDEX] = oldestTombstonedVersion;
    marker[EOM_KEY_COUNT_INDEX] = keyCount;
    marker[EOM_CLOBBER_COUNT_INDEX] = clobberCount;

    marker[EOM_FP_OF_LAST_LEAP_INDEX] = fpOfLastLeap;
    marker[EOM_UPDATES_SINCE_LAST_LEAP_INDEX] = updatesSinceLeap;

    System.arraycopy(stripedKeyHighwaterTimestamps, offset, marker, EOM_HIGHWATER_STRIPES_OFFSET,
            numKeyHighwaterStripes);

    CRC32 crC32 = new CRC32();
    byte[] hintsAsBytes = UIO.longsBytes(marker);
    crC32.update(hintsAsBytes, 16, hintsAsBytes.length - 16); // 16 skips the version and checksum
    marker[EOM_CHECKSUM_INDEX] = crC32.getValue();
    return marker;
}
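
The skip-the-header pattern can be reproduced without the Amza UIO helper; a sketch using a big-endian ByteBuffer (assuming, which this snippet does not confirm, that UIO.longsBytes lays the longs out big-endian):

import java.nio.ByteBuffer;
import java.util.zip.CRC32;

static long markerChecksum(long[] marker) {
    ByteBuffer buf = ByteBuffer.allocate(marker.length * 8);
    for (long l : marker) {
        buf.putLong(l); // big-endian by default
    }
    byte[] bytes = buf.array();
    CRC32 crc = new CRC32();
    crc.update(bytes, 16, bytes.length - 16); // skip the version and checksum words
    return crc.getValue();
}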

From source file:com.taobao.android.builder.tools.zip.ZipUtils.java

public static void rezip(File output, File srcDir, Map<String, ZipEntry> zipEntryMethodMap) throws Exception {
    if (output.isDirectory()) {
        throw new IOException("This is a directory!");
    }
    if (!output.getParentFile().exists()) {
        output.getParentFile().mkdirs();
    }

    if (!output.exists()) {
        output.createNewFile();
    }
    List fileList = getSubFiles(srcDir);
    ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(output));
    ZipEntry ze = null;
    byte[] buf = new byte[1024];
    int readLen = 0;
    for (int i = 0; i < fileList.size(); i++) {
        File f = (File) fileList.get(i);
        ze = new ZipEntry(getAbsFileName(srcDir.getPath(), f));
        ze.setSize(f.length());
        ze.setTime(f.lastModified());
        if (zipEntryMethodMap != null) {
            ZipEntry originEntry = zipEntryMethodMap.get(ze.getName());
            if (originEntry != null) {
                if (originEntry.getMethod() == STORED) {
                    ze.setCompressedSize(f.length());
                    InputStream in = new BufferedInputStream(new FileInputStream(f));
                    try {
                        CRC32 crc = new CRC32();
                        int c;
                        while ((c = in.read()) != -1) {
                            crc.update(c);
                        }
                        ze.setCrc(crc.getValue());
                    } finally {
                        in.close();
                    }
                }
                ze.setMethod(originEntry.getMethod());
            }
        }
        zos.putNextEntry(ze);
        InputStream is = new BufferedInputStream(new FileInputStream(f));
        while ((readLen = is.read(buf, 0, 1024)) != -1) {
            zos.write(buf, 0, readLen);
        }
        is.close();
    }
    zos.close();
}
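
The byte-by-byte CRC pass above is needed because ZipOutputStream rejects a STORED entry whose size and CRC are not set before putNextEntry; the constraint in isolation, as a minimal sketch:

import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

static void writeStoredEntry(ZipOutputStream zos, String name, byte[] data) throws IOException {
    CRC32 crc = new CRC32();
    crc.update(data);
    ZipEntry ze = new ZipEntry(name);
    ze.setMethod(ZipEntry.STORED);
    ze.setSize(data.length);   // STORED entries must declare their size...
    ze.setCrc(crc.getValue()); // ...and CRC before putNextEntry is called
    zos.putNextEntry(ze);
    zos.write(data);
    zos.closeEntry();
}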

From source file:org.apache.nifi.processors.standard.TailFile.java

/**
 * Updates member variables to reflect the "expected recovery checksum" and
 * seeks to the appropriate location in the tailed file, updating our
 * checksum, so that we are ready to proceed with the
 * {@link #onTrigger(ProcessContext, ProcessSession)} call.
 *
 * @param context the ProcessContext
 * @param stateValues the values that were recovered from state that was
 * previously stored. This Map should be populated with the keys defined in
 * {@link TailFileState.StateKeys}.
 * @param filePath the path of the file for which state must be recovered
 * @throws IOException if unable to seek to the appropriate location in the
 * tailed file.
 */
private void recoverState(final ProcessContext context, final Map<String, String> stateValues,
        final String filePath) throws IOException {

    final String prefix = MAP_PREFIX + states.get(filePath).getFilenameIndex() + '.';

    if (!stateValues.containsKey(prefix + TailFileState.StateKeys.FILENAME)) {
        resetState(filePath);
        return;
    }
    if (!stateValues.containsKey(prefix + TailFileState.StateKeys.POSITION)) {
        resetState(filePath);
        return;
    }
    if (!stateValues.containsKey(prefix + TailFileState.StateKeys.TIMESTAMP)) {
        resetState(filePath);
        return;
    }
    if (!stateValues.containsKey(prefix + TailFileState.StateKeys.LENGTH)) {
        resetState(filePath);
        return;
    }

    final String checksumValue = stateValues.get(prefix + TailFileState.StateKeys.CHECKSUM);
    final boolean checksumPresent = (checksumValue != null);
    final String storedStateFilename = stateValues.get(prefix + TailFileState.StateKeys.FILENAME);
    final long position = Long.parseLong(stateValues.get(prefix + TailFileState.StateKeys.POSITION));
    final long timestamp = Long.parseLong(stateValues.get(prefix + TailFileState.StateKeys.TIMESTAMP));
    final long length = Long.parseLong(stateValues.get(prefix + TailFileState.StateKeys.LENGTH));

    FileChannel reader = null;
    File tailFile = null;

    if (checksumPresent && filePath.equals(storedStateFilename)) {
        states.get(filePath).setExpectedRecoveryChecksum(Long.parseLong(checksumValue));

        // We have an expected checksum and the currently configured filename is the same as the state file.
        // We need to check if the existing file is the same as the one referred to in the state file based on
        // the checksum.
        final Checksum checksum = new CRC32();
        final File existingTailFile = new File(storedStateFilename);
        if (existingTailFile.length() >= position) {
            try (final InputStream tailFileIs = new FileInputStream(existingTailFile);
                    final CheckedInputStream in = new CheckedInputStream(tailFileIs, checksum)) {
                StreamUtils.copy(in, new NullOutputStream(), states.get(filePath).getState().getPosition());

                final long checksumResult = in.getChecksum().getValue();
                if (checksumResult == states.get(filePath).getExpectedRecoveryChecksum()) {
                    // Checksums match. This means that we want to resume reading from where we left off.
                    // So we will populate the reader object so that it will be used in onTrigger. If the
                    // checksums do not match, then we will leave the reader object null, so that the next
                    // call to onTrigger will result in a new Reader being created and starting at the
                    // beginning of the file.
                    getLogger().debug(
                            "When recovering state, checksum of tailed file matches the stored checksum. Will resume where left off.");
                    tailFile = existingTailFile;
                    reader = FileChannel.open(tailFile.toPath(), StandardOpenOption.READ);
                    getLogger().debug("Created FileChannel {} for {} in recoverState",
                            new Object[] { reader, tailFile });

                    reader.position(position);
                } else {
                    // we don't seek the reader to the position, so our reader will start at beginning of file.
                    getLogger().debug(
                            "When recovering state, checksum of tailed file does not match the stored checksum. Will begin tailing current file from beginning.");
                }
            }
        } else {
            // fewer bytes than our position, so we know we weren't already reading from this file. Keep reader at a position of 0.
            getLogger().debug(
                    "When recovering state, existing file to tail is only {} bytes but position flag is {}; "
                            + "this indicates that the file has rotated. Will begin tailing current file from beginning.",
                    new Object[] { existingTailFile.length(), position });
        }

        states.get(filePath).setState(new TailFileState(filePath, tailFile, reader, position, timestamp, length,
                checksum, ByteBuffer.allocate(65536)));
    } else {
        resetState(filePath);
    }

    getLogger().debug("Recovered state {}", new Object[] { states.get(filePath).getState() });
}
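
Stripped of the NiFi helpers (StreamUtils, NullOutputStream), the checksum-the-first-N-bytes idiom above reduces to a CheckedInputStream and a read loop; a self-contained sketch:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;

static long checksumFirstBytes(File file, long count) throws IOException {
    CRC32 crc = new CRC32();
    try (InputStream in = new CheckedInputStream(new FileInputStream(file), crc)) {
        byte[] buf = new byte[8192];
        long remaining = count;
        while (remaining > 0) {
            int n = in.read(buf, 0, (int) Math.min(buf.length, remaining));
            if (n < 0) {
                break; // file is shorter than expected
            }
            remaining -= n;
        }
    }
    return crc.getValue(); // CheckedInputStream updated crc as bytes were read
}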

From source file:org.apache.mnemonic.collections.DurableHashMapNGTest.java

@Test(enabled = true)
public void testMapValueChunk() {
    DurableType gtypes[] = { DurableType.STRING, DurableType.CHUNK };
    DurableHashMap<String, DurableChunk> map = DurableHashMapFactory.create(m_act, null, gtypes, 1, false);
    long chunkVal;

    Checksum chunkCheckSum = new CRC32();
    chunkCheckSum.reset();

    Long handler = map.getHandler();
    for (int i = 0; i < 10; i++) {
        map.put("chunk" + i, genuptChunk(m_act, chunkCheckSum, genRandSize()));
    }

    chunkVal = chunkCheckSum.getValue();
    chunkCheckSum.reset();

    for (int i = 0; i < 10; i++) {
        DurableChunk<NonVolatileMemAllocator> dc = map.get("chunk" + i);
        for (int j = 0; j < dc.getSize(); ++j) {
            byte b = unsafe.getByte(dc.get() + j);
            chunkCheckSum.update(b);
        }
    }
    Assert.assertEquals(chunkCheckSum.getValue(), chunkVal);

    chunkCheckSum.reset();
    DurableHashMap<String, DurableChunk> restoredMap = DurableHashMapFactory.restore(m_act, null, gtypes,
            handler, false);

    for (int i = 0; i < 10; i++) {
        DurableChunk<NonVolatileMemAllocator> dc = restoredMap.get("chunk" + i);
        for (int j = 0; j < dc.getSize(); ++j) {
            byte b = unsafe.getByte(dc.get() + j);
            chunkCheckSum.update(b);
        }
    }
    Assert.assertEquals(chunkCheckSum.getValue(), chunkVal);

    restoredMap.destroy();
}
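
Unlike the buffer test, this one feeds the checksum a single byte at a time through update(int), which consumes only the low eight bits of its argument; the two forms agree:

CRC32 whole = new CRC32();
CRC32 byByte = new CRC32();
byte[] data = {1, 2, 3};
whole.update(data);   // whole array at once
for (byte b : data) {
    byByte.update(b); // byte widened to int; only the low 8 bits are used
}
assert whole.getValue() == byByte.getValue();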

From source file:org.apache.hadoop.raid.Encoder.java

/**
 * Encodes a source file stripe by stripe and writes the resulting parity blocks to an output stream.
 *
 * The encoder generates codec.parityLength parity blocks for a source file stripe.
 * Since there is only one output provided, some blocks are written out to
 * files before being written out to the output.
 *
 * @param blockSize The block size for the source/parity files.
 * @param out The destination for the encoded parity blocks.
 * @throws InterruptedException
 */
private void encodeFileToStream(StripeReader sReader, long blockSize, FSDataOutputStream out, CRC32[] crcOuts,
        Progressable reporter) throws IOException, InterruptedException {
    OutputStream[] tmpOuts = new OutputStream[codec.parityLength];
    // One parity block can be written directly to out, rest to local files.
    tmpOuts[0] = out;
    File[] tmpFiles = new File[codec.parityLength - 1];
    for (int i = 0; i < codec.parityLength - 1; i++) {
        tmpFiles[i] = File.createTempFile("parity", "_" + i);
        LOG.info("Created tmp file " + tmpFiles[i]);
        tmpFiles[i].deleteOnExit();
    }
    int finishedParityBlockIdx = 0;
    List<Integer> errorLocations = new ArrayList<Integer>();
    try {
        // Loop over stripe
        boolean redo;
        while (sReader.hasNext()) {
            reporter.progress();
            StripeInputInfo stripeInputInfo = null;
            InputStream[] blocks = null;
            // Create input streams for blocks in the stripe.
            long currentStripeIdx = sReader.getCurrentStripeIdx();
            stripeInputInfo = sReader.getNextStripeInputs();
            // The offset of first temporary output stream
            long encodeStartOffset = out.getPos();
            int retry = 3;
            do {
                redo = false;
                retry--;
                try {
                    blocks = stripeInputInfo.getInputs();
                    CRC32[] curCRCOuts = new CRC32[codec.parityLength];

                    if (crcOuts != null) {
                        for (int i = 0; i < codec.parityLength; i++) {
                            crcOuts[finishedParityBlockIdx + i] = curCRCOuts[i] = new CRC32();
                        }
                    }
                    // Create output streams to the temp files.
                    for (int i = 0; i < codec.parityLength - 1; i++) {
                        tmpOuts[i + 1] = new FileOutputStream(tmpFiles[i]);
                    }
                    // Call the implementation of encoding.
                    encodeStripe(blocks, blockSize, tmpOuts, curCRCOuts, reporter, true, errorLocations);
                } catch (IOException e) {
                    if (out.getPos() > encodeStartOffset) {
                        // Partial data is already written, throw the exception
                        InjectionHandler.processEventIO(InjectionEvent.RAID_ENCODING_PARTIAL_STRIPE_ENCODED);
                        throw e;
                    }
                    // try to fix the missing block in the stripe using stripe store.
                    if ((e instanceof BlockMissingException || e instanceof ChecksumException)
                            && codec.isDirRaid) {
                        if (retry <= 0) {
                            throw e;
                        }
                        redo = true;
                        CorruptBlockReconstructor constructor = new CorruptBlockReconstructor(conf);

                        Set<Path> srcPaths = new HashSet<Path>();
                        for (int idx : errorLocations) {
                            Path srcPath = stripeInputInfo.getSrcPaths()[idx];
                            if (srcPath != null) {
                                srcPaths.add(srcPath);
                            }
                        }

                        for (Path srcPath : srcPaths) {
                            Decoder decoder = new Decoder(conf, codec);
                            decoder.connectToStore(srcPath);
                            LOG.info("In Encoding: try to reconstruct the file: " + srcPath);
                            // will throw exception if it fails to reconstruct the lost
                            // blocks.
                            constructor.processFile(srcPath, null, decoder, true, null);
                            LOG.info("In Encoding: finished to reconstruct the file: " + srcPath);
                        }
                    } else {
                        throw e;
                    }
                } finally {
                    if (blocks != null) {
                        RaidUtils.closeStreams(blocks);
                    }
                }
                if (redo) {
                    // rebuild the inputs.
                    stripeInputInfo = sReader.getStripeInputs(currentStripeIdx);
                }
            } while (redo);

            // Close output streams to the temp files and write the temp files
            // to the output provided.
            for (int i = 0; i < codec.parityLength - 1; i++) {
                tmpOuts[i + 1].close();
                tmpOuts[i + 1] = null;
                InputStream in = new FileInputStream(tmpFiles[i]);
                RaidUtils.copyBytes(in, out, writeBufs[i], blockSize);
                reporter.progress();
            }
            finishedParityBlockIdx += codec.parityLength;
        }
    } finally {
        for (int i = 0; i < codec.parityLength - 1; i++) {
            if (tmpOuts[i + 1] != null) {
                tmpOuts[i + 1].close();
            }
            tmpFiles[i].delete();
            LOG.info("Deleted tmp file " + tmpFiles[i]);
        }
    }
}

From source file:org.apache.hadoop.hdfs.TestRaidDfs.java

public static long createTestFile(FileSystem fileSys, Path name, int repl, int numBlocks, long blocksize)
        throws IOException {
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blocksize);
    // fill random data into file
    final byte[] b = new byte[(int) blocksize];
    for (int i = 0; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    stm.close();
    return crc.getValue();
}
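
The returned CRC can then be used to validate the file after reading it back; a hedged sketch of the read side (verifyTestFile is an invented name, not from TestRaidDfs):

public static boolean verifyTestFile(FileSystem fileSys, Path name, long expectedCrc) throws IOException {
    CRC32 crc = new CRC32();
    byte[] buf = new byte[4096];
    try (FSDataInputStream in = fileSys.open(name)) {
        int n;
        while ((n = in.read(buf)) != -1) {
            crc.update(buf, 0, n);
        }
    }
    return crc.getValue() == expectedCrc;
}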

From source file:com.jivesoftware.os.amza.service.storage.WALStorage.java

private long[] loadEndOfMergeMarker(long deltaWALId, byte[] row) {
    long[] marker = UIO.bytesLongs(row);
    if (marker[EOM_VERSION_INDEX] != 1) {
        return null;
    }
    CRC32 crC32 = new CRC32();
    byte[] hintsAsBytes = UIO.longsBytes(marker);
    crC32.update(hintsAsBytes, 16, hintsAsBytes.length - 16); // 16 skips the version and checksum
    if (marker[EOM_CHECKSUM_INDEX] != crC32.getValue()) {
        return null;
    }
    if (deltaWALId > -1 && marker[EOM_DELTA_WAL_ID_INDEX] >= deltaWALId) {
        return null;
    }
    return marker;
}
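
This is the read-side counterpart of buildEndOfMergeMarker shown earlier: both run the CRC32 over the serialized longs starting at byte 16, skipping the version and checksum words, and the marker is accepted only when the recomputed value matches the stored checksum word.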

From source file:org.openbravo.erpCommon.obps.ActivationKey.java

public String getOpsLogId() {
    CRC32 crc = new CRC32();
    crc.update(getPublicKey().getBytes());
    return Long.toHexString(crc.getValue());
}
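
One caveat worth noting: getBytes() with no argument uses the platform default charset, so the same key could hash to different IDs on differently configured machines. A hedged variant that pins the charset (assuming UTF-8 suits the key format):

public String getOpsLogId() {
    CRC32 crc = new CRC32();
    // charset pinned so the CRC is stable across platforms
    crc.update(getPublicKey().getBytes(java.nio.charset.StandardCharsets.UTF_8));
    return Long.toHexString(crc.getValue());
}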