Example usage for java.util.zip CRC32 update

List of usage examples for java.util.zip CRC32 update

Introduction

On this page you can find example usage for java.util.zip CRC32 update, collected from open-source projects.

Prototype

@Override
public void update(ByteBuffer buffer) 

Document

Updates the CRC-32 checksum with the bytes from the specified buffer.
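
A minimal, self-contained sketch of the method in isolation (the class name and input string below are made up for illustration):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class Crc32ByteBufferExample {
    public static void main(String[] args) {
        // update(ByteBuffer) consumes the bytes between the buffer's position
        // and limit, and leaves the position at the limit afterwards.
        ByteBuffer buffer = ByteBuffer.wrap("hello world".getBytes(StandardCharsets.UTF_8));

        CRC32 checksum = new CRC32();
        checksum.update(buffer);

        // getValue() returns the checksum as an unsigned 32-bit value in a long.
        System.out.printf("CRC-32: %08x%n", checksum.getValue());
    }
}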

Usage

From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java

/**
 * Loads the optional pre-compiled graph entry from the given tar file.
 *
 * @return graph buffer, or {@code null} if one was not found
 * @throws IOException if the tar file could not be read
 */
private ByteBuffer loadGraph() throws IOException {
    // read the graph metadata just before the tar index entry
    int pos = access.length() - 2 * BLOCK_SIZE - getEntrySize(index.remaining());
    ByteBuffer meta = access.read(pos - 16, 16);
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != GRAPH_MAGIC) {
        return null; // magic byte mismatch
    }

    if (count < 0 || bytes < count * 16 + 16 || BLOCK_SIZE + bytes > pos) {
        log.warn("Invalid graph metadata in tar file {}", file);
        return null; // impossible uuid and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer graph = access.read(pos - bytes, bytes);

    byte[] b = new byte[bytes - 16];
    graph.mark();
    graph.get(b);
    graph.reset();

    CRC32 checksum = new CRC32();
    checksum.update(b);
    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid graph checksum in tar file {}", file);
        return null; // checksum mismatch
    }

    return graph;
}
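
The byte[] copy above exists only because the checksum has to skip the 16-byte metadata trailer at the end of the buffer. A hedged alternative (not part of the Jackrabbit code) is to checksum a duplicate of the buffer whose limit excludes the trailer, avoiding the copy; the payload below is a stand-in:

import java.nio.ByteBuffer;
import java.util.zip.CRC32;

public class Crc32BufferSliceExample {
    public static void main(String[] args) {
        // Hypothetical layout mirroring the example above: payload bytes
        // followed by a 16-byte metadata trailer that must not be checksummed.
        ByteBuffer graph = ByteBuffer.allocate(48 + 16);
        graph.put(new byte[48]);   // payload
        graph.put(new byte[16]);   // metadata trailer
        graph.flip();

        // duplicate() shares the content but has an independent position and
        // limit, so the original buffer stays untouched for later parsing.
        ByteBuffer data = graph.duplicate();
        data.limit(graph.remaining() - 16);

        CRC32 checksum = new CRC32();
        checksum.update(data);     // consumes position..limit of the duplicate
        System.out.printf("CRC-32: %08x%n", checksum.getValue());
    }
}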

From source file:org.tangram.components.CodeExporter.java

@LinkAction("/codes.zip")
public TargetDescriptor codes(HttpServletRequest request, HttpServletResponse response) throws IOException {
    if (!request.getRequestURI().endsWith(".zip")) {
        response.sendError(HttpServletResponse.SC_NOT_FOUND);
        return null;
    } // if
    if (request.getAttribute(Constants.ATTRIBUTE_ADMIN_USER) == null) {
        throw new IOException("User may not execute action");
    } // if

    long now = System.currentTimeMillis();

    response.setContentType("application/x-zip-compressed");

    CRC32 crc = new CRC32();

    ZipOutputStream zos = new ZipOutputStream(response.getOutputStream());
    zos.setComment("Tangram Repository Codes");
    zos.setLevel(9);
    Collection<CodeResource> codes = codeResourceCache.getCodes();
    for (CodeResource code : codes) {
        if (StringUtils.isNotBlank(code.getAnnotation())) {
            String mimeType = CodeHelper.getNormalizedMimeType(code.getMimeType());
            String folder = CodeHelper.getFolder(mimeType);
            String extension = CodeHelper.getExtension(mimeType);
            if (mimeType.startsWith("text/")) {
                byte[] bytes = code.getCodeText().getBytes("UTF-8");
                ZipEntry ze = new ZipEntry(folder + "/" + getFilename(code) + extension);
                ze.setTime(now);
                crc.reset();
                crc.update(bytes);
                ze.setCrc(crc.getValue());
                zos.putNextEntry(ze);
                zos.write(bytes);
                zos.closeEntry();
            } // if
        } // if
    } // for
    zos.finish();
    zos.close();

    return TargetDescriptor.DONE;
}
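
Precomputing the CRC as above is optional for the default DEFLATED method, because ZipOutputStream computes the checksum itself while compressing; it becomes mandatory for STORED (uncompressed) entries. A minimal sketch of that case, with a made-up entry name and output file, not taken from the Tangram sources:

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class StoredEntryExample {
    public static void main(String[] args) throws IOException {
        byte[] bytes = "example content".getBytes(StandardCharsets.UTF_8);

        CRC32 crc = new CRC32();
        crc.update(bytes);

        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("codes.zip"))) {
            ZipEntry ze = new ZipEntry("text/example.txt"); // hypothetical entry name
            ze.setMethod(ZipEntry.STORED);
            // STORED entries need size, compressed size and CRC before putNextEntry()
            ze.setSize(bytes.length);
            ze.setCompressedSize(bytes.length);
            ze.setCrc(crc.getValue());

            zos.putNextEntry(ze);
            zos.write(bytes);
            zos.closeEntry();
        }
    }
}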

From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java

/**
 * Loads the optional pre-compiled graph entry from the given tar file.
 *
 * @return graph buffer, or {@code null} if one was not found
 * @throws IOException if the tar file could not be read
 */
private ByteBuffer loadGraph() throws IOException {
    // read the graph metadata just before the tar index entry
    int pos = access.length() - 2 * BLOCK_SIZE - getEntrySize(index.remaining() + 16);
    ByteBuffer meta = access.read(pos - 16, 16);
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != GRAPH_MAGIC) {
        return null; // magic byte mismatch
    }

    if (count < 0 || bytes < count * 16 + 16 || BLOCK_SIZE + bytes > pos) {
        log.warn("Invalid graph metadata in tar file {}", file);
        return null; // impossible uuid and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer graph = access.read(pos - bytes, bytes);

    byte[] b = new byte[bytes - 16];
    graph.mark();
    graph.get(b);
    graph.reset();

    CRC32 checksum = new CRC32();
    checksum.update(b);
    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid graph checksum in tar file {}", file);
        return null; // checksum mismatch
    }

    hasGraph = true;
    return graph;
}

From source file:org.apache.hadoop.raid.TestBlockCopier.java

private long[] createRandomFile(Path file, int repl, int numBlocks) throws IOException {

    long[] crcs = new long[numBlocks];
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(file, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, BLOCK_SIZE);
    // Write whole blocks.
    byte[] b = new byte[(int) BLOCK_SIZE];
    for (int i = 1; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);

        crc.update(b);
        crcs[i - 1] = crc.getValue();
        crc.reset();
    }
    // Write partial block.
    b = new byte[(int) BLOCK_SIZE / 2 - 1];
    rand.nextBytes(b);
    stm.write(b);
    crc.update(b);
    crcs[crcs.length - 1] = crc.getValue();

    stm.close();
    return crcs;//crc.getValue();
}
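
The test above resets the CRC between blocks to obtain one checksum per block. A hedged sketch (not from the Hadoop test) of the same per-block pattern using the ByteBuffer overload this page documents; it assumes the data buffer starts at position 0:

import java.nio.ByteBuffer;
import java.util.zip.CRC32;

public class PerBlockChecksums {
    // Compute one CRC-32 per fixed-size block by checksumming limited
    // duplicates of the buffer and resetting the checksum in between.
    static long[] blockChecksums(ByteBuffer data, int blockSize) {
        int numBlocks = (data.remaining() + blockSize - 1) / blockSize;
        long[] crcs = new long[numBlocks];
        CRC32 crc = new CRC32();
        for (int i = 0; i < numBlocks; i++) {
            ByteBuffer block = data.duplicate();
            block.position(i * blockSize);
            block.limit(Math.min((i + 1) * blockSize, data.limit()));
            crc.reset();
            crc.update(block);
            crcs[i] = crc.getValue();
        }
        return crcs;
    }

    public static void main(String[] args) {
        ByteBuffer data = ByteBuffer.wrap(new byte[10_000]);
        System.out.println(blockChecksums(data, 4096).length); // prints 3
    }
}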

From source file:com.oneops.sensor.Sensor.java

/**
 * Adds the ci thresholds.
 *
 * @param ciId       the ci id
 * @param manifestId the manifest id
 * @param monitor    the monitor
 * @throws SensorException
 */
public void addCiThresholdsList(long ciId, long manifestId, List<CmsRfcCISimple> monitors)
        throws SensorException {

    if (!isInited || (manifestId % this.poolSize) != this.instanceId) {
        // this is not my manifestId; it will be posted on the mgmt queue for another instance to pick up
        throw new SensorException("Got Monitor request for the wrong instance - manifestId:" + manifestId
                + "; pool size:" + this.poolSize + "; my insatnceId:" + this.instanceId);
    }

    Set<String> processedMonitors = new HashSet<>();

    for (CmsRfcCISimple monitor : monitors) {

        if (monitor.getCiAttributes().containsKey("enable")
                && monitor.getCiAttributes().get("enable").equals("false")) {
            continue;
        }

        long checksum = 0;

        String thresholdsJson = monitor.getCiAttributes().get("thresholds");
        String source = monitor.getCiName();

        if (thresholdsJson != null) {
            CRC32 crc = new CRC32();
            String crcStr = thresholdsJson + monitor.getCiAttributes().get(HEARTBEAT)
                    + monitor.getCiAttributes().get(DURATION);
            crc.update(crcStr.getBytes());
            checksum = crc.getValue();
        } else {
            // need to clean up thresholds
            continue;
        }

        processedMonitors.add(source);

        //String key = manifestId + source;
        ThresholdStatements trStmt = loadedThresholds.containsKey(manifestId)
                ? loadedThresholds.get(manifestId).get(source)
                : null;
        if (trStmt == null) {
            //load stmts
            persistAndaddToEngine(ciId, manifestId, source, checksum, thresholdsJson,
                    monitor.getCiAttributes().get(HEARTBEAT).equals("true"),
                    monitor.getCiAttributes().get(DURATION));
        } else if (trStmt.getChecksum() != checksum
                || monitor.getCiAttributes().get(HEARTBEAT).equals("true") != trStmt.isHeartbeat()) {
            // if the checksum is different we assume there was a monitor update;
            // we need to remove the old stmts and insert new ones,
            // but before that let's insert a fake event to clear out heartbeats
            // if this new monitor is not a heartbeat one
            if (!monitor.getCiAttributes().get(HEARTBEAT).equals("true")) {
                insertFakeEvent(ciId, manifestId, source);
            }
            for (String eplName : trStmt.getStmtNames()) {
                removeStmtFromEngine(manifestId, source, eplName);
            }
            loadedThresholds.get(manifestId).remove(source);

            persistAndaddToEngine(ciId, manifestId, source, checksum, thresholdsJson,
                    monitor.getCiAttributes().get(HEARTBEAT).equals("true"),
                    monitor.getCiAttributes().get(DURATION));
        }
    }
    // now we need to clean up the deleted monitors
    if (loadedThresholds.containsKey(manifestId)) {
        Set<String> monsToRemove = new HashSet<>();
        for (String loadedMon : loadedThresholds.get(manifestId).keySet()) {
            if (!processedMonitors.contains(loadedMon)) {
                //this is an old monitor that needs to be removed
                //insert a fake event to shut down the heartbeat retrigger
                insertFakeEvent(ciId, manifestId, loadedMon);
                //and do the same for the rest of the bom CIs
                for (long ciMapedBomId : tsDao.getManifestCiIds(manifestId)) {
                    insertFakeEvent(ciMapedBomId, manifestId, loadedMon);
                }

                ThresholdStatements trStmt = loadedThresholds.get(manifestId).get(loadedMon);
                for (String eplName : trStmt.getStmtNames()) {
                    removeStmtFromEngine(manifestId, loadedMon, eplName);
                }
                monsToRemove.add(loadedMon);
                tsDao.removeManifestThreshold(manifestId, loadedMon);
            }
        }
        for (String monToRemove : monsToRemove) {
            loadedThresholds.get(manifestId).remove(monToRemove);
        }
    }
}
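
The checksum above is built from String.getBytes(), which uses the platform default charset, so the same thresholds could hash differently on differently configured hosts. A minimal sketch (not from the OneOps sources, with illustrative names) of the same idea with an explicit charset:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class StableStringChecksum {
    // Concatenate the attributes and checksum their UTF-8 bytes so the
    // result does not depend on the platform default encoding.
    static long checksumOf(String thresholdsJson, String heartbeat, String duration) {
        CRC32 crc = new CRC32();
        crc.update((thresholdsJson + heartbeat + duration).getBytes(StandardCharsets.UTF_8));
        return crc.getValue();
    }

    public static void main(String[] args) {
        System.out.println(checksumOf("{\"cpu\":90}", "true", "60"));
    }
}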

From source file:srebrinb.compress.sevenzip.SevenZFile.java

private Archive readHeaders(final byte[] password) throws IOException {
    ByteBuffer buf = ByteBuffer.allocate(12 /* signature + 2 bytes version + 4 bytes CRC */)
            .order(ByteOrder.LITTLE_ENDIAN);
    readFully(buf);
    final byte[] signature = new byte[6];
    buf.get(signature);
    if (!Arrays.equals(signature, sevenZSignature)) {
        throw new IOException("Bad 7z signature");
    }
    // 7zFormat.txt has it wrong - it's first major then minor
    final byte archiveVersionMajor = buf.get();
    final byte archiveVersionMinor = buf.get();
    if (archiveVersionMajor != 0) {
        throw new IOException(
                String.format("Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor));
    }

    final long startHeaderCrc = 0xffffFFFFL & buf.getInt();
    final StartHeader startHeader = readStartHeader(startHeaderCrc);

    final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize;
    if (nextHeaderSizeInt != startHeader.nextHeaderSize) {
        throw new IOException("cannot handle nextHeaderSize " + startHeader.nextHeaderSize);
    }
    channel.position(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset);
    buf = ByteBuffer.allocate(nextHeaderSizeInt).order(ByteOrder.LITTLE_ENDIAN);
    readFully(buf);
    final CRC32 crc = new CRC32();
    crc.update(buf.array());
    if (startHeader.nextHeaderCrc != crc.getValue()) {
        throw new IOException("NextHeader CRC mismatch");
    }

    Archive archive = new Archive();
    int nid = getUnsignedByte(buf);
    if (nid == NID.kEncodedHeader) {
        buf = readEncodedHeader(buf, archive, password);
        // Archive gets rebuilt with the new header
        archive = new Archive();
        nid = getUnsignedByte(buf);
    }
    if (nid == NID.kHeader) {
        readHeader(buf, archive);
    } else {
        throw new IOException("Broken or unsupported archive: no Header");
    }
    return archive;
}
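
Since this page documents update(ByteBuffer), note that the crc.update(buf.array()) call above could, on Java 8 and later, pass the buffer itself (assuming readFully leaves it flipped so position..limit covers the header bytes); that also works for direct buffers, which expose no backing array. A minimal sketch of that variant, not taken from the original sources:

import java.nio.ByteBuffer;
import java.util.zip.CRC32;

public class HeaderCrcCheck {
    // Checksum a header buffer directly and rewind it afterwards, because
    // update(ByteBuffer) advances the position to the limit.
    static boolean crcMatches(ByteBuffer header, long expectedCrc) {
        CRC32 crc = new CRC32();
        crc.update(header);   // consumes position..limit
        header.rewind();      // restore position 0 for subsequent parsing
        return crc.getValue() == expectedCrc;
    }

    public static void main(String[] args) {
        byte[] raw = { 1, 2, 3, 4 };
        CRC32 reference = new CRC32();
        reference.update(raw);
        System.out.println(crcMatches(ByteBuffer.wrap(raw), reference.getValue())); // true
    }
}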

From source file:bobs.is.compress.sevenzip.SevenZFile.java

private Archive readHeaders(final byte[] password) throws IOException {
    final byte[] signature = new byte[6];
    file.readFully(signature);
    if (!Arrays.equals(signature, sevenZSignature)) {
        throw new IOException("Bad 7z signature");
    }
    // 7zFormat.txt has it wrong - it's first major then minor
    final byte archiveVersionMajor = file.readByte();
    final byte archiveVersionMinor = file.readByte();
    if (archiveVersionMajor != 0) {
        throw new IOException(
                String.format("Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor));
    }

    final long startHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(file.readInt());
    final StartHeader startHeader = readStartHeader(startHeaderCrc);

    final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize;
    if (nextHeaderSizeInt != startHeader.nextHeaderSize) {
        throw new IOException("cannot handle nextHeaderSize " + startHeader.nextHeaderSize);
    }
    file.seek(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset);
    final byte[] nextHeader = new byte[nextHeaderSizeInt];
    file.readFully(nextHeader);
    final CRC32 crc = new CRC32();
    crc.update(nextHeader);
    if (startHeader.nextHeaderCrc != crc.getValue()) {
        throw new IOException("NextHeader CRC mismatch");
    }

    final ByteArrayInputStream byteStream = new ByteArrayInputStream(nextHeader);
    DataInputStream nextHeaderInputStream = new DataInputStream(byteStream);
    Archive archive = new Archive();
    int nid = nextHeaderInputStream.readUnsignedByte();
    if (nid == NID.kEncodedHeader) {
        nextHeaderInputStream = readEncodedHeader(nextHeaderInputStream, archive, password);
        // Archive gets rebuilt with the new header
        archive = new Archive();
        nid = nextHeaderInputStream.readUnsignedByte();
    }
    if (nid == NID.kHeader) {
        readHeader(nextHeaderInputStream, archive);
        nextHeaderInputStream.close();
    } else {
        throw new IOException("Broken or unsupported archive: no Header");
    }
    return archive;
}

From source file:org.apache.hadoop.raid.TestBlockCopier.java

private long[] createRandomFileDispersed(Path file, int numBlocks, DatanodeDescriptor primaryNode,
        DatanodeDescriptor altNode) throws IOException, InterruptedException {

    BlockPlacementPolicyFakeData bp = BlockPlacementPolicyFakeData.lastInstance;
    DatanodeDescriptor tmp = bp.overridingDatanode;

    final int repl = 1;
    long[] crcs = new long[numBlocks];
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(file, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, BLOCK_SIZE);

    // Create the first block on the alt node
    bp.overridingDatanode = altNode;

    // fill random data into file
    final byte[] b = new byte[(int) BLOCK_SIZE];
    LOG.info("Writing first block (alt. host)");
    rand.nextBytes(b);
    stm.write(b);
    crc.update(b);
    crcs[0] = crc.getValue();

    stm.flush();
    Thread.sleep(1000); // What a hack. Le sigh.

    // Now we want to write on the primary node
    bp.overridingDatanode = primaryNode;

    // Write the rest of the blocks on primaryNode
    for (int i = 1; i < numBlocks; i++) {
        LOG.info("Writing block number " + i + " (primary host)");

        rand.nextBytes(b);
        stm.write(b);
        crc.reset();
        crc.update(b);
        crcs[i] = crc.getValue();
    }
    stm.close();
    Thread.sleep(1000);

    // Reset this guy
    bp.overridingDatanode = tmp;

    return crcs;
}

From source file:de.mpg.escidoc.services.dataacquisition.DataHandlerBean.java

/**
 * fetch data from a given url.
 * 
 * @param url
 * @return byte[]
 * @throws SourceNotAvailableException
 * @throws RuntimeException
 * @throws AccessException
 */
public byte[] fetchMetadatafromURL(URL url)
        throws SourceNotAvailableException, RuntimeException, AccessException {
    byte[] input = null;
    URLConnection conn = null;
    Date retryAfter = null;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    try {
        conn = ProxyHelper.openConnection(url);
        HttpURLConnection httpConn = (HttpURLConnection) conn;
        int responseCode = httpConn.getResponseCode();
        switch (responseCode) {
        case 503:
            String retryAfterHeader = conn.getHeaderField("Retry-After");
            if (retryAfterHeader != null) {
                SimpleDateFormat dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
                retryAfter = dateFormat.parse(retryAfterHeader);
                this.logger.debug("Source responded with 503, retry after " + retryAfter + ".");
                throw new SourceNotAvailableException(retryAfter);
            }
            break;
        case 302:
            String alternativeLocation = conn.getHeaderField("Location");
            return fetchMetadatafromURL(new URL(alternativeLocation));
        case 200:
            this.logger.info("Source responded with 200.");
            // Fetch file
            GetMethod method = new GetMethod(url.toString());
            HttpClient client = new HttpClient();
            ProxyHelper.executeMethod(client, method);
            input = method.getResponseBody();
            httpConn.disconnect();
            // Create zip file with fetched file
            ZipEntry ze = new ZipEntry("unapi");
            ze.setSize(input.length);
            ze.setTime(this.currentDate());
            CRC32 crc321 = new CRC32();
            crc321.update(input);
            ze.setCrc(crc321.getValue());
            zos.putNextEntry(ze);
            zos.write(input);
            zos.flush();
            zos.closeEntry();
            zos.close();
            this.setContentType("application/zip");
            this.setFileEnding(".zip");
            break;
        case 403:
            throw new AccessException("Access to url " + url + " is restricted.");
        default:
            throw new RuntimeException("An error occurred during importing from external system: "
                    + responseCode + ": " + httpConn.getResponseMessage() + ".");
        }
    } catch (AccessException e) {
        this.logger.error("Access denied.", e);
        throw new AccessException(url.toString());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    return baos.toByteArray();
}

From source file:org.apache.hadoop.raid.TestDirectoryRaidEncoder.java

private long createDirectoryFile(FileSystem fileSys, Path name, int repl, long[] fileSizes, long[] blockSizes,
        int[] seeds, long blockSize) throws IOException {
    CRC32 crc = new CRC32();
    assert fileSizes.length == blockSizes.length;
    assert fileSizes.length == seeds.length;
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blockSize);
    byte[] zeros = new byte[(int) (blockSize)];
    for (int j = 0; j < zeros.length; j++) {
        zeros[j] = 0;
    }
    // fill random data into file
    for (int i = 0; i < fileSizes.length; i++) {
        assert blockSizes[i] <= blockSize;
        byte[] b = new byte[(int) blockSizes[i]];
        long numBlocks = fileSizes[i] / blockSizes[i];
        Random rand = new Random(seeds[i]);
        for (int j = 0; j < numBlocks; j++) {
            rand.nextBytes(b);
            stm.write(b);
            crc.update(b);
            int zeroLen = (int) (blockSize - blockSizes[i]);
            stm.write(zeros, 0, zeroLen);
            crc.update(zeros, 0, zeroLen);
        }
        long lastBlock = fileSizes[i] - numBlocks * blockSizes[i];
        if (lastBlock > 0) {
            b = new byte[(int) lastBlock];
            rand.nextBytes(b);
            stm.write(b);
            crc.update(b);
            if (i + 1 < fileSizes.length) {
                // Not last block of file, write zero
                int zeroLen = (int) (blockSize - lastBlock);
                stm.write(zeros, 0, zeroLen);
                crc.update(zeros, 0, zeroLen);
            }
        }
    }
    stm.close();
    return crc.getValue();
}