Example usage for java.util.zip Checksum getValue

List of usage examples for java.util.zip Checksum getValue

Introduction

On this page you can find example usages of the java.util.zip.Checksum#getValue method.

Prototype

public long getValue();

Source Link

Document

Returns the current checksum value.

Usage

From source file:com.nridge.connector.common.con_com.crawl.CrawlQueue.java

/**
 * Generates a pseudo-unique crawl id by hashing a freshly generated
 * random UUID string with a CRC32 checksum.
 *
 * @return the CRC32 value (an unsigned 32-bit quantity held in a long)
 *         of the random UUID's string bytes
 */
private long nextCrawlId() {
    byte[] uuidBytes = UUID.randomUUID().toString().getBytes();
    Checksum crc = new CRC32();
    crc.update(uuidBytes, 0, uuidBytes.length);
    return crc.getValue();
}

From source file:com.cisco.dvbu.ps.deploytool.services.RegressionManagerUtils.java

/**
 * Appends a CRC32 checksum of the entire query text to the end of a resource URL,
 * after stripping any double-quote characters from the URL.
 *
 * Example:
 *   incoming from clause                outgoing result
 *   --------------------                ---------------
 *   CAT1.SCH1.ViewSales             --> CAT1.SCH1.ViewSales_1717783081
 *
 * @param query the query text to fingerprint
 * @param resourceURL the base resource URL to decorate
 * @return resourceURL with all double quotes removed and "_&lt;checksum&gt;" appended
 */
public static String appendUrlChecksum(String query, String resourceURL) {
    /* 2015-07-06 mtinius - Adding a checksum to the URL allows for unique identification of queries that invoke the same table.
     * 2015-10-13 mtinius - Moved this code to a separate method from getTableUrl() as it was interfering with the FUNCTIONAL test.
     */
    // Normalize whitespace: line feeds and carriage returns become spaces, the
    // result is trimmed, and each pair of adjacent spaces collapses to one.
    String normalizedQuery = query.replace("\n", " ").replaceAll("\r", " ").trim().replaceAll("  ", " ");

    // CRC32 over the normalized query bytes yields a stable fingerprint, so
    // distinct queries hitting the same table still get distinct URLs.
    Checksum crc = new CRC32();
    crc.reset();
    byte[] queryBytes = normalizedQuery.getBytes();
    crc.update(queryBytes, 0, queryBytes.length);

    // Strip any double-quote characters from the URL, then append the checksum.
    return resourceURL.replaceAll("\"", "") + "_" + crc.getValue();
}

From source file:org.apache.mnemonic.collections.DurableHashMapNGTest.java

/**
 * Verifies that DurableBuffer values stored in a DurableHashMap survive a
 * restore cycle: the checksum over all retrieved buffer contents must match
 * the checksum recorded while the buffers were generated.
 */
@Test(enabled = true)
public void testMapValueBuffer() {
    DurableType[] types = { DurableType.STRING, DurableType.BUFFER };
    DurableHashMap<String, DurableBuffer> map = DurableHashMapFactory.create(m_act, null, types, 1, false);

    Checksum crc = new CRC32();
    crc.reset();

    Long handler = map.getHandler();
    // Populate the map; genuptBuffer folds each generated buffer's bytes into crc.
    for (int idx = 0; idx < 10; idx++) {
        map.put("buffer" + idx, genuptBuffer(m_act, crc, genRandSize()));
    }
    long expectedChecksum = crc.getValue();

    // First pass: read every buffer back from the live map and re-checksum it.
    crc.reset();
    for (int idx = 0; idx < 10; idx++) {
        DurableBuffer<NonVolatileMemAllocator> value = map.get("buffer" + idx);
        Assert.assertNotNull(value);
        byte[] content = new byte[value.get().capacity()];
        value.get().get(content);
        crc.update(content, 0, content.length);
    }
    Assert.assertEquals(crc.getValue(), expectedChecksum);

    // Second pass: restore the map from its handler and verify once more.
    crc.reset();
    DurableHashMap<String, DurableBuffer> restored = DurableHashMapFactory.restore(m_act, null, types,
            handler, false);
    for (int idx = 0; idx < 10; idx++) {
        DurableBuffer<NonVolatileMemAllocator> value = restored.get("buffer" + idx);
        Assert.assertNotNull(value);
        byte[] content = new byte[value.get().capacity()];
        value.get().get(content);
        crc.update(content, 0, content.length);
    }
    Assert.assertEquals(crc.getValue(), expectedChecksum);

    restored.destroy();
}

From source file:org.apache.mnemonic.collections.DurableHashMapNGTest.java

/**
 * Verifies that DurableChunk values stored in a DurableHashMap survive a
 * restore cycle: the checksum over all retrieved chunk bytes must match the
 * checksum recorded while the chunks were generated.
 */
@Test(enabled = true)
public void testMapValueChunk() {
    DurableType gtypes[] = { DurableType.STRING, DurableType.CHUNK };
    DurableHashMap<String, DurableChunk> map = DurableHashMapFactory.create(m_act, null, gtypes, 1, false);
    long chunkVal;

    Checksum chunkCheckSum = new CRC32();
    chunkCheckSum.reset();

    Long handler = map.getHandler();
    // Populate the map; genuptChunk folds each chunk's bytes into chunkCheckSum.
    for (int i = 0; i < 10; i++) {
        map.put("chunk" + i, genuptChunk(m_act, chunkCheckSum, genRandSize()));
    }

    // Expected checksum, captured once at creation time.
    chunkVal = chunkCheckSum.getValue();
    chunkCheckSum.reset();

    for (int i = 0; i < 10; i++) {
        DurableChunk<NonVolatileMemAllocator> dc = map.get("chunk" + i);
        for (int j = 0; j < dc.getSize(); ++j) {
            byte b = unsafe.getByte(dc.get() + j);
            chunkCheckSum.update(b);
        }
    }
    // BUG FIX: chunkVal was previously reassigned from the same checksum right
    // before this assert, so the test compared the value to itself and could
    // never fail. Compare against the checksum captured at creation time.
    Assert.assertEquals(chunkCheckSum.getValue(), chunkVal);

    chunkCheckSum.reset();
    DurableHashMap<String, DurableChunk> restoredMap = DurableHashMapFactory.restore(m_act, null, gtypes,
            handler, false);

    for (int i = 0; i < 10; i++) {
        DurableChunk<NonVolatileMemAllocator> dc = restoredMap.get("chunk" + i);
        for (int j = 0; j < dc.getSize(); ++j) {
            byte b = unsafe.getByte(dc.get() + j);
            chunkCheckSum.update(b);
        }
    }
    // Same fix as above: verify the restored map against the original checksum.
    Assert.assertEquals(chunkCheckSum.getValue(), chunkVal);

    restoredMap.destroy();
}

From source file:com.splout.db.dnode.HttpFileExchanger.java

/**
 * Streams a binary file to a remote endpoint over HTTP, gzip-compressed,
 * framed as: [long fileSize][file bytes][long CRC32]. The CRC trailer lets
 * the receiver verify the download.
 *
 * @param tablespace tablespace name, sent as a request header
 * @param partition partition id, sent as a request header
 * @param version version number, sent as a request header
 * @param binaryFile the local file to send
 * @param url destination URL
 * @param blockUntilComplete when true, poll until the async send finishes
 */
public void send(final String tablespace, final int partition, final long version, final File binaryFile,
        final String url, boolean blockUntilComplete) {
    Future<?> future = clientExecutors.submit(new Runnable() {
        @Override
        public void run() {
            DataOutputStream writer = null;
            InputStream input = null;
            try {
                HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
                connection.setChunkedStreamingMode(config.getInt(FetcherProperties.DOWNLOAD_BUFFER));
                connection.setDoOutput(true);
                connection.setRequestProperty("filename", binaryFile.getName());
                connection.setRequestProperty("tablespace", tablespace);
                connection.setRequestProperty("partition", partition + "");
                connection.setRequestProperty("version", version + "");

                Checksum checkSum = new CRC32();

                writer = new DataOutputStream(new GZIPOutputStream(connection.getOutputStream()));
                // 1 - write file size
                writer.writeLong(binaryFile.length());
                writer.flush();
                // 2 - write file content, folding every byte into the CRC
                input = new FileInputStream(binaryFile);
                byte[] buffer = new byte[config.getInt(FetcherProperties.DOWNLOAD_BUFFER)];
                long wrote = 0;
                for (int length = 0; (length = input.read(buffer)) > 0;) {
                    writer.write(buffer, 0, length);
                    checkSum.update(buffer, 0, length);
                    wrote += length;
                }
                // 3 - add the CRC so that we can verify the download
                writer.writeLong(checkSum.getValue());
                writer.flush();
                log.info("Sent file " + binaryFile + " to " + url + " with #bytes: " + wrote + " and checksum: "
                        + checkSum.getValue());
            } catch (IOException e) {
                log.error(e);
            } finally {
                try {
                    if (input != null) {
                        input.close();
                    }
                    if (writer != null) {
                        writer.close();
                    }
                } catch (IOException ignore) {
                }
            }
        }
    });
    try {
        if (blockUntilComplete) {
            // BUG FIX: the loop condition was inverted (it only looped while the
            // future was ALREADY done or cancelled), so blockUntilComplete never
            // actually waited. Poll until the task completes or is cancelled.
            while (!future.isDone() && !future.isCancelled()) {
                Thread.sleep(1000);
            }
        }
    } catch (InterruptedException e) {
        // Re-assert the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
    }
}

From source file:com.splout.db.dnode.HttpFileExchanger.java

/**
 * Receives a file framed as [long fileSize][file bytes][long CRC32] over a
 * gzip-compressed HTTP request body, writes it to local storage, and verifies
 * the trailing CRC. On checksum mismatch the file is deleted and the bad-CRC
 * callback fires; on any error the error callback fires.
 *
 * @param exchange the HTTP exchange carrying headers (filename, tablespace,
 *                 partition, version) and the framed request body
 * @throws IOException if closing the streams fails
 */
@Override
public void handle(HttpExchange exchange) throws IOException {
    DataInputStream iS = null;
    FileOutputStream writer = null;
    File dest = null;

    String tablespace = null;
    Integer partition = null;
    Long version = null;

    try {
        iS = new DataInputStream(new GZIPInputStream(exchange.getRequestBody()));
        String fileName = exchange.getRequestHeaders().getFirst("filename");
        tablespace = exchange.getRequestHeaders().getFirst("tablespace");
        partition = Integer.valueOf(exchange.getRequestHeaders().getFirst("partition"));
        version = Long.valueOf(exchange.getRequestHeaders().getFirst("version"));

        dest = new File(
                new File(tempDir,
                        DNodeHandler.getLocalStoragePartitionRelativePath(tablespace, partition, version)),
                fileName);

        // just in case, avoid copying the same file concurrently
        // (but we also shouldn't avoid this in other levels of the app)
        synchronized (currentTransfersMonitor) {
            if (currentTransfers.containsKey(dest.toString())) {
                throw new IOException("Incoming file already being transferred - " + dest);
            }
            currentTransfers.put(dest.toString(), new Object());
        }

        if (!dest.getParentFile().exists()) {
            dest.getParentFile().mkdirs();
        }
        if (dest.exists()) {
            dest.delete();
        }

        writer = new FileOutputStream(dest);
        byte[] buffer = new byte[config.getInt(FetcherProperties.DOWNLOAD_BUFFER)];

        Checksum checkSum = new CRC32();

        // 1- Read file size
        long fileSize = iS.readLong();
        log.debug("Going to read file [" + fileName + "] of size: " + fileSize);
        // 2- Read file contents
        long readSoFar = 0;

        do {
            long missingBytes = fileSize - readSoFar;
            int bytesToRead = (int) Math.min(missingBytes, buffer.length);
            int read = iS.read(buffer, 0, bytesToRead);
            // BUG FIX: read() returns -1 at end-of-stream; previously that -1
            // was passed to checkSum.update()/writer.write() (throws) and added
            // to readSoFar, risking a corrupt count or an endless loop on a
            // truncated upload. Fail fast instead.
            if (read < 0) {
                throw new IOException("Premature end of stream for file [" + fileName + "]: expected "
                        + fileSize + " bytes but got only " + readSoFar);
            }
            checkSum.update(buffer, 0, read);
            writer.write(buffer, 0, read);
            readSoFar += read;
            callback.onProgress(tablespace, partition, version, dest, fileSize, readSoFar);
        } while (readSoFar < fileSize);

        // 3- Read CRC
        long expectedCrc = iS.readLong();
        if (expectedCrc == checkSum.getValue()) {
            log.info("File [" + dest.getAbsolutePath() + "] received -> Checksum -- " + checkSum.getValue()
                    + " matches expected CRC [OK]");
            callback.onFileReceived(tablespace, partition, version, dest);
        } else {
            log.error("File received [" + dest.getAbsolutePath() + "] -> Checksum -- " + checkSum.getValue()
                    + " doesn't match expected CRC: " + expectedCrc);
            callback.onBadCRC(tablespace, partition, version, dest);
            dest.delete();
        }
    } catch (Throwable t) {
        log.error(t);
        callback.onError(t, tablespace, partition, version, dest);
        // BUG FIX: t.getMessage() can be null (e.g. NPE without a message),
        // which previously threw a secondary NPE inside this error handler.
        String message = t.getMessage();
        if (dest != null && dest.exists()
                && (message == null || !message.contains("Incoming file already being transferred"))) {
            dest.delete();
        }
    } finally {
        if (writer != null) {
            writer.close();
        }
        if (iS != null) {
            iS.close();
        }
        if (dest != null) {
            currentTransfers.remove(dest.toString());
        }
    }
}

From source file:PngEncoder.java

/**
 * Writes the PLTE (palette) chunk to the output stream.
 *
 * @param out the OutputStream to write the chunk to
 * @param csum the Checksum that is updated as data is written
 *             to the passed-in OutputStream
 * @throws IOException if a problem is encountered writing the output
 */
private void writePlteChunk(OutputStream out, Checksum csum) throws IOException {
    IndexColorModel palette = (IndexColorModel) image.getColorModel();

    writeInt(out, 768); // chunk size: 256 entries * 3 bytes each
    csum.reset();       // PNG chunk CRC covers only the type and data bytes
    out.write(PLTE);

    byte[] red = new byte[256];
    byte[] green = new byte[256];
    byte[] blue = new byte[256];
    palette.getReds(red);
    palette.getGreens(green);
    palette.getBlues(blue);

    // Palette entries are emitted as interleaved R, G, B triples.
    for (int entry = 0; entry < 256; ++entry) {
        out.write(red[entry]);
        out.write(green[entry]);
        out.write(blue[entry]);
    }

    writeInt(out, (int) csum.getValue()); // chunk CRC
}

From source file:org.olat.core.util.vfs.version.VersionsFileManager.java

/**
 * Decides whether the current file's content matches the most recent stored
 * revision: sizes are compared first as a cheap filter, then Adler-32
 * checksums of both files are compared.
 *
 * @param currentFile the live file to compare
 * @param versions version metadata holding the prior revisions
 * @return true when the latest revision appears identical to the current file
 */
private boolean isSameFile(VFSLeaf currentFile, VersionsFileImpl versions) {
    if (versions.getRevisions() == null || versions.getRevisions().isEmpty()) {
        return false;
    }
    VFSRevision lastRevision = versions.getRevisions().get(versions.getRevisions().size() - 1);

    // Cheap pre-check: different (or zero) sizes can never be the same content.
    long lastSize = lastRevision.getSize();
    long currentSize = currentFile.getSize();
    if (currentSize != lastSize || currentSize <= 0) {
        return false;
    }
    // Checksum comparison only works for local-file-backed implementations.
    if (!(lastRevision instanceof RevisionFileImpl) || !(currentFile instanceof LocalFileImpl)) {
        return false;
    }
    RevisionFileImpl lastRev = (RevisionFileImpl) lastRevision;
    LocalFileImpl current = (LocalFileImpl) currentFile;
    try {
        Checksum cm1 = FileUtils.checksum(((LocalFileImpl) lastRev.getFile()).getBasefile(),
                new Adler32());
        Checksum cm2 = FileUtils.checksum(current.getBasefile(), new Adler32());
        return cm1.getValue() == cm2.getValue();
    } catch (IOException e) {
        // Best effort: if either file cannot be read, treat them as different.
        log.debug("Error calculating the checksum of files");
        return false;
    }
}

From source file:org.apache.mnemonic.ChunkBufferNGTest.java

/**
 * Re-opens the persistent chunk written by testGenChunkBuffers, re-reads every
 * chunk buffer, and verifies that both the CRC32 checksum and the buffer count
 * match the values recorded during generation.
 */
@Test(dependsOnMethods = { "testGenChunkBuffers" })
public void testCheckChunkBuffers() {
    Checksum crc = new CRC32();
    crc.reset();
    NonVolatileMemAllocator act = new NonVolatileMemAllocator(
            Utils.getNonVolatileMemoryAllocatorService("pmalloc"), 1L, "./pmchunkbuffertest.dat", false);
    act.setChunkReclaimer(new Reclaim<Long>() {
        @Override
        public boolean reclaim(Long mres, Long sz) {
            System.out.println(String.format("Reclaim Memory Chunk: %X  Size: %s",
                    System.identityHashCode(mres), null == sz ? "NULL" : sz.toString()));
            return false;
        }
    });

    DurableChunk<NonVolatileMemAllocator> chunk = act.retrieveChunk(act.getHandler(m_keyid));
    Assert.assertNotNull(chunk);
    long bufferCount = chunk.getSize() / m_bufsize;

    // Walk every fixed-size buffer inside the chunk and fold its bytes into the CRC.
    for (long bufferIdx = 0; bufferIdx < bufferCount; ++bufferIdx) {
        ChunkBuffer cb = chunk.getChunkBuffer(bufferIdx * m_bufsize, m_bufsize);
        Assert.assertNotNull(cb);
        byte[] scratch = new byte[m_bufsize];
        cb.get().clear();
        cb.get().get(scratch);
        crc.update(scratch, 0, scratch.length);
    }
    act.close();

    Assert.assertEquals(m_checksum, crc.getValue());
    Assert.assertEquals(m_count, bufferCount);
    System.out.println(
            String.format("The checksum of chunk buffers are %d, Total count is %d", m_checksum, m_count));
}

From source file:PngEncoder.java

/**
 * Writes the IHDR (image header) chunk to the output stream.
 *
 * @param out the OutputStream to write the chunk to
 * @param csum the Checksum that is updated as data is written
 *             to the passed-in OutputStream
 * @throws IOException if a problem is encountered writing the output
 */
private void writeIhdrChunk(OutputStream out, Checksum csum) throws IOException {
    writeInt(out, 13); // chunk size: IHDR data is always 13 bytes
    csum.reset();      // PNG chunk CRC covers only the type and data bytes
    out.write(IHDR);
    writeInt(out, width);
    writeInt(out, height);
    out.write(BIT_DEPTH);
    // The PNG color-type byte is selected by bytes-per-pixel of the output.
    if (outputBpp == 1) {
        out.write(COLOR_TYPE_INDEXED);
    } else if (outputBpp == 3) {
        out.write(COLOR_TYPE_RGB);
    } else if (outputBpp == 4) {
        out.write(COLOR_TYPE_RGBA);
    } else {
        throw new IllegalStateException("Invalid bytes per pixel");
    }
    out.write(0); // Compression Method
    out.write(0); // Filter Method
    out.write(0); // Interlace
    writeInt(out, (int) csum.getValue()); // chunk CRC
}