Example usage for java.util.zip Checksum update

List of usage examples for java.util.zip Checksum update

Introduction

On this page you can find example usage of java.util.zip Checksum#update.

Prototype

public void update(byte[] b, int off, int len);

Source Link

Document

Updates the current checksum with the specified array of bytes.

Usage

From source file:org.apache.mnemonic.collections.DurableArrayNGTest.java

/**
 * Allocates a non-volatile durable buffer of the given size, fills it with
 * random bytes, and folds those bytes into the supplied checksum so callers
 * can later verify the stored content.
 *
 * @param act  allocator used to create the durable buffer
 * @param cs   checksum accumulator updated with the generated bytes
 * @param size number of random bytes to generate and store
 * @return the populated durable buffer
 * @throws OutOfHybridMemory if the allocator cannot satisfy the request
 */
protected DurableBuffer<NonVolatileMemAllocator> genuptBuffer(NonVolatileMemAllocator act, Checksum cs,
        int size) {
    DurableBuffer<NonVolatileMemAllocator> dbuf = act.createBuffer(size, false);
    if (dbuf == null) {
        throw new OutOfHybridMemory("Create Durable Buffer Failed.");
    }
    byte[] payload = RandomUtils.nextBytes(size);
    Assert.assertNotNull(payload);
    dbuf.get().clear();
    dbuf.get().put(payload);
    cs.update(payload, 0, payload.length);
    // Rewind position/limit so subsequent readers see the full content.
    dbuf.get().clear();
    return dbuf;
}

From source file:org.apache.mnemonic.DurablePersonNGTest.java

/**
 * Restores the person chains persisted by {@code testGenPeople} and verifies
 * that the picture checksum and the per-byte preference fingerprint match the
 * values recorded when the data was generated.
 *
 * @throws RetrieveDurableEntityError if a durable person cannot be restored
 */
@Test(dependsOnMethods = { "testGenPeople" })
public void testCheckPeople() throws RetrieveDurableEntityError {
    // Reopen the existing persistent heap (last argument false = do not recreate).
    NonVolatileMemAllocator act = new NonVolatileMemAllocator(
            Utils.getNonVolatileMemoryAllocatorService("pmalloc"), 1024 * 1024 * 8, "./pobj_person.dat", false);
    act.setBufferReclaimer(new Reclaim<ByteBuffer>() {
        @Override
        public boolean reclaim(ByteBuffer mres, Long sz) {
            System.out.println(String.format("Reclaim Memory Buffer: %X  Size: %s",
                    System.identityHashCode(mres), null == sz ? "NULL" : sz.toString()));
            // Returning false leaves the actual reclamation to the allocator.
            return false;
        }
    });
    act.setChunkReclaimer(new Reclaim<Long>() {
        @Override
        public boolean reclaim(Long mres, Long sz) {
            System.out.println(String.format("Reclaim Memory Chunk: %X  Size: %s",
                    System.identityHashCode(mres), null == sz ? "NULL" : sz.toString()));
            return false;
        }
    });

    Checksum pic_cs = new CRC32();
    pic_cs.reset();
    Checksum fp_cs = new CRC32();
    fp_cs.reset();
    // NOTE(review): 'size' is declared but never used in this test.
    long size;
    byte[] buf;

    long val;
    // Each key slot holds the handler of the head of a person chain; a zero
    // handler marks the first unused slot, so stop there.
    for (long i = 0; i < cKEYCAPACITY; ++i) {
        System.out.printf("----------Key %d--------------\n", i);
        val = act.getHandler(i);
        if (0L == val) {
            break;
        }
        Person<Integer> person = PersonFactory.restore(act, val, true);
        // Walk the chain from each person through its ancestors via getMother().
        while (null != person) {
            person.testOutput();
            // clear() rewinds position/limit so the whole picture is readable.
            person.getPicture().get().clear();
            buf = new byte[person.getPicture().get().capacity()];
            person.getPicture().get().get(buf);
            pic_cs.update(buf, 0, buf.length);
            byte b;
            // Fingerprint the preference chunk byte-by-byte via direct memory reads.
            for (int j = 0; j < person.getPreference().getSize(); ++j) {
                b = unsafe.getByte(person.getPreference().get() + j);
                fp_cs.update(b);
            }
            person = person.getMother();
        }
    }

    act.close();
    Assert.assertEquals(pic_cs.getValue(), pic_checksum);
    Assert.assertEquals(fp_cs.getValue(), fp_checksum);
}

From source file:com.splout.db.dnode.HttpFileExchanger.java

/**
 * Asynchronously streams {@code binaryFile} to the peer at {@code url} over a
 * chunked, gzip-compressed HTTP connection. The payload layout is: file
 * length (long), raw file bytes, then a trailing CRC32 of the content so the
 * receiver can verify the download.
 *
 * @param tablespace         tablespace the file belongs to (sent as a request header)
 * @param partition          partition id (sent as a request header)
 * @param version            version number (sent as a request header)
 * @param binaryFile         local file to send
 * @param url                destination endpoint
 * @param blockUntilComplete when true, this call waits until the transfer
 *                           task finishes (or is cancelled) before returning
 */
public void send(final String tablespace, final int partition, final long version, final File binaryFile,
        final String url, boolean blockUntilComplete) {
    Future<?> future = clientExecutors.submit(new Runnable() {
        @Override
        public void run() {
            DataOutputStream writer = null;
            InputStream input = null;
            try {
                HttpURLConnection connection = (HttpURLConnection) new URL(url).openConnection();
                connection.setChunkedStreamingMode(config.getInt(FetcherProperties.DOWNLOAD_BUFFER));
                connection.setDoOutput(true);
                connection.setRequestProperty("filename", binaryFile.getName());
                connection.setRequestProperty("tablespace", tablespace);
                connection.setRequestProperty("partition", partition + "");
                connection.setRequestProperty("version", version + "");

                Checksum checkSum = new CRC32();

                writer = new DataOutputStream(new GZIPOutputStream(connection.getOutputStream()));
                // 1 - write file size
                writer.writeLong(binaryFile.length());
                writer.flush();
                // 2 - write file content, accumulating the checksum as we go
                input = new FileInputStream(binaryFile);
                byte[] buffer = new byte[config.getInt(FetcherProperties.DOWNLOAD_BUFFER)];
                long wrote = 0;
                for (int length = 0; (length = input.read(buffer)) > 0;) {
                    writer.write(buffer, 0, length);
                    checkSum.update(buffer, 0, length);
                    wrote += length;
                }
                // 3 - add the CRC so that we can verify the download
                writer.writeLong(checkSum.getValue());
                writer.flush();
                log.info("Sent file " + binaryFile + " to " + url + " with #bytes: " + wrote + " and checksum: "
                        + checkSum.getValue());
            } catch (IOException e) {
                log.error(e);
            } finally {
                try {
                    if (input != null) {
                        input.close();
                    }
                    if (writer != null) {
                        writer.close();
                    }
                } catch (IOException ignore) {
                    // Best-effort close; the transfer outcome was already logged above.
                }
            }
        }
    });
    if (blockUntilComplete) {
        try {
            // BUGFIX: the original condition was inverted -- it returned
            // immediately while the task was still running and spun while it
            // was already done. Poll until the task completes or is cancelled.
            while (!future.isDone() && !future.isCancelled()) {
                Thread.sleep(1000);
            }
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers higher up the stack.
            Thread.currentThread().interrupt();
        }
    }
}

From source file:org.apache.mnemonic.collections.DurableArrayNGTest.java

/**
 * Round-trips an array of durable buffers: writes random buffers while
 * accumulating a reference checksum, then re-reads them by index, via a
 * restored handle, and via iterator, asserting the checksum matches each time.
 */
@Test(enabled = true)
public void testGetSetArrayBuffer() {
    DurableType gtypes[] = { DurableType.BUFFER };
    int capacity = 10;
    DurableArray<DurableBuffer> array = DurableArrayFactory.create(m_act, null, gtypes, capacity, false);

    Long handler = array.getHandler();
    Checksum bufferCheckSum = new CRC32();
    bufferCheckSum.reset();

    // Populate the array; genuptBuffer folds each buffer's bytes into the checksum.
    for (int idx = 0; idx < capacity; idx++) {
        array.set(idx, genuptBuffer(m_act, bufferCheckSum, genRandSize()));
    }
    long bufVal = bufferCheckSum.getValue();

    // Pass 1: read back through the original array object.
    bufferCheckSum.reset();
    for (int idx = 0; idx < capacity; idx++) {
        DurableBuffer<NonVolatileMemAllocator> dbuf = array.get(idx);
        Assert.assertNotNull(dbuf);
        byte[] content = new byte[dbuf.get().capacity()];
        dbuf.get().get(content);
        bufferCheckSum.update(content, 0, content.length);
        dbuf.get().clear();
    }
    Assert.assertEquals(bufferCheckSum.getValue(), bufVal);

    // Pass 2: restore the array from its handler and read by index again.
    bufferCheckSum.reset();
    DurableArray<DurableBuffer> restoredArray = DurableArrayFactory.restore(m_act, null, gtypes, handler,
            false);
    for (int idx = 0; idx < capacity; idx++) {
        DurableBuffer<NonVolatileMemAllocator> dbuf = restoredArray.get(idx);
        Assert.assertNotNull(dbuf);
        byte[] content = new byte[dbuf.get().capacity()];
        dbuf.get().get(content);
        bufferCheckSum.update(content, 0, content.length);
        dbuf.get().clear();
    }
    Assert.assertEquals(bufferCheckSum.getValue(), bufVal);

    // Pass 3: traverse via iterator and also confirm the element count.
    bufferCheckSum.reset();
    Iterator<DurableBuffer> itr = restoredArray.iterator();
    int seen = 0;
    while (itr.hasNext()) {
        DurableBuffer<NonVolatileMemAllocator> dbuf = itr.next();
        Assert.assertNotNull(dbuf);
        byte[] content = new byte[dbuf.get().capacity()];
        dbuf.get().get(content);
        bufferCheckSum.update(content, 0, content.length);
        dbuf.get().clear();
        seen++;
    }
    Assert.assertEquals(seen, capacity);
    Assert.assertEquals(bufferCheckSum.getValue(), bufVal);

    restoredArray.destroy();
}

From source file:org.apache.mnemonic.collections.DurableHashMapNGTest.java

/**
 * Creates a durable buffer holding {@code size} random bytes and folds those
 * bytes into {@code cs} so callers can later verify the stored content.
 *
 * @param act  allocator used to create the durable buffer
 * @param cs   checksum accumulator updated with the generated bytes
 * @param size number of random bytes to generate and store
 * @return the populated durable buffer
 * @throws OutOfHybridMemory if the buffer cannot be allocated
 */
protected DurableBuffer<NonVolatileMemAllocator> genuptBuffer(NonVolatileMemAllocator act, Checksum cs,
        int size) {
    DurableBuffer<NonVolatileMemAllocator> dbuf = act.createBuffer(size, false);
    if (dbuf == null) {
        throw new OutOfHybridMemory("Create Durable Buffer Failed.");
    }
    dbuf.get().clear();
    byte[] payload = RandomUtils.nextBytes(size);
    Assert.assertNotNull(payload);
    dbuf.get().put(payload);
    cs.update(payload, 0, payload.length);
    return dbuf;
}

From source file:org.apache.mnemonic.ChunkBufferNGTest.java

/**
 * Reopens the persistent chunk written by {@code testGenChunkBuffers},
 * re-reads every fixed-size buffer slice, and checks both the CRC32 and the
 * buffer count against the values captured during generation.
 */
@Test(dependsOnMethods = { "testGenChunkBuffers" })
public void testCheckChunkBuffers() {
    Checksum cs = new CRC32();
    cs.reset();
    // Reopen the existing heap (last argument false = do not recreate).
    NonVolatileMemAllocator act = new NonVolatileMemAllocator(
            Utils.getNonVolatileMemoryAllocatorService("pmalloc"), 1L, "./pmchunkbuffertest.dat", false);
    act.setChunkReclaimer(new Reclaim<Long>() {
        @Override
        public boolean reclaim(Long mres, Long sz) {
            System.out.println(String.format("Reclaim Memory Chunk: %X  Size: %s",
                    System.identityHashCode(mres), null == sz ? "NULL" : sz.toString()));
            return false;
        }
    });

    DurableChunk<NonVolatileMemAllocator> mch = act.retrieveChunk(act.getHandler(m_keyid));
    Assert.assertNotNull(mch);
    long bufcnt = mch.getSize() / m_bufsize;

    // Read each m_bufsize-wide slice of the chunk and fold it into the checksum.
    for (long slot = 0; slot < bufcnt; ++slot) {
        ChunkBuffer ckbuf = mch.getChunkBuffer(slot * m_bufsize, m_bufsize);
        Assert.assertNotNull(ckbuf);
        byte[] content = new byte[m_bufsize];
        ckbuf.get().clear();
        ckbuf.get().get(content);
        cs.update(content, 0, content.length);
    }
    act.close();

    Assert.assertEquals(m_checksum, cs.getValue());
    Assert.assertEquals(m_count, bufcnt);
    System.out.println(
            String.format("The checksum of chunk buffers are %d, Total count is %d", m_checksum, m_count));
}

From source file:org.apache.mnemonic.collections.DurableHashMapNGTest.java

/**
 * Stores durable buffers as map values while accumulating a reference
 * checksum, then verifies the same checksum when reading back through the
 * original map and through a map restored from its handler.
 */
@Test(enabled = true)
public void testMapValueBuffer() {
    DurableType gtypes[] = { DurableType.STRING, DurableType.BUFFER };
    DurableHashMap<String, DurableBuffer> map = DurableHashMapFactory.create(m_act, null, gtypes, 1, false);

    Checksum bufferCheckSum = new CRC32();
    bufferCheckSum.reset();
    Long handler = map.getHandler();

    // Insert ten random buffers; genuptBuffer folds their bytes into the checksum.
    for (int idx = 0; idx < 10; idx++) {
        map.put("buffer" + idx, genuptBuffer(m_act, bufferCheckSum, genRandSize()));
    }
    long bufVal = bufferCheckSum.getValue();

    // Pass 1: read back through the live map.
    bufferCheckSum.reset();
    for (int idx = 0; idx < 10; idx++) {
        DurableBuffer<NonVolatileMemAllocator> dbuf = map.get("buffer" + idx);
        Assert.assertNotNull(dbuf);
        byte[] content = new byte[dbuf.get().capacity()];
        dbuf.get().get(content);
        bufferCheckSum.update(content, 0, content.length);
    }
    Assert.assertEquals(bufferCheckSum.getValue(), bufVal);

    // Pass 2: restore the map from its handler and read back again.
    bufferCheckSum.reset();
    DurableHashMap<String, DurableBuffer> restoredMap = DurableHashMapFactory.restore(m_act, null, gtypes,
            handler, false);
    for (int idx = 0; idx < 10; idx++) {
        DurableBuffer<NonVolatileMemAllocator> dbuf = restoredMap.get("buffer" + idx);
        Assert.assertNotNull(dbuf);
        byte[] content = new byte[dbuf.get().capacity()];
        dbuf.get().get(content);
        bufferCheckSum.update(content, 0, content.length);
    }
    Assert.assertEquals(bufferCheckSum.getValue(), bufVal);

    restoredMap.destroy();
}

From source file:org.apache.mnemonic.ChunkBufferNGTest.java

/**
 * Allocates a large persistent chunk, fills it with fixed-size random
 * buffers, and records the resulting CRC32 and buffer count for the
 * dependent verification test.
 */
@Test
public void testGenChunkBuffers() {
    Checksum cs = new CRC32();
    cs.reset();

    // Create (last argument true) a 1GB pmalloc-backed heap for the chunk.
    NonVolatileMemAllocator act = new NonVolatileMemAllocator(
            Utils.getNonVolatileMemoryAllocatorService("pmalloc"), 1024 * 1024 * 1024L,
            "./pmchunkbuffertest.dat", true);
    act.setChunkReclaimer(new Reclaim<Long>() {
        @Override
        public boolean reclaim(Long mres, Long sz) {
            System.out.println(String.format("Reclaim Memory Chunk: %X  Size: %s",
                    System.identityHashCode(mres), null == sz ? "NULL" : sz.toString()));
            return false;
        }
    });

    DurableChunk<NonVolatileMemAllocator> mch = act.createChunk(1000 * 1024 * 1024L);
    Assert.assertNotNull(mch);
    // Publish the chunk's handler so the verification test can retrieve it.
    act.setHandler(m_keyid, mch.getHandler());

    long bufcnt = mch.getSize() / m_bufsize;
    // Fill each m_bufsize-wide slice with random bytes, checksumming as we go.
    for (long slot = 0; slot < bufcnt; ++slot) {
        ChunkBuffer ckbuf = mch.getChunkBuffer(slot * m_bufsize, m_bufsize);
        Assert.assertNotNull(ckbuf);
        byte[] payload = RandomUtils.nextBytes(m_bufsize);
        Assert.assertNotNull(payload);
        ckbuf.get().clear();
        ckbuf.get().put(payload);
        cs.update(payload, 0, payload.length);
    }

    m_checksum = cs.getValue();
    m_count = bufcnt;
    act.close();
}

From source file:org.apache.mnemonic.mapreduce.MneMapreduceBufferDataTest.java

/**
 * Reads back every part file produced by {@code testWriteBufferData} and
 * verifies the record count, total byte size, and CRC32 of all buffer
 * contents against the values captured while writing.
 *
 * @throws Exception if the input format or record reader fails
 */
@Test(enabled = true, dependsOnMethods = { "testWriteBufferData" })
public void testReadBufferData() throws Exception {
    long reccnt = 0L;
    long tsize = 0L;
    byte[] buf;
    Checksum cs = new CRC32();
    cs.reset();
    // Collect this job's output part files from the working directory.
    File folder = new File(m_workdir.toString());
    File[] listfiles = folder.listFiles();
    for (int idx = 0; idx < listfiles.length; ++idx) {
        if (listfiles[idx].isFile()
                && listfiles[idx].getName().startsWith(MneConfigHelper.getBaseOutputName(m_conf, null))
                && listfiles[idx].getName().endsWith(MneConfigHelper.DEFAULT_FILE_EXTENSION)) {
            m_partfns.add(listfiles[idx].getName());
        }
    }
    Collections.sort(m_partfns); // keep the order for checksum
    for (int idx = 0; idx < m_partfns.size(); ++idx) {
        System.out.println(String.format("Verifying : %s", m_partfns.get(idx)));
        FileSplit split = new FileSplit(new Path(m_workdir, m_partfns.get(idx)), 0, 0L, new String[0]);
        InputFormat<NullWritable, MneDurableInputValue<DurableBuffer<?>>> inputFormat = new MneInputFormat<MneDurableInputValue<DurableBuffer<?>>, DurableBuffer<?>>();
        RecordReader<NullWritable, MneDurableInputValue<DurableBuffer<?>>> reader = inputFormat
                .createRecordReader(split, m_tacontext);
        MneDurableInputValue<DurableBuffer<?>> dbufval = null;
        while (reader.nextKeyValue()) {
            dbufval = reader.getCurrentValue();
            // The durable value's recorded size must match its buffer capacity.
            assert dbufval.getValue().getSize() == dbufval.getValue().get().capacity();
            // clear() rewinds position/limit so the full content is readable.
            dbufval.getValue().get().clear();
            buf = new byte[dbufval.getValue().get().capacity()];
            dbufval.getValue().get().get(buf);
            cs.update(buf, 0, buf.length);
            tsize += dbufval.getValue().getSize();
            ++reccnt;
        }
        reader.close();
    }
    AssertJUnit.assertEquals(m_reccnt, reccnt);
    AssertJUnit.assertEquals(m_totalsize, tsize);
    AssertJUnit.assertEquals(m_checksum, cs.getValue());
    System.out.println(String.format("The checksum of buffer is %d", m_checksum));
}

From source file:com.cisco.dvbu.ps.common.util.CommonUtils.java

/**
 * Returns a sum of CRC32 checksums of all lines/rows in a file.
 * This method is used to compare files with the same lines/rows, which may be
 * in different order, in which case we still want to consider them equal
 * (from the point of view of containing the same data); the per-line sum is
 * order-independent, so both files yield the same result.
 *
 * This is useful when the file contains results of a database query and we
 * need to compare results of two queries that may return the same data but in
 * different order.
 *
 * @param filePath file name with full path
 * @return sum of checksums of each line (row) from the input file; BigInteger
 *         is used so the sum cannot overflow even for very large files
 * @throws IOException if the file cannot be opened or read
 */
public static BigInteger fileChecksumByRow(String filePath) throws IOException {

    BigInteger sumOfcheckSumValues = BigInteger.ZERO;
    Checksum checksum = new CRC32();

    // BUGFIX: try-with-resources guarantees the reader is closed even when
    // readLine throws mid-file (the original leaked it on any IOException).
    // NOTE(review): line.getBytes() uses the platform default charset, as the
    // original did; both files being compared must be read on the same platform.
    try (BufferedReader br = new BufferedReader(new FileReader(filePath))) {
        String line;
        while ((line = br.readLine()) != null) {
            // Checksum one line at a time, then fold it into the running sum.
            byte bytes[] = line.getBytes();
            checksum.reset();
            checksum.update(bytes, 0, bytes.length);
            sumOfcheckSumValues = sumOfcheckSumValues.add(BigInteger.valueOf(checksum.getValue()));
        }
    }
    return sumOfcheckSumValues;
}