Example usage for java.util.zip CRC32 update

Introduction

This page lists example usages of java.util.zip.CRC32.update(byte[], int, int) collected from open-source projects.

Prototype

@Override
public void update(byte[] b, int off, int len) 

Document

Updates the CRC-32 checksum with the specified array of bytes.
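
As a quick orientation before the project examples below, here is a minimal sketch of the usual pattern: read a stream in fixed-size chunks and pass each chunk to update(byte[], int, int), then read the result with getValue(). The class name, file name, and buffer size are illustrative placeholders, not taken from any project listed on this page.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.CRC32;

public class Crc32UpdateExample {
    public static void main(String[] args) throws IOException {
        CRC32 crc = new CRC32();
        byte[] buffer = new byte[4096];
        // "data.bin" is a placeholder; substitute any file you want to checksum.
        try (InputStream in = new FileInputStream("data.bin")) {
            int bytesRead;
            while ((bytesRead = in.read(buffer)) != -1) {
                // Only the bytes actually read are fed into the checksum.
                crc.update(buffer, 0, bytesRead);
            }
        }
        // getValue() returns the CRC-32 as an unsigned 32-bit value stored in a long.
        System.out.println("CRC-32: " + Long.toHexString(crc.getValue()));
    }
}

Passing the byte count returned by read rather than buffer.length matters: the final chunk is usually shorter than the buffer, and including its stale tail bytes would change the checksum.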

Usage

From source file:org.commoncrawl.util.MultiFileMergeUtils.java

public static void main(String[] args) {

    Path testPath = new Path(args[0]);

    LOG.info("Initializing Hadoop Config");

    Configuration conf = new Configuration();

    conf.addResource("nutch-default.xml");
    conf.addResource("nutch-site.xml");
    conf.addResource("mapred-site.xml");
    conf.addResource("hdfs-site.xml");
    conf.addResource("commoncrawl-default.xml");
    conf.addResource("commoncrawl-site.xml");

    conf.setClass(MultiFileInputReader.MULTIFILE_COMPARATOR_CLASS, URLFPV2RawComparator.class,
            RawComparator.class);
    conf.setClass(MultiFileInputReader.MULTIFILE_KEY_CLASS, URLFPV2.class, WritableComparable.class);

    CrawlEnvironment.setHadoopConfig(conf);
    CrawlEnvironment.setDefaultHadoopFSURI("hdfs://ccn01:9000/");

    try {
        FileSystem fs = CrawlEnvironment.getDefaultFileSystem();

        Vector<Path> paths = new Vector<Path>();

        paths.add(new Path(testPath, "part-00000"));
        // paths.add(new Path(testPath,"part-00000"));
        paths.add(new Path(testPath, "part-00001"));

        TreeSet<URLFPV2> directReadSet = new TreeSet<URLFPV2>();
        TreeSet<URLFPV2> multiFileReadSet = new TreeSet<URLFPV2>();

        MultiFileInputReader<URLFPV2> inputReader = new MultiFileInputReader<URLFPV2>(fs, paths, conf);

        KeyAndValueData<URLFPV2> keyValueData = null;
        int multiFileKeyCount = 0;
        while ((keyValueData = inputReader.readNextItem()) != null) {
            LOG.info("Got Key Domain:" + keyValueData._keyObject.getDomainHash() + " URLHash:"
                    + keyValueData._keyObject.getUrlHash() + " Item Count:" + keyValueData._values.size()
                    + " Path[0]:" + keyValueData._values.get(0).source);

            if (keyValueData._values.size() > 1) {
                LOG.error("Got more than one item");
                for (int i = 0; i < keyValueData._values.size(); ++i) {
                    CRC32 crc = new CRC32();
                    crc.update(keyValueData._keyData.getData(), 0, keyValueData._keyData.getLength());
                    LOG.error("Item at[" + i + "] Path:" + keyValueData._values.get(i).source + " CRC:"
                            + crc.getValue());
                }
            }
            if (multiFileKeyCount++ < 1000)
                multiFileReadSet.add((URLFPV2) keyValueData._keyObject.clone());
        }
        inputReader.close();

        addFirstNFPItemsToSet(fs, new Path(testPath, "part-00000"), conf, directReadSet, 1000);
        addFirstNFPItemsToSet(fs, new Path(testPath, "part-00001"), conf, directReadSet, 1000);

        Iterator<URLFPV2> directReadIterator = directReadSet.iterator();
        Iterator<URLFPV2> multiFileReadIterator = multiFileReadSet.iterator();

        for (int i = 0; i < 1000; ++i) {
            URLFPV2 directReadFP = directReadIterator.next();
            URLFPV2 multiFileReadFP = multiFileReadIterator.next();

            if (directReadFP.compareTo(multiFileReadFP) != 0) {
                LOG.info("Mismatch at Index:" + i);
            }
        }

    } catch (IOException e) {
        LOG.error(CCStringUtils.stringifyException(e));
    } catch (CloneNotSupportedException e) {
        LOG.error(CCStringUtils.stringifyException(e));
    }

}

From source file:brut.util.BrutIO.java

public static CRC32 calculateCrc(InputStream input) throws IOException {
    CRC32 crc = new CRC32();
    int bytesRead;
    byte[] buffer = new byte[8192];
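    // Read the stream in 8 KB chunks, feeding only the bytes actually read into the checksum.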
    while ((bytesRead = input.read(buffer)) != -1) {
        crc.update(buffer, 0, bytesRead);
    }
    return crc;
}

From source file:org.broad.igv.util.Utilities.java

/**
 * @param buffer the bytes to checksum
 * @return the CRC-32 value of the buffer contents
 * @throws java.io.IOException
 */
private static long getCrc(byte[] buffer) throws IOException {

    CRC32 crc = new CRC32();

    crc.reset();
    crc.update(buffer, 0, buffer.length);
    return crc.getValue();
}

From source file:org.mariotaku.twidere.util.TwitterContentUtils.java

public static boolean isOfficialKey(final Context context, final String consumerKey,
        final String consumerSecret) {
    if (context == null || consumerKey == null || consumerSecret == null)
        return false;
    final String[] keySecrets = context.getResources()
            .getStringArray(R.array.values_official_consumer_secret_crc32);
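    // The official consumer secrets ship only as hex CRC-32 values; checksum the supplied secret and look for a match.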
    final CRC32 crc32 = new CRC32();
    final byte[] consumerSecretBytes = consumerSecret.getBytes(Charset.forName("UTF-8"));
    crc32.update(consumerSecretBytes, 0, consumerSecretBytes.length);
    final long value = crc32.getValue();
    crc32.reset();
    for (final String keySecret : keySecrets) {
        if (Long.parseLong(keySecret, 16) == value)
            return true;
    }
    return false;
}

From source file:org.mariotaku.twidere.util.TwitterContentUtils.java

public static String getOfficialKeyName(final Context context, final String consumerKey,
        final String consumerSecret) {
    if (context == null || consumerKey == null || consumerSecret == null)
        return null;
    final String[] keySecrets = context.getResources()
            .getStringArray(R.array.values_official_consumer_secret_crc32);
    final String[] keyNames = context.getResources().getStringArray(R.array.names_official_consumer_secret);
    final CRC32 crc32 = new CRC32();
    final byte[] consumerSecretBytes = consumerSecret.getBytes(Charset.forName("UTF-8"));
    crc32.update(consumerSecretBytes, 0, consumerSecretBytes.length);
    final long value = crc32.getValue();
    crc32.reset();
    for (int i = 0, j = keySecrets.length; i < j; i++) {
        if (Long.parseLong(keySecrets[i], 16) == value)
            return keyNames[i];
    }
    return null;
}

From source file:org.mariotaku.twidere.util.TwitterContentUtils.java

@NonNull
public static ConsumerKeyType getOfficialKeyType(final Context context, final String consumerKey,
        final String consumerSecret) {
    if (context == null || consumerKey == null || consumerSecret == null) {
        return ConsumerKeyType.UNKNOWN;
    }
    final String[] keySecrets = context.getResources()
            .getStringArray(R.array.values_official_consumer_secret_crc32);
    final String[] keyNames = context.getResources().getStringArray(R.array.types_official_consumer_secret);
    final CRC32 crc32 = new CRC32();
    final byte[] consumerSecretBytes = consumerSecret.getBytes(Charset.forName("UTF-8"));
    crc32.update(consumerSecretBytes, 0, consumerSecretBytes.length);
    final long value = crc32.getValue();
    crc32.reset();
    for (int i = 0, j = keySecrets.length; i < j; i++) {
        if (Long.parseLong(keySecrets[i], 16) == value) {
            return ConsumerKeyType.parse(keyNames[i]);
        }
    }
    return ConsumerKeyType.UNKNOWN;
}

From source file:org.apache.hadoop.raid.TestDirectoryRaidDfs.java

static public void corruptBlocksInDirectory(Configuration conf, Path srcDir, long[] crcs,
        Integer[] listBlockNumToCorrupt, FileSystem fileSys, MiniDFSCluster cluster, boolean validate,
        boolean reportBadBlocks) throws IOException {
    long[] lengths = new long[crcs.length];
    // Get all block Info;
    ArrayList<BlockInfo> blocks = new ArrayList<BlockInfo>();
    List<FileStatus> lfs = RaidNode.listDirectoryRaidFileStatus(conf, fileSys, srcDir);
    assertNotNull(lfs);
    for (int fid = 0; fid < lfs.size(); fid++) {
        FileStatus fsStat = lfs.get(fid);
        long numBlock = RaidNode.getNumBlocks(fsStat);
        for (int bid = 0; bid < numBlock; bid++) {
            blocks.add(new BlockInfo(fid, bid));
        }
        lengths[fid] = fsStat.getLen();
    }
    HashSet<Integer> affectedFiles = new HashSet<Integer>();
    HashSet<Integer> affectedBlocks = new HashSet<Integer>();
    // corrupt blocks
    for (int blockNumToCorrupt : listBlockNumToCorrupt) {
        if (blockNumToCorrupt >= blocks.size()) {
            continue;
        }
        BlockInfo bi = null;
        int blockIndex = blockNumToCorrupt;
        if (blockNumToCorrupt < 0) {
            blockIndex = blocks.size() + blockNumToCorrupt;
            if (blockIndex < 0) {
                continue;
            }
        }
        if (affectedBlocks.contains(blockIndex)) {
            continue;
        }
        affectedBlocks.add(blockIndex);
        bi = blocks.get(blockIndex);
        FileStatus srcFileFs = lfs.get(bi.fileIdx);
        Path srcFile = srcFileFs.getPath();
        LOG.info("Corrupt block " + bi.blockId + " of file " + srcFile);
        LocatedBlocks locations = RaidDFSUtil.getBlockLocations((DistributedFileSystem) fileSys,
                srcFile.toUri().getPath(), 0L, srcFileFs.getLen());
        TestRaidDfs.corruptBlock(srcFile, locations.get(bi.blockId).getBlock(), NUM_DATANODES, true, cluster);
        if (reportBadBlocks) {
            cluster.getNameNode().reportBadBlocks(new LocatedBlock[] { locations.get(bi.blockId) });
        }
        affectedFiles.add(bi.fileIdx);
    }
    // validate files
    if (validate) {
        DistributedRaidFileSystem raidfs = getRaidFS(fileSys, conf);
        for (Integer fid : affectedFiles) {
            FileStatus stat = lfs.get(fid);
            assertTrue(TestRaidDfs.validateFile(raidfs, stat.getPath(), lengths[fid], crcs[fid]));
            // test readFully
            byte[] filebytes = new byte[(int) stat.getLen()];
            FSDataInputStream stm = raidfs.open(stat.getPath());
            stm.readFully(0, filebytes);
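            // Recompute the checksum of the bytes read back through the raid filesystem and compare it to the expected CRC.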
            CRC32 crc = new CRC32();
            crc.update(filebytes, 0, filebytes.length);
            assertEquals(crcs[fid], crc.getValue());
        }
    }
}

From source file:com.hadoop.compression.lzo.LzopInputStream.java

/**
 * Read bytes, update checksums, return first four bytes as an int, first
 * byte read in the MSB.
 */
private static int readHeaderItem(InputStream in, byte[] buf, int len, Adler32 adler, CRC32 crc32)
        throws IOException {
    int ret = readInt(in, buf, len);
    adler.update(buf, 0, len);
    crc32.update(buf, 0, len);
    Arrays.fill(buf, (byte) 0);
    return ret;
}

From source file:JarUtil.java

/**
 * @param entry  the jar entry to add
 * @param in     the stream supplying the entry's contents
 * @param out    the jar output stream to write the entry to
 * @param crc    reusable CRC-32 instance; reset before the method returns
 * @param buffer reusable copy buffer
 * @throws IOException
 */
private static void add(JarEntry entry, InputStream in, JarOutputStream out, CRC32 crc, byte[] buffer)
        throws IOException {
    out.putNextEntry(entry);
    int read;
    long size = 0;
    while ((read = in.read(buffer)) != -1) {
        crc.update(buffer, 0, read);
        out.write(buffer, 0, read);
        size += read;
    }
    entry.setCrc(crc.getValue());
    entry.setSize(size);
    in.close();
    out.closeEntry();
    crc.reset();
}

From source file:com.smash.revolance.ui.model.helper.ArchiveHelper.java

public static File buildArchive(File archive, File... files) throws FileNotFoundException {
    FileOutputStream fos = new FileOutputStream(archive);
    ZipOutputStream zos = new ZipOutputStream(fos);
    int bytesRead;
    byte[] buffer = new byte[1024];
    CRC32 crc = new CRC32();

    for (File file : files) {
        if (!file.exists()) {
            System.err.println("Skipping: " + file);
            continue;
        }
        BufferedInputStream bis = null;
        try {
            bis = new BufferedInputStream(new FileInputStream(file));
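            // First pass: read the file once just to compute its CRC-32. STORED zip entries
            // need the size and checksum set before the entry is written, so the bytes are
            // copied in a second pass below.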
            crc.reset();
            while ((bytesRead = bis.read(buffer)) != -1) {
                crc.update(buffer, 0, bytesRead);
            }

            bis.close();

            // Reset to beginning of input stream
            bis = new BufferedInputStream(new FileInputStream(file));
            String entryPath = FileHelper.getRelativePath(archive.getParentFile(), file);

            ZipEntry entry = new ZipEntry(entryPath);
            entry.setMethod(ZipEntry.STORED);
            entry.setCompressedSize(file.length());
            entry.setSize(file.length());
            entry.setCrc(crc.getValue());
            zos.putNextEntry(entry);
            while ((bytesRead = bis.read(buffer)) != -1) {
                zos.write(buffer, 0, bytesRead);
            }
        } catch (FileNotFoundException e) {
            // Ignored: the file's existence is checked before the stream is opened.
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeQuietly(bis);
        }
    }
    IOUtils.closeQuietly(zos);
    return archive;
}