Example usage for org.apache.commons.codec.digest MessageDigestAlgorithms SHA_1

Introduction

This page collects example usages of the org.apache.commons.codec.digest.MessageDigestAlgorithms.SHA_1 constant, drawn from the source files listed below.

Prototype

public static final String SHA_1 = "SHA-1"
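
Because SHA_1 is just the standard JCA algorithm name "SHA-1" held in a String constant, it can be passed directly to MessageDigest.getInstance(String). A minimal sketch (class name and input are illustrative):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.MessageDigestAlgorithms;

public class Sha1Example {
    public static void main(String[] args) throws Exception {
        // MessageDigestAlgorithms.SHA_1 holds the standard JCA name "SHA-1".
        MessageDigest digest = MessageDigest.getInstance(MessageDigestAlgorithms.SHA_1);
        byte[] hash = digest.digest("hello".getBytes(StandardCharsets.UTF_8));
        System.out.println(Hex.encodeHexString(hash)); // 40-character hex digest
    }
}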


Usage

From source file:de.elomagic.carafile.client.CaraFileUtils.java

/**
 * Creates a meta file from the given file.
 *
 * @param path Path of the file
 * @param filename Real name of the file, because it can differ from the path parameter
 * @return The meta data of the given file
 * @throws IOException if the file cannot be read
 * @throws GeneralSecurityException if the digest algorithm is unavailable
 */
public static final MetaData createMetaData(final Path path, final String filename)
        throws IOException, GeneralSecurityException {
    if (Files.notExists(path)) {
        throw new FileNotFoundException("File " + path.toString() + " not found!");
    }

    if (Files.isDirectory(path)) {
        throw new IllegalArgumentException("Not a file: " + path.toString());
    }

    MetaData md = new MetaData();
    md.setSize(Files.size(path));
    md.setFilename(filename);
    md.setCreationDate(new Date());
    md.setChunkSize(DEFAULT_PIECE_SIZE);

    try (InputStream in = Files.newInputStream(path, StandardOpenOption.READ);
            BufferedInputStream bin = new BufferedInputStream(in)) {
        MessageDigest mdComplete = MessageDigest.getInstance(MessageDigestAlgorithms.SHA_1);

        byte[] buffer = new byte[DEFAULT_PIECE_SIZE];
        int bytesRead;

        while ((bytesRead = bin.read(buffer)) > 0) {
            mdComplete.update(buffer, 0, bytesRead);

            ChunkData chunk = new ChunkData(
                    DigestUtils.sha1Hex(new ByteArrayInputStream(buffer, 0, bytesRead)));
            md.addChunk(chunk);
        }

        String sha1 = Hex.encodeHexString(mdComplete.digest());
        md.setId(sha1);
    }

    return md;
}
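
A possible invocation of createMetaData; the path is illustrative, and the getId() accessor is an assumption (mirroring the setId(...) call above), not taken from the source:

import java.nio.file.Path;
import java.nio.file.Paths;

public class MetaDataDemo {
    public static void main(String[] args) throws Exception {
        Path file = Paths.get("/tmp/example.bin"); // hypothetical input file
        MetaData meta = CaraFileUtils.createMetaData(file, "example.bin");
        // getId() is assumed to return what setId(...) stored:
        // the hex-encoded SHA-1 of the complete file. Each chunk carries
        // the SHA-1 of one DEFAULT_PIECE_SIZE piece.
        System.out.println("File id: " + meta.getId());
    }
}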

From source file:com.aoppp.gatewaysdk.internal.hw.DigestUtils2.java

/**
 * Returns an SHA-1 digest.
 *
 * @return An SHA-1 digest instance.
 * @throws IllegalArgumentException
 *             when a {@link NoSuchAlgorithmException} is caught, which should never happen because SHA-1 is a
 *             built-in algorithm
 * @see MessageDigestAlgorithms#SHA_1
 * @since 1.7
 */
public static MessageDigest getSha1Digest() {
    return getDigest(MessageDigestAlgorithms.SHA_1);
}
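
A short usage sketch for this helper. It assumes getDigest(String) behaves like Commons Codec's own DigestUtils.getDigest, i.e. it wraps the checked NoSuchAlgorithmException in an IllegalArgumentException, so callers need no try/catch:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

import org.apache.commons.codec.binary.Hex;

public class Sha1HelperDemo {
    public static void main(String[] args) {
        MessageDigest sha1 = DigestUtils2.getSha1Digest();
        sha1.update("payload".getBytes(StandardCharsets.UTF_8));
        byte[] raw = sha1.digest(); // SHA-1 always yields 20 bytes
        System.out.println(Hex.encodeHexString(raw)); // 40-character hex string
    }
}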

From source file:uk.bl.wa.util.HashedCachedInputStream.java

/**
 * @param header The header of the archive record
 * @param in The stream from which the record content is read
 * @param length The expected length of the record content in bytes
 */
private void init(ArchiveRecordHeader header, InputStream in, long length) {
    url = Normalisation.sanitiseWARCHeaderValue(header.getUrl());
    try {
        digest = MessageDigest.getInstance(MessageDigestAlgorithms.SHA_1);
    } catch (NoSuchAlgorithmException e) {
        log.error("Hashing: " + url + "@" + header.getOffset(), e);
    }

    try {
        if (header.getHeaderFieldKeys().contains(HEADER_KEY_PAYLOAD_DIGEST)) {
            headerHash = (String) header.getHeaderValue(HEADER_KEY_PAYLOAD_DIGEST);
        }

        // Create a suitable outputstream for caching the content:
        OutputStream cache = null;
        if (length < inMemoryThreshold) {
            inMemory = true;
            cache = new ByteArrayOutputStream();
        } else {
            inMemory = false;
            cacheFile = File.createTempFile("warc-indexer", ".cache");
            cacheFile.deleteOnExit();
            cache = new FileOutputStream(cacheFile);
        }

        DigestInputStream dinput = new DigestInputStream(in, digest);

        long toCopy = length;
        if (length > this.onDiskThreshold) {
            toCopy = this.onDiskThreshold;
        }
        IOUtils.copyLarge(dinput, cache, 0, toCopy);
        cache.close();

        // Read the remainder of the stream, to get the hash.
        if (length > this.onDiskThreshold) {
            truncated = true;
            IOUtils.skip(dinput, length - this.onDiskThreshold);
        }

        hash = "sha1:" + Base32.encode(digest.digest());

        // For response records, check the hash is consistent with any header hash:
        if ((headerHash != null) && (hash.length() == headerHash.length())) {
            if (header.getHeaderFieldKeys().contains(HEADER_KEY_TYPE) && header.getHeaderValue(HEADER_KEY_TYPE)
                    .equals(WARCConstants.WARCRecordType.response.toString())) {
                if (!headerHash.equals(hash)) {
                    log.error("Hashes are not equal for this input!");
                    log.error(" - payload hash from header = " + headerHash);
                    log.error(" - payload hash from content = " + hash);
                    throw new RuntimeException("Hash check failed!");
                } else {
                    log.debug("Hashes were found to match for " + url);
                }
            } else {
                // For revisit records, use the hash of the revisited payload:
                // TODO this should actually only do it for revisit type records.
                this.hash = this.headerHash;
            }
        }

        // Now set up the inputStream
        if (inMemory) {
            this.cacheBytes = ((ByteArrayOutputStream) cache).toByteArray();
            // Encourage GC
            cache = null;
        }
    } catch (Exception e) {
        log.error("Hashing: " + url + "@" + header.getOffset(), e);
    }
}
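
The core pattern in init(...) is a DigestInputStream: every byte copied to the cache also updates the digest, so caching and hashing happen in a single pass over the record. A minimal, self-contained sketch of that pattern (names and payload are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.security.DigestInputStream;
import java.security.MessageDigest;

import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.MessageDigestAlgorithms;

public class DigestCopyExample {
    public static void main(String[] args) throws Exception {
        byte[] payload = "record payload".getBytes(StandardCharsets.UTF_8);
        MessageDigest sha1 = MessageDigest.getInstance(MessageDigestAlgorithms.SHA_1);

        ByteArrayOutputStream cache = new ByteArrayOutputStream();
        try (DigestInputStream din =
                new DigestInputStream(new ByteArrayInputStream(payload), sha1)) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = din.read(buf)) != -1) {
                cache.write(buf, 0, n); // caching and hashing in the same pass
            }
        }
        // The original code Base32-encodes the digest; hex is used here for brevity.
        System.out.println("sha1:" + Hex.encodeHexString(sha1.digest()));
    }
}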