Example usage for com.google.common.hash Hashing md5

List of usage examples for com.google.common.hash Hashing md5

Introduction

On this page you can find example usage for com.google.common.hash Hashing.md5().

Prototype

public static HashFunction md5() 

Source Link

Document

Returns a hash function implementing the MD5 hash algorithm (128 hash bits) by delegating to the MD5 MessageDigest .

Usage

From source file:brooklyn.entity.webapp.jboss.JBoss7SshDriver.java

/**
 * Creates a hash of a username, password and security realm that is suitable for use
 * with AS7 and WildFly.
 * <p/>
 * Although AS7 ships an <code>add-user.sh</code> script, it is unsuitable for
 * non-interactive use. (See AS7-5061 for details.) Versions 7.1.2+ (EAP) accept
 * a <code>--silent</code> flag; once this entity is updated past 7.1.1 that flag
 * should probably be used instead.
 * <p/>
 * This mirrors the hashing performed by AS7's own
 * <code>UsernamePasswordHashUtil.generateHashedURP</code>.
 *
 * @see <a href="https://issues.jboss.org/browse/AS7-5061">AS7-5061</a>
 * @see <a href="https://github.com/jboss-remoting/jboss-sasl/blob/master/src/main/java/org/jboss/sasl/util/UsernamePasswordHashUtil.java">
 *     UsernamePasswordHashUtil.generateHashedURP</a>
 * @return <code>HEX(MD5(username ':' realm ':' password))</code>
 */
public static String hashPassword(String username, String password, String realm) {
    // AS7 digests the UTF-8 bytes of "user:realm:password" and renders lowercase hex.
    final String toDigest = username + ":" + realm + ":" + password;
    final byte[] md5 = Hashing.md5().hashString(toDigest, Charsets.UTF_8).asBytes();
    return BaseEncoding.base16().lowerCase().encode(md5);
}

From source file:org.dllearner.reasoning.ClosedWorldReasoner.java

/**
 * Loads a previously cached materialization from disk, or computes a fresh one
 * (and caches it) when no matching cache file exists. When
 * {@code useMaterializationCaching} is false, simply materializes.
 * <p>
 * The cache file name is an MD5 over the reasoner options and the ontology's
 * axiom hash codes, so changed options or a changed ontology select a
 * different cache file.
 */
private void loadOrDematerialize() {
    if (useMaterializationCaching) {
        File cacheDir = new File("cache");
        // best-effort; a failed mkdir surfaces later as an IOException on the cache file
        cacheDir.mkdirs();
        HashFunction hf = Hashing.md5();
        Hasher hasher = hf.newHasher();
        hasher.putBoolean(materializeExistentialRestrictions);
        hasher.putBoolean(handlePunning);
        for (OWLOntology ont : Collections.singleton(baseReasoner.getOntology())) {
            hasher.putInt(ont.getLogicalAxioms().hashCode());
            hasher.putInt(ont.getAxioms().hashCode());
        }
        String filename = hasher.hash().toString() + ".obj";

        File cacheFile = new File(cacheDir, filename);
        if (cacheFile.exists()) {
            logger.debug("Loading materialization from disk...");
            try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(cacheFile))) {
                Materialization mat = (Materialization) ois.readObject();
                classInstancesPos = mat.classInstancesPos;
                classInstancesNeg = mat.classInstancesNeg;
                opPos = mat.opPos;
                dpPos = mat.dpPos;
                bdPos = mat.bdPos;
                bdNeg = mat.bdNeg;
                dd = mat.dd;
                id = mat.id;
                sd = mat.sd;
            } catch (ClassNotFoundException | IOException e) {
                // Log with the cause instead of printStackTrace(). On failure the
                // fields above may be only partially populated.
                // TODO(review): consider falling back to materialize() here.
                logger.error("Failed to load materialization cache " + cacheFile, e);
            }
            logger.debug("done.");
        } else {
            materialize();
            Materialization mat = new Materialization();
            mat.classInstancesPos = classInstancesPos;
            mat.classInstancesNeg = classInstancesNeg;
            mat.opPos = opPos;
            mat.dpPos = dpPos;
            mat.bdPos = bdPos;
            mat.bdNeg = bdNeg;
            mat.dd = dd;
            mat.id = id;
            mat.sd = sd;
            try (ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(cacheFile))) {
                oos.writeObject(mat);
            } catch (IOException e) {
                // Caching is best-effort: log and continue with the in-memory result.
                logger.error("Failed to write materialization cache " + cacheFile, e);
            }
        }
    } else {
        materialize();
    }
}

From source file:net.nharyes.drivecopy.biz.wfm.FileStorageWorkflowManagerImpl.java

/**
 * Downloads a Drive entry to the local file (or directory) described by
 * {@code file}. Options that only apply to uploads (delete-after, MIME type,
 * skip-revision, create-folders) are ignored with a warning. Directory
 * entries are downloaded to a temporary file and then decompressed; the
 * optional MD5 check compares the local digest against the remote entry's.
 *
 * @param file describes the entry to download and the local destination
 * @return the downloaded file (or decompressed directory)
 * @throws WorkflowManagerException on any SDO or I/O failure, or on digest mismatch
 */
private FileBO download(FileBO file) throws WorkflowManagerException {

    try {

        // get token
        TokenBO token = getToken();

        // log action
        logger.info(
                String.format("Download entry '%s' to '%s'", file.getName(), file.getFile().getAbsolutePath()));

        // check delete after option
        if (file.isDeleteAfter())
            logger.warning("Delete option ignored");

        // check MIME type option
        if (file.getMimeType() != null)
            logger.warning("MIME type option ignored");

        // check skip revision option
        if (file.isSkipRevision())
            logger.warning("Skip revision option ignored");

        // check create folders option
        if (file.isCreateFolders())
            logger.warning("Create folders option ignored");

        // process folders and get parent ID
        String parentId = driveSdo.getLastFolderId(token, extractFolders(file.getName()), false);

        // search entry
        EntryBO entry = driveSdo.searchEntry(token, extractFileName(file.getName()), parentId);

        // check directory
        if (file.isDirectory()) {

            // directories arrive as a compressed archive: download to a temp file first
            File tempFile = File.createTempFile("drivecopy" + System.currentTimeMillis(), "temp");
            logger.fine(String.format("Created temporary file '%s'", tempFile.getAbsolutePath()));

            // set file property
            entry.setFile(tempFile);

        } else {

            // set file property
            entry.setFile(file.getFile());
        }

        // download entry
        entry = driveSdo.downloadEntry(token, entry);

        // in case check MD5 of the downloaded entry
        if (file.isCheckMd5()) {

            // calculate MD5 of the local file/directory
            logger.info("calculate the MD5 summary of the file...");
            // HashCode.toString() yields zero-padded lowercase hex. The previous
            // rendering via new BigInteger(1, digest).toString(16) dropped leading
            // zero nibbles, so digests starting with '0' (about 1 in 16 files)
            // could never match the entry's full-length hex MD5.
            String sDigest = Files.hash(entry.getFile(), Hashing.md5()).toString();
            logger.fine(String.format("digest of the file: %s", sDigest));
            logger.fine(String.format("digest of the entry: %s", entry.getMd5Sum()));

            // compare digests
            if (!sDigest.equalsIgnoreCase(entry.getMd5Sum()))
                throw new WorkflowManagerException("wrong digest!");
            logger.info("digests comparison OK");
        }

        // check directory
        if (file.isDirectory()) {

            logger.info("Decompress file");

            // decompress file
            DirectoryBO dirBO = new DirectoryBO();
            dirBO.setFile(entry.getFile());
            dirBO.setDestinationDirectory(file.getFile());
            dirBO = directoryCompressorWorkflowManager.handleWorkflow(dirBO,
                    DirectoryCompressorWorkflowManager.ACTION_DECOMPRESS);

            // delete downloaded file
            logger.fine("Delete downloaded file");
            entry.getFile().delete();

            // return decompressed directory
            FileBO fBO = new FileBO();
            fBO.setFile(dirBO.getFile());
            fBO.setName(entry.getName());
            return fBO;
        }

        // return downloaded entry
        FileBO fBO = new FileBO();
        fBO.setFile(entry.getFile());
        fBO.setName(entry.getName());
        return fBO;

    } catch (SdoException | IOException ex) {

        // re-throw exception, preserving the cause
        throw new WorkflowManagerException(ex.getMessage(), ex);
    }
}

From source file:com.underthehood.weblogs.utils.TimeuuidGenerator.java

/**
 * Using MAC address, process id, and some env props 
 * @param addr/* ww  w.j  a v  a2 s.c  o m*/
 * @return
 */
private static long makeNode(InetAddress addr) {
    byte[] mac = null;
    if (addr != null) {
        try {
            mac = NetworkInterface.getByInetAddress(addr).getHardwareAddress();

        } catch (SocketException e) {
        }
    }
    if (mac == null) {
        try {
            mac = NetworkInterface.getByInetAddress(getActualIPv4HostAddress()).getHardwareAddress();

            Hasher hash = Hashing.md5().newHasher();
            hash.putBytes(mac);

            String procId = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
            hash.putBytes(procId.getBytes(StandardCharsets.UTF_8));

            Properties props = System.getProperties();
            hash.putBytes(props.getProperty("os.arch").getBytes(StandardCharsets.UTF_8));
            hash.putBytes(props.getProperty("os.name").getBytes(StandardCharsets.UTF_8));
            hash.putBytes(props.getProperty("os.version").getBytes(StandardCharsets.UTF_8));
            hash.putBytes(props.getProperty("java.vendor").getBytes(StandardCharsets.UTF_8));
            hash.putBytes(props.getProperty("java.version").getBytes(StandardCharsets.UTF_8));

            return hash.hash().asLong();

        } catch (SocketException e) {
            log.warn("Unable to get a valid network interface!");
            log.debug("", e);
        }
    }
    return UUID.randomUUID().getLeastSignificantBits();
}

From source file:org.sonatype.nexus.proxy.maven.maven2.M2GroupRepository.java

/**
 * Aggregates metadata from all member repositories.
 * <p>
 * Retrieves the requested metadata from every member; if merging is disabled
 * the first hit is returned as-is. Otherwise all parseable member metadata
 * files are merged, the merged document is stored as a new item, and fresh
 * MD5/SHA1 checksum files are stored alongside it.
 *
 * @param request the metadata request
 * @return the first member's item (when not merging) or the merged metadata item
 * @throws ItemNotFoundException if no member has (parseable) metadata
 */
private StorageItem doRetrieveMetadata(ResourceStoreRequest request) throws StorageException,
        IllegalOperationException, UnsupportedStorageOperationException, ItemNotFoundException {
    List<StorageItem> items = doRetrieveItems(request);

    if (items.isEmpty()) {
        throw new ItemNotFoundException(
                reasonFor(request, this, "Metadata %s not found in any of the members of %s.",
                        request.getRequestPath(), RepositoryStringUtils.getHumanizedNameString(this)));
    }

    if (!isMergeMetadata()) {
        // not merging: return the 1st and ciao
        return items.get(0);
    }

    List<Metadata> existingMetadatas = new ArrayList<Metadata>();

    try {
        // Collect parseable metadata from each member; a member serving invalid
        // metadata is skipped (and reported via the event bus), not fatal.
        for (StorageItem item : items) {
            // NOTE(review): a non-file item aborts collection of ALL remaining
            // members (break, not continue) — confirm this is intended.
            if (!(item instanceof StorageFileItem)) {
                break;
            }

            StorageFileItem fileItem = (StorageFileItem) item;

            try {
                existingMetadatas.add(parseMetadata(fileItem));
            } catch (IOException e) {
                log.warn("IOException during parse of metadata UID=\""
                        + fileItem.getRepositoryItemUid().toString() + "\", will be skipped from aggregation!",
                        e);

                eventBus().post(newMetadataFailureEvent(fileItem,
                        "Invalid metadata served by repository. If repository is proxy, please check out what is it serving!"));
            } catch (MetadataException e) {
                log.warn("Metadata exception during parse of metadata from UID=\""
                        + fileItem.getRepositoryItemUid().toString() + "\", will be skipped from aggregation!",
                        e);

                eventBus().post(newMetadataFailureEvent(fileItem,
                        "Invalid metadata served by repository. If repository is proxy, please check out what is it serving!"));
            }
        }

        if (existingMetadatas.isEmpty()) {
            throw new ItemNotFoundException(
                    reasonFor(request, this, "Metadata %s not parseable in any of the members of %s.",
                            request.getRequestPath(), RepositoryStringUtils.getHumanizedNameString(this)));
        }

        // The first member's metadata is the merge target; the rest are folded in.
        Metadata result = existingMetadatas.get(0);

        // do a merge if necessary
        if (existingMetadatas.size() > 1) {
            List<MetadataOperation> ops = new ArrayList<MetadataOperation>();

            for (int i = 1; i < existingMetadatas.size(); i++) {
                ops.add(new NexusMergeOperation(new MetadataOperand(existingMetadatas.get(i))));
            }

            // Merge failures are tolerated: log each and keep whatever merged cleanly.
            final Collection<MetadataException> metadataExceptions = MetadataBuilder
                    .changeMetadataIgnoringFailures(result, ops);
            if (metadataExceptions != null && !metadataExceptions.isEmpty()) {
                for (final MetadataException metadataException : metadataExceptions) {
                    log.warn("Ignored exception during M2 metadata merging: " + metadataException.getMessage()
                            + " (request " + request.getRequestPath() + ")", metadataException);
                }
            }
        }

        // build the result item
        ByteArrayOutputStream resultOutputStream = new ByteArrayOutputStream();

        MetadataBuilder.write(result, resultOutputStream);

        StorageItem item = createMergedMetadataItem(request, resultOutputStream.toByteArray(), items);

        // build checksum files: hex digests over the merged metadata bytes
        String md5Digest = Hashing.md5().hashBytes(resultOutputStream.toByteArray()).toString();
        String sha1Digest = Hashing.sha1().hashBytes(resultOutputStream.toByteArray()).toString();

        storeMergedMetadataItemDigest(request, md5Digest, items, "MD5");
        storeMergedMetadataItemDigest(request, sha1Digest, items, "SHA1");

        resultOutputStream.close();

        if (log.isDebugEnabled()) {
            log.debug("Item for path " + request.toString() + " merged from " + Integer.toString(items.size())
                    + " found items.");
        }

        return item;

    } catch (IOException e) {
        throw new LocalStorageException("Got IOException during M2 metadata merging.", e);
    } catch (MetadataException e) {
        throw new LocalStorageException("Got MetadataException during M2 metadata merging.", e);
    }
}

From source file:org.jclouds.ssh.SshKeys.java

/**
 * Create a fingerprint per the following <a
 * href="http://tools.ietf.org/html/draft-friedl-secsh-fingerprint-00" >spec</a>
 * /*w  ww  .j ava 2s . c om*/
 * @param publicExponent
 * @param modulus
 * 
 * @return hex fingerprint ex. {@code 2b:a9:62:95:5b:8b:1d:61:e0:92:f7:03:10:e9:db:d9}
 */
public static String fingerprint(BigInteger publicExponent, BigInteger modulus) {
    byte[] keyBlob = keyBlob(publicExponent, modulus);
    return hexColonDelimited(Hashing.md5().hashBytes(keyBlob));
}

From source file:com.rhymestore.store.RedisStore.java

/**
 * Makes a md5 sum of the given text./*from  www.ja v  a 2s  . co m*/
 * 
 * @param value The text to sum.
 * @return The md5 sum of the given text.
 */
@VisibleForTesting
String sum(final String value) {
    return Hashing.md5().hashString(value, encoding).toString();
}

From source file:fr.ens.biologie.genomique.eoulsan.modules.mapping.hadoop.ReadsMapperHadoopModule.java

/**
 * Computes the checksum of a ZIP file.
 * <p>
 * The checksum is an MD5 over each entry's name, size and CRC, folded in
 * entry-name order, so two archives containing the same entries hash to the
 * same value regardless of the order entries appear in the stream.
 *
 * @param in input stream positioned at the start of the ZIP data
 * @return the checksum as a string
 * @throws IOException if an error occurs while creating the checksum
 */
private static String computeZipCheckSum(final InputStream in) throws IOException {

    // Store entries in a map (name -> {size, crc})
    final Map<String, long[]> map = new HashMap<>();

    // try-with-resources: the original only closed the stream on the success
    // path and leaked it when getNextZipEntry() threw.
    try (ZipArchiveInputStream zais = new ZipArchiveInputStream(in)) {
        ZipArchiveEntry e;
        while ((e = zais.getNextZipEntry()) != null) {
            map.put(e.getName(), new long[] { e.getSize(), e.getCrc() });
        }
    }

    // Create Hash function
    final Hasher hs = Hashing.md5().newHasher();

    // Add values to hash function in an ordered manner
    for (String filename : new TreeSet<>(map.keySet())) {

        hs.putString(filename, StandardCharsets.UTF_8);
        for (long l : map.get(filename)) {
            hs.putLong(l);
        }
    }

    return hs.hash().toString();
}

From source file:com.googlesource.gerrit.plugins.xdocs.XDocServlet.java

/**
 * Computes an ETag for a rendered document: an MD5 over the project name,
 * the revision(s) and the file path, plus the second revision and the diff
 * mode when a diff is being served.
 */
private static String computeETag(Project.NameKey project, ObjectId revId, String file, ObjectId revIdB,
        DiffMode diffMode) {
    Hasher h = Hashing.md5().newHasher();
    h.putUnencodedChars(project.get());
    // revId may be absent (e.g. no base revision); only fold it in when present
    if (revId != null) {
        h.putUnencodedChars(revId.getName());
    }
    h.putUnencodedChars(file);
    if (diffMode != DiffMode.NO_DIFF) {
        h.putUnencodedChars(revIdB.getName());
        h.putUnencodedChars(diffMode.name());
    }
    return h.hash().toString();
}

From source file:org.dspace.ctask.replicate.checkm.TransmitManifest.java

/**
 * Renders a manifest line for the given file according to the pipe-delimited
 * template. Template tokens starting with "x" yield empty fields; the other
 * fields are filled positionally: name, checksum algorithm, checksum, length,
 * and modification time.
 *
 * @param file the file to describe
 * @return the pipe-delimited manifest line
 * @throws IOException if the file cannot be read
 */
private String tokenized(Path file) throws IOException {
    StringBuilder line = new StringBuilder();
    String[] tokens = template.split("\\|");
    for (int pos = 0; pos < tokens.length; pos++) {
        // Tokens starting with "x" are excluded placeholders; leave the field empty.
        if (!tokens[pos].startsWith("x")) {
            // tokens are positionally defined
            switch (pos) {
            case 0:
                // what URL/name format?
                line.append(file.getFileName().toString());
                break;
            case 1:
                // Checksum algorithm
                line.append("md5");
                break;
            case 2:
                // Checksum of the file content
                line.append(com.google.common.io.Files.hash(file.toFile(), Hashing.md5()).toString());
                break;
            case 3:
                // length in bytes
                line.append(Files.size(file));
                break;
            case 4:
                // modified - use item level data?
                line.append(Files.getLastModifiedTime(file).toMillis());
                break;
            case 5:
                // target name - skip for now
            default:
                break;
            }
        }
        line.append("|");
    }
    // Drop the trailing separator.
    return line.substring(0, line.length() - 1);
}