Example usage for org.apache.commons.codec.digest DigestUtils md2Hex

Introduction

This page lists example usages of org.apache.commons.codec.digest.DigestUtils.md2Hex, collected from open-source projects.

Prototype

public static String md2Hex(String data) 
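
The String overload above is not the only one: DigestUtils also provides md2Hex overloads taking a byte[] and an InputStream, and several of the examples below use the stream form. A minimal, self-contained sketch of the basic call (assuming commons-codec 1.7 or later is on the classpath):

import org.apache.commons.codec.digest.DigestUtils;

public class Md2HexExample {
    public static void main(String[] args) {
        // MD2 yields a 128-bit digest, so md2Hex returns a
        // 32-character lowercase hex string.
        String hex = DigestUtils.md2Hex("Hello, World!");
        System.out.println(hex);
    }
}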

Usage

From source file:co.cask.hydrator.plugin.HasherTest.java

@Test
public void testHasherMD2() throws Exception {
    Transform<StructuredRecord, StructuredRecord> transform = new Hasher(new Hasher.Config("MD2", "a,b,e"));
    transform.initialize(null);

    MockEmitter<StructuredRecord> emitter = new MockEmitter<>();
    transform.transform(StructuredRecord.builder(INPUT).set("a", "Field A").set("b", "Field B")
            .set("c", "Field C").set("d", 4).set("e", "Field E").build(), emitter);

    Assert.assertEquals(5, emitter.getEmitted().get(0).getSchema().getFields().size());
    Assert.assertEquals(DigestUtils.md2Hex("Field A"), emitter.getEmitted().get(0).get("a"));
    Assert.assertEquals(DigestUtils.md2Hex("Field B"), emitter.getEmitted().get(0).get("b"));
    Assert.assertEquals("Field C", emitter.getEmitted().get(0).get("c"));
    Assert.assertEquals(4, emitter.getEmitted().get(0).get("d"));
    Assert.assertEquals(DigestUtils.md2Hex("Field E"), emitter.getEmitted().get(0).get("e"));
}

From source file:fm.last.peyote.cacti.hash.CactiHashKeyGenerator.java

protected String generateHash(String type) {
    String md2Hash = DigestUtils.md2Hex("peyote" + System.nanoTime());
    return "hash_" + type + getVersion() + md2Hash;
}

From source file:co.cask.hydrator.transforms.Hasher.java

@Override
public void transform(StructuredRecord in, Emitter<StructuredRecord> emitter) throws Exception {
    StructuredRecord.Builder builder = StructuredRecord.builder(in.getSchema());

    List<Schema.Field> fields = in.getSchema().getFields();
    for (Schema.Field field : fields) {
        String name = field.getName();
        if (okToHash(name) && field.getSchema().getType() == Schema.Type.STRING) {
            String value = in.get(name);
            String digest = value;
            switch (config.hash.toLowerCase()) {
            case "md2":
                digest = DigestUtils.md2Hex(value);
                break;
            case "md5":
                digest = DigestUtils.md5Hex(value);
                break;
            case "sha1":
                digest = DigestUtils.sha1Hex(value);
                break;
            case "sha256":
                digest = DigestUtils.sha256Hex(value);
                break;
            case "sha384":
                digest = DigestUtils.sha384Hex(value);
                break;
            case "sha512":
                digest = DigestUtils.sha512Hex(value);
                break;
            }
            builder.set(name, digest);
        } else {
            builder.set(name, in.get(name));
        }
    }
    emitter.emit(builder.build());
}

From source file:co.cask.hydrator.plugin.Hasher.java

@Override
public void transform(StructuredRecord in, Emitter<StructuredRecord> emitter) throws Exception {
    StructuredRecord.Builder builder = StructuredRecord.builder(in.getSchema());

    List<Schema.Field> fields = in.getSchema().getFields();
    for (Schema.Field field : fields) {
        String name = field.getName();
        if (fieldSet.contains(name) && field.getSchema().getType() == Schema.Type.STRING) {
            String value = in.get(name);
            String digest = value;
            switch (config.hash.toLowerCase()) {
            case "md2":
                digest = DigestUtils.md2Hex(value);
                break;
            case "md5":
                digest = DigestUtils.md5Hex(value);
                break;
            case "sha1":
                digest = DigestUtils.sha1Hex(value);
                break;
            case "sha256":
                digest = DigestUtils.sha256Hex(value);
                break;
            case "sha384":
                digest = DigestUtils.sha384Hex(value);
                break;
            case "sha512":
                digest = DigestUtils.sha512Hex(value);
                break;
            }
            builder.set(name, digest);
        } else {
            builder.set(name, in.get(name));
        }
    }
    emitter.emit(builder.build());
}

From source file:com.nebkat.plugin.text.TextPlugin.java

@EventHandler
@CommandFilter("hash")
public void onHashCommand(CommandEvent e) {
    if (e.getParams().length < 2) {
        e.showUsage(getBot());
        return;
    }
    String algorithm = e.getParams()[0];
    String text = e.getRawParams().substring(algorithm.length() + 1);
    String result = "Algorithm unsupported";
    if (algorithm.equalsIgnoreCase("md5")) {
        result = DigestUtils.md5Hex(text);
    } else if (algorithm.equalsIgnoreCase("md2")) {
        result = DigestUtils.md2Hex(text);
    } else if (algorithm.equalsIgnoreCase("sha1") || algorithm.equalsIgnoreCase("sha")) {
        result = DigestUtils.sha1Hex(text);
    } else if (algorithm.equalsIgnoreCase("sha256")) {
        result = DigestUtils.sha256Hex(text);
    } else if (algorithm.equalsIgnoreCase("sha384")) {
        result = DigestUtils.sha384Hex(text);
    } else if (algorithm.equalsIgnoreCase("sha512")) {
        result = DigestUtils.sha512Hex(text);
    }
    Irc.message(e.getSession(), e.getTarget(), e.getSource().getNick() + ": " + result);
}

From source file:org.apache.syncope.client.enduser.util.SaltGenerator.java

public static String generate(final String input) {
    // generate salt
    byte[] salt = new byte[16];
    // fill array with random bytes
    new SecureRandom().nextBytes(salt);
    // create digest with MD2
    return DigestUtils.md2Hex(input + Base64.encodeBase64String(salt));
}
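
Note that the random salt is regenerated on every call and is not returned with the digest, so generate produces a different 32-character hex string each time; it is meant for producing salts, not for reproducible hashing.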

From source file:org.apache.tika.parser.BouncyCastleDigestingParserTest.java

private void addTruth(Path tmp, String algo, Metadata truth) throws IOException {
    String digest = null;
    //for now, rely on CommonsDigest for truth
    try (InputStream is = Files.newInputStream(tmp)) {
        if ("MD2".equals(algo)) {
            digest = DigestUtils.md2Hex(is);
        } else if ("MD5".equals(algo)) {
            digest = DigestUtils.md5Hex(is);
        } else if ("SHA1".equals(algo)) {
            digest = DigestUtils.sha1Hex(is);
        } else if ("SHA256".equals(algo)) {
            digest = DigestUtils.sha256Hex(is);
        } else if ("SHA384".equals(algo)) {
            digest = DigestUtils.sha384Hex(is);
        } else if ("SHA512".equals(algo)) {
            digest = DigestUtils.sha512Hex(is);
        } else {
            throw new IllegalArgumentException("Sorry, not aware of algorithm: " + algo);
        }
    }
    truth.set(P + algo, digest);

}
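
This example, like the two that follow, passes an InputStream rather than a String; that overload streams the input instead of buffering it in memory. A minimal sketch of the stream form (the file name here is hypothetical):

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.codec.digest.DigestUtils;

public class Md2HexStreamExample {
    public static void main(String[] args) throws Exception {
        // DigestUtils consumes the stream to its end in chunks;
        // closing it remains the caller's responsibility.
        try (InputStream is = Files.newInputStream(Paths.get("data.bin"))) {
            System.out.println(DigestUtils.md2Hex(is));
        }
    }
}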

From source file:org.apache.tika.parser.DigestingParserTest.java

private void addTruth(Path tmp, CommonsDigester.DigestAlgorithm algo, Metadata truth) throws IOException {
    String digest = null;
    try (InputStream is = Files.newInputStream(tmp)) {
        switch (algo) {
        case MD2:
            digest = DigestUtils.md2Hex(is);
            break;
        case MD5:
            digest = DigestUtils.md5Hex(is);
            break;
        case SHA1:
            digest = DigestUtils.sha1Hex(is);
            break;
        case SHA256:
            digest = DigestUtils.sha256Hex(is);
            break;
        case SHA384:
            digest = DigestUtils.sha384Hex(is);
            break;
        case SHA512:
            digest = DigestUtils.sha512Hex(is);
            break;
        default:
            throw new IllegalArgumentException("Sorry, not aware of algorithm: " + algo.toString());
        }
    }
    truth.set(P + algo.name(), digest);

}

From source file:org.apache.tika.parser.utils.CommonsDigester.java

/**
 *
 * @param algorithm algo to use
 * @param is input stream to read from
 * @param metadata metadata for reporting the digest
 * @return whether or not this finished the input stream
 * @throws IOException
 */
private boolean digestEach(DigestAlgorithm algorithm, InputStream is, Metadata metadata) throws IOException {
    String digest = null;
    try {
        switch (algorithm) {
        case MD2:
            digest = DigestUtils.md2Hex(is);
            break;
        case MD5:
            digest = DigestUtils.md5Hex(is);
            break;
        case SHA1:
            digest = DigestUtils.sha1Hex(is);
            break;
        case SHA256:
            digest = DigestUtils.sha256Hex(is);
            break;
        case SHA384:
            digest = DigestUtils.sha384Hex(is);
            break;
        case SHA512:
            digest = DigestUtils.sha512Hex(is);
            break;
        default:
            throw new IllegalArgumentException("Sorry, not aware of algorithm: " + algorithm.toString());
        }
    } catch (IOException e) {
        e.printStackTrace();
        //swallow, or should we throw this?
    }
    if (is instanceof SimpleBoundedInputStream) {
        if (((SimpleBoundedInputStream) is).hasHitBound()) {
            return false;
        }
    }
    metadata.set(algorithm.getMetadataKey(), digest);
    return true;
}
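
A usage note: the SimpleBoundedInputStream check at the end lets digestEach report whether the digest covered the entire input or only a bounded prefix, so the caller can fall back to re-reading and digesting the full stream.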

From source file:org.craftercms.studio.impl.v1.service.deployment.DmPublishServiceImpl.java

@Override
public void bulkGoLive(String site, String environment, String path) {
    logger.info("Starting Bulk Go Live for path " + path + " site " + site);

    List<String> childrenPaths = new ArrayList<String>();
    ContentItemTO item = contentService.getContentItem(site, path, 2);
    logger.debug("Traversing subtree for site " + site + " and root path " + path);
    if (item != null) {
        if (!item.isFolder()) {
            childrenPaths.add(item.getUri());
        }
        if (item.getUri().endsWith("/" + DmConstants.INDEX_FILE)
                && objectMetadataManager.isRenamed(site, item.getUri())) {
            getAllMandatoryChildren(site, item, childrenPaths);
        } else {
            if (item.isFolder() || item.isContainer()) {
                getAllMandatoryChildren(site, item, childrenPaths);
            }
        }
    }
    logger.debug(
            "Collected " + childrenPaths.size() + " content items for site " + site + " and root path " + path);
    Set<String> processedPaths = new HashSet<String>();
    Date launchDate = new Date();
    for (String childPath : childrenPaths) {
        String childHash = DigestUtils.md2Hex(childPath);
        logger.debug("Processing dependencies for site " + site + " path " + childPath);
        if (processedPaths.add(childHash)) {
            List<String> pathsToPublish = new ArrayList<String>();
            List<String> candidatePathsToPublish = new ArrayList<String>();
            pathsToPublish.add(childPath);
            candidatePathsToPublish.add(childPath);
            getAllDependenciesRecursive(site, childPath, candidatePathsToPublish);
            for (String pathToAdd : candidatePathsToPublish) {
                String hash = DigestUtils.md2Hex(pathToAdd);
                if (processedPaths.add(hash)) {
                    pathsToPublish.add(pathToAdd);
                }
            }
            String approver = securityService.getCurrentUser();
            String comment = "Bulk Go Live invoked by " + approver;
            logger.debug("Deploying package of " + pathsToPublish.size() + " items for site " + site + " path "
                    + childPath);
            try {
                deploymentService.deploy(site, environment, pathsToPublish, launchDate, approver, comment, true);
            } catch (DeploymentException e) {
                logger.error("Error while running bulk Go Live operation", e);
            } finally {
                logger.debug("Finished processing deployment package for path " + childPath + " site " + site);
            }
        }
    }
    logger.info("Finished Bulk Go Live for path " + path + " site " + site);
}