Example usage for org.apache.commons.codec.digest DigestUtils shaHex

List of usage examples for org.apache.commons.codec.digest DigestUtils shaHex

Introduction

On this page you can find example usages for org.apache.commons.codec.digest DigestUtils shaHex.

Prototype

@Deprecated
    public static String shaHex(String data) 

Source Link

Usage

From source file:com.enonic.cms.core.xslt.lib.PortalFunctionsMediatorImpl.java

@Override
public String sha(final String value) {
    // Delegate to commons-codec: returns the SHA-1 digest of the input,
    // encoded as a lower-case hex string.
    final String hexDigest = DigestUtils.shaHex(value);
    return hexDigest;
}

From source file:be.fedict.eid.dss.webapp.ProtocolEntryServlet.java

/**
 * Checks the incoming DSS request against the relying party's (RP)
 * configuration: when the RP requires signed requests a service certificate
 * chain must be present, and when a chain is present the SHA-1 fingerprint
 * of its leaf certificate must match the certificate configured for the RP.
 * On success the RP is stored on the HTTP session.
 *
 * @param rp         the relying party configuration matched for this request
 * @param dssRequest the parsed DSS request (may carry a signing certificate chain)
 * @param request    the servlet request; its session receives the RP on success
 * @param response   the servlet response, used to report validation errors
 * @return {@code true} if the request passes the RP's signing policy
 * @throws IOException if writing the error response fails
 */
private boolean isValid(RPEntity rp, DSSRequest dssRequest, HttpServletRequest request,
        HttpServletResponse response) throws IOException {

    LOG.debug("found RP: " + rp.getName());

    // Signing is mandatory for this RP: reject unsigned requests outright.
    if (rp.isRequestSigningRequired()) {
        if (null == dssRequest.getServiceCertificateChain()) {
            error(request, response, "Request was not signed, which is required for this SP!");
            return false;
        }
    }

    if (null != dssRequest.getServiceCertificateChain()) {

        // Only the leaf (first) certificate of the chain is fingerprinted.
        X509Certificate serviceCertificate = dssRequest.getServiceCertificateChain().get(0);

        if (null != serviceCertificate && null != rp.getEncodedCertificate()) {

            LOG.debug("verify service signature certificate fingerprint against " + "RP's configuration...");

            // verify fingerprint
            // TODO: for now first using fingerprint of value of leaf
            // certificate, expand later for service key rollover scenarios.
            try {
                String rpFingerprint = DigestUtils.shaHex(rp.getEncodedCertificate());
                String requestFingerPrint = DigestUtils.shaHex(serviceCertificate.getEncoded());

                if (!rpFingerprint.equals(requestFingerPrint)) {
                    error(request, response, "Request was not signed with the correct keystore!");
                    return false;
                }
            } catch (CertificateEncodingException e) {
                // NOTE(review): the encoding failure is silently swallowed —
                // the request is rejected without an error response or log
                // entry. Consider at least logging e before returning.
                return false;
            }

        }
    } else {
        // No chain supplied and none required by this RP's configuration.
        LOG.debug("No signature and signature is not required, valid...");
    }

    // Validation passed; remember the RP for the rest of the session.
    request.getSession().setAttribute(View.RP_SESSION_ATTRIBUTE, rp);
    return true;
}

From source file:com.adobe.acs.commons.analysis.jcrchecksum.impl.JSONGenerator.java

/**
 * Writes a single JCR property value to the JSON output using a
 * representation appropriate for the value's type: strings, booleans and
 * numbers are emitted natively, binaries are replaced by the SHA-1 hex
 * digest of their stream, and dates plus all remaining types are wrapped in
 * a {@code {"type": ..., "val": ...}} object.
 *
 * @param property the property owning {@code value} (used only for the
 *                 error-message path of binary hashing)
 * @param value    the value to serialize
 * @param out      the JSON writer receiving the serialized value
 * @throws RepositoryException   if reading the value from the repository fails
 * @throws IllegalStateException if the writer is in an invalid state
 * @throws IOException           if writing JSON output fails
 */
private static void outputPropertyValue(Property property, Value value, JsonWriter out)
        throws RepositoryException, IllegalStateException, IOException {

    switch (value.getType()) {
    case PropertyType.STRING:
        out.value(value.getString());
        break;
    case PropertyType.BINARY:
        // Hash the binary stream instead of embedding its contents.
        // try-with-resources guarantees the stream is closed even when
        // hashing fails (the previous version leaked it on IOException,
        // since close() sat inside the try after shaHex()).
        try (java.io.InputStream stream = value.getBinary().getStream()) {
            String ckSum = DigestUtils.shaHex(stream);
            out.value(ckSum);
        } catch (IOException e) {
            out.value("ERROR: calculating hash for binary of " + property.getPath() + " : " + e.getMessage());
        }
        break;
    case PropertyType.BOOLEAN:
        out.value(value.getBoolean());
        break;
    case PropertyType.DATE:
        Calendar cal = value.getDate();
        if (cal != null) {
            out.beginObject();
            out.name("type");
            out.value(PropertyType.nameFromValue(value.getType()));
            out.name("val");
            out.value(cal.getTime().toString());
            out.endObject();
        }
        break;
    case PropertyType.LONG:
        out.value(value.getLong());
        break;
    case PropertyType.DOUBLE:
        out.value(value.getDouble());
        break;
    case PropertyType.DECIMAL:
        out.value(value.getDecimal());
        break;
    default:
        // Fallback: emit the type name plus the string form of the value.
        out.beginObject();
        out.name("type");
        out.value(PropertyType.nameFromValue(value.getType()));
        out.name("val");
        out.value(value.getString());
        out.endObject();
        break;
    }
}

From source file:com.ebay.pulsar.analytics.metricstore.druid.having.HavingTest.java

public void testLessThanHaving() {
    // Construct a less-than having clause and verify its accessors echo the inputs.
    final String aggName = "Aggregate";
    final String aggValue = "123";
    final LessThanHaving lessThan = new LessThanHaving(aggName, aggValue);

    assertEquals("Aggregate NOT Equals", aggName, lessThan.getAggregation());
    assertEquals("Value NOT Equals", aggValue, lessThan.getValue());

    // The cache key must hash to a known, stable SHA-1 fingerprint.
    final byte[] keyBytes = lessThan.cacheKey();
    assertEquals("Hash of cacheKey NOT Equals", "6d73ae7ef7c7d29b751d0223a22030e39dbca31e",
            DigestUtils.shaHex(keyBytes));
    assertEquals("HavingType NOT Equals", HavingType.lessThan, lessThan.getType());

    // A null value produces a different, equally stable cache key.
    final LessThanHaving nullValueHaving = new LessThanHaving(aggName, null);
    final byte[] nullValueKeyBytes = nullValueHaving.cacheKey();
    assertEquals("Hash of cacheKey NOT Equals", "4dc371b14fa0e56d4f85634fb751ae3480da4c3a",
            DigestUtils.shaHex(nullValueKeyBytes));

    // equals/hashCode contract across null and matching field combinations.
    LessThanHaving candidate = new LessThanHaving(null, null);
    assertTrue(!candidate.equals(lessThan));
    assertTrue(!lessThan.equals(candidate));
    candidate = new LessThanHaving("Aggregate", null);
    assertTrue(!candidate.equals(lessThan));
    assertTrue(!lessThan.equals(candidate));
    candidate = new LessThanHaving("Aggregate", "123");
    assertTrue(candidate.equals(lessThan));
    assertTrue(lessThan.equals(candidate));
    assertTrue(candidate.hashCode() == lessThan.hashCode());
    assertTrue(candidate.equals(candidate));
    // A different having subtype must never compare equal.
    assertTrue(!candidate.equals(new NotHaving(candidate) {

    }));
}

From source file:com.ebay.pulsar.analytics.metricstore.druid.aggregator.AggregatorTest.java

public void testHyperUniqueAggregator() {
    // Build an aggregator and check that both accessors echo the constructor args.
    final String name = "HyperUniqueAggrTest";
    final String field = "FieldName";
    final HyperUniqueAggregator aggregator = new HyperUniqueAggregator(name, field);

    assertEquals("FieldName must be 'FieldName'", field, aggregator.getFieldName());
    assertEquals("Name must be 'HyperUniqueAggrTest'", name, aggregator.getName());

    // The cache key must hash to a known, stable SHA-1 fingerprint.
    final byte[] keyBytes = aggregator.cacheKey();
    assertEquals("Hash of cacheKey NOT Equals", "0f7527d25c5d69a8c58a403632b06aa23394abf7",
            DigestUtils.shaHex(keyBytes));

    // equals/hashCode contract across null, partial, and identical instances.
    final HyperUniqueAggregator fullAgg = new HyperUniqueAggregator("HyperUniqueAggrTest2", "FieldName2");
    final HyperUniqueAggregator emptyAgg = new HyperUniqueAggregator(null, null);
    assertTrue(!emptyAgg.equals(fullAgg));
    assertTrue(!fullAgg.equals(emptyAgg));
    final HyperUniqueAggregator nameOnlyAgg = new HyperUniqueAggregator("HyperUniqueAggrTest2", null);
    assertTrue(!nameOnlyAgg.equals(fullAgg));
    assertTrue(!nameOnlyAgg.equals(emptyAgg));
    final HyperUniqueAggregator sameAgg = new HyperUniqueAggregator("HyperUniqueAggrTest2", "FieldName2");
    assertTrue(sameAgg.equals(fullAgg));
    assertTrue(fullAgg.equals(sameAgg));
    // A different aggregator subtype must never compare equal.
    assertTrue(!fullAgg.equals(new CountAggregator("HyperUniqueAggrTest2")));
    assertTrue(sameAgg.hashCode() == fullAgg.hashCode());
}

From source file:gov.llnl.ontology.mapreduce.table.WordNetEvidenceTable.java

/**
 * {@inheritDoc}
 *
 * <p>The HBase row key is the SHA-1 hex digest of {@code word1 + ":" + word2};
 * the raw key is stored alongside the per-source path counts so the pair can
 * be recovered from the row.
 */
public void putDependencyPaths(String word1, String word2, String source, Counter<String> pathCounts) {
    String key = word1 + ":" + word2;
    // The digest is lower-case hex (pure ASCII), so an explicit charset yields
    // the same bytes as before while removing the dependency on the JVM's
    // platform default encoding.
    Put put = new Put(DigestUtils.shaHex(key).getBytes(java.nio.charset.StandardCharsets.UTF_8));
    SchemaUtil.add(put, DEPENDENCY_FEATURE_CF, source, pathCounts);
    SchemaUtil.add(put, NOUN_PAIR_CF, NOUN_PAIR_COLUMN, key);
    put(put);
}

From source file:com.adobe.acs.commons.analysis.jcrchecksum.impl.ChecksumGeneratorImplTest.java

@Test
public void testGeneratedNodeChecksum() throws RepositoryException, IOException {
    // Build a node carrying one property of each basic type.
    final Node node = session.getRootNode().addNode("page/jcr:content");
    node.setProperty("jcr:title", "My Title");
    node.setProperty("jcr:description", "This is my test node");
    // Plain literals instead of the deprecated Long/Double boxing
    // constructors; the same setProperty(String, long)/(String, double)
    // overloads are invoked either way.
    node.setProperty("long", 100L);
    node.setProperty("double", 99.99);
    node.setProperty("boolean", true);
    session.save();

    // Expected value: each property (including the implicit jcr:primaryType)
    // is hashed individually, the concatenation is hashed again, and the
    // result is hashed once more with the node-name prefix.
    final String raw = "jcr:content/boolean=" + DigestUtils.shaHex("true") + "jcr:content/double="
            + DigestUtils.shaHex("99.99") + "jcr:content/jcr:description="
            + DigestUtils.shaHex("This is my test node") + "jcr:content/jcr:primaryType="
            + DigestUtils.shaHex("nt:unstructured") + "jcr:content/jcr:title=" + DigestUtils.shaHex("My Title")
            + "jcr:content/long=" + DigestUtils.shaHex("100");

    final String propertiesChecksum = DigestUtils.shaHex(raw);
    final String expected = DigestUtils.shaHex("jcr:content=" + propertiesChecksum);

    CustomChecksumGeneratorOptions opts = new CustomChecksumGeneratorOptions();
    opts.addSortedProperties(new String[] { "sorted" });
    opts.addIncludedNodeTypes(new String[] { "nt:unstructured" });

    final Map<String, String> actual = checksumGenerator.generateChecksums(session, node.getPath(), opts);

    assertEquals(expected, actual.get("/page/jcr:content"));
}

From source file:com.ebay.pulsar.analytics.metricstore.druid.query.QueryTest.java

/**
 * Exercises {@link GroupByQuery} construction, the limit/filter/having/
 * post-aggregation setters, and the stability of {@code cacheKey()} across
 * the three granularity flavors (simple, duration, period).  Each expected
 * SHA-1 string pins the serialized cache-key byte layout; a failure here
 * indicates the cache-key format has changed.
 */
public void testGroupByQuery() {
    String dataSource = "GroupByQueryTest";
    List<String> intervals = getIntervals();
    List<String> dimensions = getDimensions();
    List<BaseAggregator> aggregations = getAggregators();
    BaseGranularity granularity = BaseGranularity.ALL;
    int limit = 10;

    // GroupByQuery with SimpleGranularity
    GroupByQuery groupByQuery = new GroupByQuery(dataSource, intervals, granularity, aggregations, dimensions);

    String sort = "metric";
    OrderByColumnSpec orderByColumnSpec = new OrderByColumnSpec(sort, SortDirection.descending);

    String sortGot = orderByColumnSpec.getDimension();
    SortDirection sortDirection = orderByColumnSpec.getDirection();
    assertEquals("Sort NOT Equals", sort, sortGot);
    assertEquals("SortDirection NOT Equals", SortDirection.descending, sortDirection);

    // Attach a limit spec ordering by the single "metric" column.
    List<OrderByColumnSpec> columns = new ArrayList<OrderByColumnSpec>();
    columns.add(orderByColumnSpec);
    DefaultLimitSpec defaultLimitSpec = new DefaultLimitSpec(limit, columns);

    groupByQuery.setLimitSpec(defaultLimitSpec);

    defaultLimitSpec.getColumns();
    OrderByColumnSpec columnSpecGot = columns.get(0);
    int limitGot = defaultLimitSpec.getLimit();
    String typeLimtSpec = defaultLimitSpec.getType();
    assertEquals("ColumnSpec NOT Equals", sort, columnSpecGot.getDimension());
    assertEquals("Limit NOT Equals", limit, limitGot);
    assertEquals("TYPE NOT Equals", "default", typeLimtSpec);

    String dataSourceGot = groupByQuery.getDataSource();
    List<String> intervalsGot = groupByQuery.getIntervals();
    List<String> dimensionsGot = groupByQuery.getDimensions();
    groupByQuery.getAggregations();

    assertEquals("DataSource NOT Equals", dataSource, dataSourceGot);
    assertEquals("Intervals NOT Equals", intervals.get(0), intervalsGot.get(0));
    assertEquals("Dimensions NOT Equals", dimensions.get(0), dimensionsGot.get(0));

    // Cache key with only the limit spec applied.
    byte[] cacheKey = groupByQuery.cacheKey();

    String hashCacheKeyExpected = "9f39d23b76cd0fec69ff694978ca116bec16702d";
    String hashCacheKeyGot = DigestUtils.shaHex(cacheKey);
    assertEquals("Hash of cacheKey NOT Equals", hashCacheKeyExpected, hashCacheKeyGot);

    groupByQuery.toString();
    BaseFilter filter = getFilter();
    groupByQuery.setFilter(filter);

    String dim = "Aggregate";
    String val = "Value";
    EqualToHaving having = new EqualToHaving(dim, val);

    groupByQuery.setHaving(having);

    List<BasePostAggregator> postAggregations = getPostAggregators();
    groupByQuery.setPostAggregations(postAggregations);

    // Cache key after the filter, having, and post-aggregations are attached.
    cacheKey = groupByQuery.cacheKey();

    hashCacheKeyExpected = "01239ed6b654c8413a5e2459e136f50da89bbb27";
    hashCacheKeyGot = DigestUtils.shaHex(cacheKey);
    assertEquals("Hash of cacheKey NOT Equals", hashCacheKeyExpected, hashCacheKeyGot);

    // GroupByQuery with DurationGranularity
    DurationGranularity durationGranularity1 = new DurationGranularity("7200000");
    DurationGranularity durationGranularity2 = new DurationGranularity("7200000", "1970-01-01T00:07:00Z");

    GroupByQuery groupByQuery1 = new GroupByQuery(dataSource, intervals, durationGranularity1, aggregations,
            dimensions);
    GroupByQuery groupByQuery2 = new GroupByQuery(dataSource, intervals, durationGranularity2, aggregations,
            dimensions);

    groupByQuery1.setLimitSpec(defaultLimitSpec);
    groupByQuery2.setLimitSpec(defaultLimitSpec);

    // The optional origin argument must influence the cache key.
    byte[] cacheKey1 = groupByQuery1.cacheKey();

    String hashCacheKeyExpected1 = "e92e05aa14c9e21bbdbb64268ef5ecf60887e291";
    String hashCacheKeyGot1 = DigestUtils.shaHex(cacheKey1);
    assertEquals("Hash of cacheKey NOT Equals", hashCacheKeyExpected1, hashCacheKeyGot1);

    byte[] cacheKey2 = groupByQuery2.cacheKey();

    String hashCacheKeyExpected2 = "fd1c68cf2b3c95f7845d5d85255010d4627a66a5";
    String hashCacheKeyGot2 = DigestUtils.shaHex(cacheKey2);
    assertEquals("Hash of cacheKey NOT Equals", hashCacheKeyExpected2, hashCacheKeyGot2);

    durationGranularity1.getOrigin();
    durationGranularity1.getDuration();

    // GroupByQuery with PeriodGranularity (period, timezone, origin variants).
    PeriodGranularity periodGranularity1 = new PeriodGranularity("P2D");
    PeriodGranularity periodGranularity2 = new PeriodGranularity("P2D", "MST");
    PeriodGranularity periodGranularity3 = new PeriodGranularity("P2D", "MST", "1970-01-01T00:07:00");

    GroupByQuery groupByQueryP1 = new GroupByQuery(dataSource, intervals, periodGranularity1, aggregations,
            dimensions);
    GroupByQuery groupByQueryP2 = new GroupByQuery(dataSource, intervals, periodGranularity2, aggregations,
            dimensions);
    GroupByQuery groupByQueryP3 = new GroupByQuery(dataSource, intervals, periodGranularity3, aggregations,
            dimensions);

    periodGranularity1.getOrigin();
    periodGranularity1.getTimeZone();
    periodGranularity1.getPeriod();

    groupByQueryP1.setLimitSpec(defaultLimitSpec);
    groupByQueryP2.setLimitSpec(defaultLimitSpec);
    groupByQueryP3.setLimitSpec(defaultLimitSpec);

    byte[] cacheKeyP1 = groupByQueryP1.cacheKey();

    String hashCacheKeyExpectedP1 = "45b384d4efbc33dba3324d7ea34ea12c6b6c0829";
    String hashCacheKeyGotP1 = DigestUtils.shaHex(cacheKeyP1);
    assertEquals("Hash of cacheKey NOT Equals", hashCacheKeyExpectedP1, hashCacheKeyGotP1);

    byte[] cacheKeyP2 = groupByQueryP2.cacheKey();

    String hashCacheKeyExpectedP2 = "965b913a29ea5ddc646319219f0bbfcb39ba7827";
    String hashCacheKeyGotP2 = DigestUtils.shaHex(cacheKeyP2);
    assertEquals("Hash of cacheKey NOT Equals", hashCacheKeyExpectedP2, hashCacheKeyGotP2);

    byte[] cacheKeyP3 = groupByQueryP3.cacheKey();

    String hashCacheKeyExpectedP3 = "4d1028224ec58076f47f7ec1d9cac5e2408a624a";
    String hashCacheKeyGotP3 = DigestUtils.shaHex(cacheKeyP3);
    assertEquals("Hash of cacheKey NOT Equals", hashCacheKeyExpectedP3, hashCacheKeyGotP3);

    QueryType type = groupByQuery.getQueryType();
    assertEquals("Type NOT Equals", QueryType.groupBy, type);
    // Instantiated only so the Constants class counts as covered.
    Constants constants = new Constants();
    assertNotNull(constants);
}

From source file:gov.llnl.ontology.mapreduce.table.TrinidadTable.java

/**
 * {@inheritDoc}
 *
 * <p>No-op for {@code null} documents.  The HBase row key is the SHA-1 hex
 * digest of the document key; source, text, and metadata columns are written
 * in a single {@link Put}.
 */
public void put(Document document) {
    if (document == null)
        return;

    // The digest is lower-case hex (pure ASCII), so an explicit charset yields
    // the same bytes as before while removing the dependency on the JVM's
    // platform default encoding.
    Put put = new Put(DigestUtils.shaHex(document.key()).getBytes(java.nio.charset.StandardCharsets.UTF_8));
    // Temporarily blank everything else out so that we can add in the keys
    // and ids to every document that's already been added.
    SchemaUtil.add(put, SOURCE_CF, SOURCE_NAME, document.sourceCorpus());
    SchemaUtil.add(put, TEXT_CF, TEXT_ORIGINAL, document.originalText());
    SchemaUtil.add(put, TEXT_CF, TEXT_RAW, document.rawText());
    SchemaUtil.add(put, TEXT_CF, TEXT_TITLE, document.title());
    SchemaUtil.add(put, TEXT_CF, TEXT_TYPE, XML_MIME_TYPE);
    SchemaUtil.add(put, META_CF, DOC_KEY, document.key());
    SchemaUtil.add(put, META_CF, DOC_ID, Long.toString(document.id()));
    SchemaUtil.add(put, META_CF, CATEGORY_COLUMN, document.categories());
    put(put);
}

From source file:com.ebay.pulsar.analytics.metricstore.druid.postaggregator.PostAggregatorTest.java

public void testFieldAccessPostAggregator() {
    // Build a field-access post-aggregator and pin the SHA-1 of its cache key.
    final String name = "FieldAccessTest";
    final String field = "FieldName";
    final FieldAccessorPostAggregator accessor = new FieldAccessorPostAggregator(name, field);

    final byte[] keyBytes = accessor.cacheKey();
    assertEquals("Hash of cacheKey NOT Equals", "22c95e82dfec8270f675c56caca53c535c043ca7",
            DigestUtils.shaHex(keyBytes));

    assertEquals("FieldNames NOT Equals", field, accessor.getFieldName());
    assertEquals("Type NOT Equals", PostAggregatorType.fieldAccess, accessor.getType());

    // equals/hashCode contract: identical, null, and partially-differing instances.
    final FieldAccessorPostAggregator sameAccessor = new FieldAccessorPostAggregator(name, field);
    assertTrue(sameAccessor.equals(sameAccessor));
    assertTrue(sameAccessor.equals(accessor));
    FieldAccessorPostAggregator candidate = new FieldAccessorPostAggregator(null, null);
    assertTrue(!candidate.equals(sameAccessor));
    assertTrue(!sameAccessor.equals(candidate));
    candidate = new FieldAccessorPostAggregator(name, null);
    assertTrue(!candidate.equals(sameAccessor));
    assertTrue(!sameAccessor.equals(candidate));
    candidate = new FieldAccessorPostAggregator(name, "FieldName2");
    assertTrue(!candidate.equals(sameAccessor));
    assertTrue(!sameAccessor.equals(candidate));
    candidate = new FieldAccessorPostAggregator("FieldAccessTest2", field);
    assertTrue(!candidate.equals(sameAccessor));
    assertTrue(!sameAccessor.equals(candidate));

    // Equal fields imply equal objects and equal hash codes.
    candidate = new FieldAccessorPostAggregator(name, field);
    assertTrue(candidate.equals(sameAccessor));
    assertTrue(sameAccessor.equals(candidate));
    assertTrue(candidate.hashCode() == sameAccessor.hashCode());
    assertTrue(!candidate.equals(new Object()));
}