Example usage for java.util.concurrent ConcurrentHashMap compute

List of usage examples for java.util.concurrent ConcurrentHashMap compute

Introduction

On this page you can find example usage of java.util.concurrent ConcurrentHashMap compute.

Prototype

public V compute(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) 

Document

Attempts to compute a mapping for the specified key and its current mapped value (or null if there is no current mapping).
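
Below is a minimal, self-contained sketch (hypothetical class and variable names) of the three outcomes of compute: the remapping function receives null when there is no current mapping, it receives the current value when one exists, and returning null removes the entry.

import java.util.concurrent.ConcurrentHashMap;

public class ComputeSketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();

        // No current mapping: the function receives null and its result becomes the new value.
        counts.compute("apple", (key, value) -> value == null ? 1 : value + 1);

        // Existing mapping: the function receives the current value and returns its replacement.
        counts.compute("apple", (key, value) -> value == null ? 1 : value + 1);
        System.out.println(counts.get("apple")); // 2

        // Returning null removes the mapping.
        counts.compute("apple", (key, value) -> null);
        System.out.println(counts.containsKey("apple")); // false
    }
}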

Usage

From source file: io.pravega.controller.eventProcessor.impl.SerializedRequestHandlerTest.java

@Test(timeout = 10000)
public void testProcessEvent() throws InterruptedException, ExecutionException {
    final ConcurrentHashMap<String, List<Integer>> orderOfProcessing = new ConcurrentHashMap<>();

    SerializedRequestHandler<TestEvent> requestHandler = new SerializedRequestHandler<TestEvent>(
            executorService()) {
        @Override
        public CompletableFuture<Void> processEvent(TestEvent event) {
            orderOfProcessing.compute(event.getKey(), (x, y) -> {
                if (y == null) {
                    y = new ArrayList<>();
                }
                y.add(event.getNumber());
                return y;
            });
            return event.getFuture();
        }
    };

    List<Pair<TestEvent, CompletableFuture<Void>>> stream1Queue = requestHandler
            .getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertNull(stream1Queue);
    // post 3 work for stream1
    TestEvent s1e1 = new TestEvent("scope", "stream1", 1);
    CompletableFuture<Void> s1p1 = requestHandler.process(s1e1);
    TestEvent s1e2 = new TestEvent("scope", "stream1", 2);
    CompletableFuture<Void> s1p2 = requestHandler.process(s1e2);
    TestEvent s1e3 = new TestEvent("scope", "stream1", 3);
    CompletableFuture<Void> s1p3 = requestHandler.process(s1e3);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertTrue(stream1Queue.size() >= 2);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    List<Integer> collect = stream1Queue.stream().map(x -> x.getLeft().getNumber())
            .collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    s1e3.complete();

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));

    // verify that no processing is complete
    assertTrue(stream1Queue.size() >= 2);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream1Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    // post 3 work for stream2
    TestEvent s2e1 = new TestEvent("scope", "stream2", 1);
    CompletableFuture<Void> s2p1 = requestHandler.process(s2e1);
    TestEvent s2e2 = new TestEvent("scope", "stream2", 2);
    CompletableFuture<Void> s2p2 = requestHandler.process(s2e2);
    TestEvent s2e3 = new TestEvent("scope", "stream2", 3);
    CompletableFuture<Void> s2p3 = requestHandler.process(s2e3);

    List<Pair<TestEvent, CompletableFuture<Void>>> stream2Queue = requestHandler
            .getEventQueueForKey(getKeyForStream("scope", "stream2"));
    assertTrue(stream2Queue.size() >= 2);
    assertTrue(stream2Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream2Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.indexOf(2) < collect.indexOf(3));

    s1e1.complete();
    Futures.await(s1p1);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertTrue(stream1Queue.size() >= 1);
    assertTrue(stream1Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream1Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.contains(3));

    // now make sure that we have concurrently run for two streams
    s2e1.complete();
    Futures.await(s2p1);

    stream2Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream2"));
    assertTrue(stream2Queue.size() >= 1);
    assertTrue(stream2Queue.stream().noneMatch(x -> x.getRight().isDone()));
    collect = stream2Queue.stream().map(x -> x.getLeft().getNumber()).collect(Collectors.toList());
    assertTrue(collect.contains(3));

    // now complete all processing
    s2e2.complete();
    Futures.await(s2p2);

    s2e3.complete();

    s1e2.complete();
    Futures.await(s1p2);

    Futures.await(s1p3);
    Futures.await(s2p3);

    assertTrue(
            orderOfProcessing.get(s1e1.getKey()).get(0) == 1 && orderOfProcessing.get(s1e1.getKey()).get(1) == 2
                    && orderOfProcessing.get(s1e1.getKey()).get(2) == 3);
    assertTrue(
            orderOfProcessing.get(s2e1.getKey()).get(0) == 1 && orderOfProcessing.get(s2e1.getKey()).get(1) == 2
                    && orderOfProcessing.get(s2e1.getKey()).get(2) == 3);

    Futures.loop(() -> requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1")) == null,
            () -> CompletableFuture.completedFuture(null), executorService());
    Futures.loop(() -> requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream2")) == null,
            () -> CompletableFuture.completedFuture(null), executorService());

    // now that we have drained all the work from the processor.
    // let's post new work for stream 1
    TestEvent s1e4 = new TestEvent("scope", "stream1", 4);
    CompletableFuture<Void> s1p4 = requestHandler.process(s1e4);

    stream1Queue = requestHandler.getEventQueueForKey(getKeyForStream("scope", "stream1"));
    assertNotNull(stream1Queue);

    s1e4.complete();
    Futures.await(s1p4);

    assertTrue(orderOfProcessing.get(s1e1.getKey()).get(3) == 4);
}
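
The compute call inside processEvent above follows a common pattern: append a value to a per-key list, creating the list the first time the key is seen. A stripped-down sketch of just that pattern (hypothetical names), using plain integers rather than TestEvent objects:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

public class AppendPerKeySketch {
    public static void main(String[] args) {
        ConcurrentHashMap<String, List<Integer>> byKey = new ConcurrentHashMap<>();
        byKey.compute("stream1", (key, list) -> {
            if (list == null) {
                list = new ArrayList<>(); // first value seen for this key
            }
            list.add(42); // the remapping function runs atomically for this key
            return list;
        });
        System.out.println(byKey.get("stream1")); // [42]
    }
}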

From source file: net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

private void populateContentType(ConcurrentHashMap<String, Long> contentTypeCounter, Metacard metacard) {
    String contentType = metacard.getContentTypeName();
    if (StringUtils.isNotBlank(contentType)) {
        contentTypeCounter.compute(contentType, (k, v) -> {
            return Long.valueOf(v == null ? 1L : v.longValue() + 1L);
        });
    }
}

From source file: net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

private void populateMIMEType(String sourceId, ConcurrentHashMap<String, Long> mimeCounter, Result result,
        Metacard metacard, XPathHelper xpathHelper) {
    generatorConfig.getMimeTypesXPaths().forEach(xpath -> {
        try {
            String mimeType = xpathHelper.evaluate(xpath);
            LOGGER.trace("MIME Type info from site {} record {}: {}", sourceId, metacard.getId(), mimeType);
            if (StringUtils.isNotBlank(mimeType)) {
                mimeCounter.compute(mimeType, (k, v) -> {
                    return Long.valueOf(v == null ? 1L : v.longValue() + 1L);
                });
            }
        } catch (Exception e) {
            LOGGER.error("Error extracting MIMETypes {} ", result.getMetacard().getId(), e);
        }
    });
}

From source file: net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

private void populateKeywords(String sourceId, ConcurrentHashMap<String, Long> keywordCounter,
        Metacard metacard, XPathHelper xpathHelper) throws XPathExpressionException {
    NodeList keywordNodes = (NodeList) xpathHelper.evaluate(XPATH_KEYWORD, XPathConstants.NODESET);
    LOGGER.trace("Keyword info from site {} record {}: {}", sourceId, metacard.getId(), keywordNodes);
    if (keywordNodes != null) {
        for (int i = 0; i < keywordNodes.getLength(); i++) {
            String word = getAttributeValue(keywordNodes.item(i), DDMS_NAMESPACE, VALUE_ATTRIBUTE);
            if (StringUtils.isNotBlank(word)) {
                keywordCounter.compute(word, (k, v) -> {
                    return Long.valueOf(v == null ? 1L : v.longValue() + 1L);
                });
            }
        }
    }
}

From source file: net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

private void populateType(String sourceId, ConcurrentHashMap<CompoundTypeIdentifierType, Long> typeCounter,
        Metacard metacard, XPathHelper xpathHelper) throws XPathExpressionException {
    NodeList typeNode = (NodeList) xpathHelper.evaluate(XPATH_TYPE, XPathConstants.NODESET,
            new NamespaceMapImpl(namespaceMap));
    LOGGER.trace("Type info from site {} record {}: {}", sourceId, metacard.getId(), typeNode);
    if (typeNode != null) {
        for (int i = 0; i < typeNode.getLength(); i++) {
            CompoundTypeIdentifierType type = new CompoundTypeIdentifierType();

            type.setQualifier(getAttributeValue(typeNode.item(i), DDMS_NAMESPACE, QUALIFIER_ATTRIBUTE));
            type.setValue(getAttributeValue(typeNode.item(i), DDMS_NAMESPACE, VALUE_ATTRIBUTE));

            typeCounter.compute(type, (k, v) -> {
                return Long.valueOf(v == null ? 1L : v.longValue() + 1L);
            });
        }
    }
}

From source file: net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

private void populateSource(String sourceId, ConcurrentHashMap<QualifierValueHolder, Long> sourceCounter,
        Result result, Metacard metacard, XPathHelper xpathHelper) {
    generatorConfig.getSourceXPaths().forEach(xpath -> {
        try {
            NodeList sourceNodes = (NodeList) xpathHelper.evaluate(xpath, XPathConstants.NODESET);
            LOGGER.trace("Source Element info from site {} record {}: {}", sourceId, metacard.getId(),
                    sourceNodes);
            if (sourceNodes != null) {
                for (int i = 0; i < sourceNodes.getLength(); i++) {
                    String qual = getAttributeValue(sourceNodes.item(i), DDMS_NAMESPACE, QUALIFIER_ATTRIBUTE);
                    String val = getAttributeValue(sourceNodes.item(i), DDMS_NAMESPACE, VALUE_ATTRIBUTE);
                    sourceCounter.compute(new QualifierValueHolder(qual, val), (k, v) -> {
                        return Long.valueOf(v == null ? 1L : v.longValue() + 1L);
                    });
                }
            }
        } catch (Exception e) {
            LOGGER.error("Error extracting Sources {} ", result.getMetacard().getId(), e);
        }
    });
}

From source file: net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

private void populateCategory(String sourceId,
        ConcurrentHashMap<CompoundCategoryIdentifierType, Long> categoryCounter, Metacard metacard,
        XPathHelper xpathHelper) throws XPathExpressionException {
    NodeList categoryNode = (NodeList) xpathHelper.evaluate(XPATH_CATEGORY, XPathConstants.NODESET);
    LOGGER.trace("Category info from site {} record {}: {}", sourceId, metacard.getId(), categoryNode);
    if (categoryNode != null) {
        for (int i = 0; i < categoryNode.getLength(); i++) {
            CompoundCategoryIdentifierType catType = new CompoundCategoryIdentifierType();

            catType.setCode(getAttributeValue(categoryNode.item(i), DDMS_NAMESPACE, "code"));
            catType.setQualifier(getAttributeValue(categoryNode.item(i), DDMS_NAMESPACE, "qualifier"));
            catType.setLabel(getAttributeValue(categoryNode.item(i), DDMS_NAMESPACE, "label"));

            categoryCounter.compute(catType, (k, v) -> {
                return Long.valueOf(v == null ? 1L : v.longValue() + 1L);
            });
        }
    }
}

From source file: net.di2e.ecdr.describe.generator.DescribeGeneratorImpl.java

private void populateSecurityCoverage(String sourceId, ConcurrentHashMap<SecurityType, Long> secCounter,
        Metacard metacard, XPathHelper xpathHelper) throws XPathExpressionException {
    Node securityNode = (Node) xpathHelper.evaluate(XPATH_SECURITY, XPathConstants.NODE);
    LOGGER.trace("Security info from site {} record {}: {}", sourceId, metacard.getId(), securityNode);
    if (securityNode != null) {
        SecurityType secType = new SecurityType();
        String value = getAttributeValue(securityNode, ICISM_NAMESPACE, "classification");
        if (value != null) {
            secType.setClassification(CVEnumISMClassificationAll.fromValue(value));
        }

        value = getAttributeValue(securityNode, ICISM_NAMESPACE, "ownerProducer");
        if (value != null) {
            secType.setOwnerProducer(Arrays.asList(value.split(" ")));
        }

        value = getAttributeValue(securityNode, ICISM_NAMESPACE, "releasableTo");
        if (value != null) {
            secType.setReleasableTo(Arrays.asList(value.split(" ")));
        }

        value = getAttributeValue(securityNode, ICISM_NAMESPACE, "disseminationControls");
        if (value != null) {
            secType.setDisseminationControls(Arrays.asList(value.split(" ")));
        }

        secCounter.compute(secType, (k, v) -> {
            return Long.valueOf(v == null ? 1L : v.longValue() + 1L);
        });
    }
}