Example usage for org.apache.commons.collections CollectionUtils subtract

List of usage examples for org.apache.commons.collections CollectionUtils subtract

Introduction

On this page you can find example usages of org.apache.commons.collections CollectionUtils.subtract.

Prototype

public static Collection subtract(final Collection a, final Collection b) 

Source Link

Document

Returns a new Collection containing a - b.

Usage

From source file:nl.strohalm.cyclos.services.sms.SmsMailingServiceImpl.java

@SuppressWarnings("unchecked")
private void validateVariables(final SmsMailing smsMailing, final boolean isMemberRequired) {
    // Validate variables
    String text = smsMailing.getText();
    // The only variables that can appear are the allowed:
    Map<String, String> variables = null;
    if (isMemberRequired) {
        variables = getSmsTextVariables(smsMailing.getMember());
    } else {//from ww w. j a  v a2 s  .  com
        variables = getSmsTextVariables(new ArrayList<MemberGroup>(smsMailing.getGroups()));
    }

    Set<String> parsedVariables = new HashSet<String>();
    Pattern pattern = Pattern.compile("#[a-zA-Z_][a-zA-Z\\d_]*#");
    Matcher matcher = pattern.matcher(text);

    while (matcher.find()) {
        parsedVariables.add(matcher.group().replaceAll("#", ""));
    }

    Collection<String> unexpectedVariables = CollectionUtils.subtract(parsedVariables, variables.keySet());
    if (CollectionUtils.isNotEmpty(unexpectedVariables)) {
        throw new ValidationException("smsMailing.error.variableNotFound", unexpectedVariables);
    }
}

From source file:org.alfresco.po.share.workflow.SelectContentPageTest.java

@Test(groups = "Enterprise4.2")
public void getAddedItems() throws Exception {
    SelectContentPage contentPage = newWorkflowPage.clickAddItems().render();

    Assert.assertTrue(contentPage.isOkButtonPresent());
    Assert.assertTrue(contentPage.isCancelButtonPresent());
    Assert.assertTrue(contentPage.isCompanyHomeButtonPresent());
    List<String> elements = contentPage.getDirectoriesLeftPanel();
    Assert.assertTrue(elements.contains("Data Dictionary") && elements.contains("Guest Home")
            && elements.contains("Imap Attachments") && elements.contains("IMAP Home")
            && elements.contains("Shared") && elements.contains("Sites") && elements.contains("User Homes"));

    Content content1 = new Content();
    content1.setName("Doc3");
    content1.setFolder(false);/*ww  w.j  a  v a2  s .c o m*/
    Content content2 = new Content();
    content2.setName("Doc1");
    content2.setFolder(false);
    Content content3 = new Content();
    content3.setName("Doc2");
    content3.setFolder(false);
    Content content4 = new Content();
    content4.setFolder(true);
    content4.setName("Folder1");
    Set<Content> contents4 = new HashSet<Content>();
    Content content41 = new Content();
    content41.setFolder(false);
    content41.setName("F1Doc1");
    contents4.add(content41);
    content4.setContents(contents4);
    Content content5 = new Content();
    content5.setFolder(true);
    content5.setName("Folder2");
    Content content51 = new Content();
    content51.setFolder(true);
    content51.setName("Folder11");
    Content content52 = new Content();
    content52.setFolder(true);
    content52.setName("Folder21");
    Site site2 = new Site();
    site2.setName(siteName);
    Content content53 = new Content();
    content53.setFolder(false);
    content53.setName("Doc211");
    Set<Content> contents5 = new HashSet<Content>();
    contents5.add(content53);
    content52.setContents(contents5);
    Set<Content> contents51 = new HashSet<Content>();
    contents51.add(content52);
    content51.setContents(contents51);
    Set<Content> contents52 = new HashSet<Content>();
    contents52.add(content51);
    content5.setContents(contents52);
    Set<Content> site2Contents = new HashSet<Content>();
    site2Contents.add(content5);
    site2Contents.add(content4);
    site2Contents.add(content1);
    site2Contents.add(content2);
    site2Contents.add(content3);
    site2.setContents(site2Contents);
    CompanyHome companyHome = new CompanyHome();
    Set<Site> sites = new HashSet<Site>();
    sites.add(site2);
    companyHome.setSites(sites);
    contentPage.addItems(companyHome);

    Assert.assertTrue(contentPage.isRemoveIconPresent(content1.getName()));
    Assert.assertTrue(contentPage.isRemoveIconPresent(content2.getName()));

    List<String> expectedItems = new ArrayList<String>();
    expectedItems.add(content1.getName());
    expectedItems.add(content2.getName());
    expectedItems.add(content3.getName());
    expectedItems.add(content41.getName());
    expectedItems.add(content53.getName());
    Assert.assertTrue(CollectionUtils.subtract(contentPage.getAddedItems(), expectedItems).isEmpty());
    contentPage.selectOKButton();
}

From source file:org.apache.ambari.server.api.services.stackadvisor.commands.StackAdvisorCommand.java

@SuppressWarnings("unchecked")
private Collection<String> getUnregisteredHosts(String hostsJSON, List<String> hosts)
        throws StackAdvisorException {
    ObjectMapper mapper = new ObjectMapper();
    List<String> registeredHosts = new ArrayList<String>();

    try {/* w  w  w . j a  v a  2s  .  c o  m*/
        JsonNode root = mapper.readTree(hostsJSON);
        Iterator<JsonNode> iterator = root.get("items").getElements();
        while (iterator.hasNext()) {
            JsonNode next = iterator.next();
            String hostName = next.get("Hosts").get("host_name").getTextValue();
            registeredHosts.add(hostName);
        }

        return CollectionUtils.subtract(hosts, registeredHosts);
    } catch (Exception e) {
        throw new StackAdvisorException("Error occured during calculating unregistered hosts", e);
    }
}

From source file:org.apache.ambari.server.controller.AmbariManagementControllerImpl.java

@Override
@SuppressWarnings("unchecked")
public synchronized void updateMembers(Set<MemberRequest> requests) throws AmbariException {
    // All requests must target a single group; collect the desired membership
    // while validating (the collected list is discarded if validation fails).
    String groupName = null;
    final List<String> desiredMembers = new ArrayList<String>();
    for (final MemberRequest request : requests) {
        if (groupName != null && !request.getGroupName().equals(groupName)) {
            throw new AmbariException("Can't manage members of different groups in one request");
        }
        groupName = request.getGroupName();
        if (request.getUserName() != null) {
            desiredMembers.add(request.getUserName());
        }
    }

    final List<String> currentMembers = users.getAllMembers(groupName);

    // Remove members no longer desired, then add the missing ones.
    for (final String member : (Collection<String>) CollectionUtils.subtract(currentMembers, desiredMembers)) {
        users.removeMemberFromGroup(groupName, member);
    }
    for (final String member : (Collection<String>) CollectionUtils.subtract(desiredMembers, currentMembers)) {
        users.addMemberToGroup(groupName, member);
    }
}

From source file:org.apache.ambari.server.state.ConfigMergeHelper.java

@SuppressWarnings("unchecked")
public Map<String, Map<String, ThreeWayValue>> getConflicts(String clusterName, StackId targetStack)
        throws AmbariException {
    Cluster cluster = m_clusters.get().getCluster(clusterName);
    StackId oldStack = cluster.getCurrentStackVersion();

    Map<String, Map<String, String>> oldMap = new HashMap<String, Map<String, String>>();
    Map<String, Map<String, String>> newMap = new HashMap<String, Map<String, String>>();

    // Add service properties for old and new stack
    for (String serviceName : cluster.getServices().keySet()) {
        Set<PropertyInfo> oldStackProperties = m_ambariMetaInfo.get()
                .getServiceProperties(oldStack.getStackName(), oldStack.getStackVersion(), serviceName);
        addToMap(oldMap, oldStackProperties);

        Set<PropertyInfo> newStackProperties = m_ambariMetaInfo.get()
                .getServiceProperties(targetStack.getStackName(), targetStack.getStackVersion(), serviceName);
        addToMap(newMap, newStackProperties);
    }/*from w  ww . ja v  a 2 s  .c o  m*/

    // Add stack properties for old and new stack
    Set<PropertyInfo> set = m_ambariMetaInfo.get().getStackProperties(oldStack.getStackName(),
            oldStack.getStackVersion());
    addToMap(oldMap, set);

    set = m_ambariMetaInfo.get().getStackProperties(targetStack.getStackName(), targetStack.getStackVersion());
    addToMap(newMap, set);

    // Final result after merging.
    Map<String, Map<String, ThreeWayValue>> result = new HashMap<String, Map<String, ThreeWayValue>>();

    for (Entry<String, Map<String, String>> entry : oldMap.entrySet()) {
        if (!newMap.containsKey(entry.getKey())) {
            LOG.info("Stack {} does not have an equivalent config type {} in {}", oldStack.getStackId(),
                    entry.getKey(), targetStack.getStackId());
            continue;
        }

        Map<String, String> oldPairs = entry.getValue();
        Map<String, String> newPairs = newMap.get(entry.getKey());
        Collection<String> customValueKeys = null;

        Config config = cluster.getDesiredConfigByType(entry.getKey());
        if (null != config) {
            Set<String> valueKeys = config.getProperties().keySet();

            customValueKeys = CollectionUtils.subtract(valueKeys, oldPairs.keySet());
        }

        // Keep properties with custom values (i.e., changed from default value in old stack)
        if (null != customValueKeys) {
            for (String prop : customValueKeys) {
                String newVal = newPairs.get(prop);
                String savedVal = config.getProperties().get(prop);
                if (null != newVal && null != savedVal && !newVal.equals(savedVal)) {
                    ThreeWayValue twv = new ThreeWayValue();
                    twv.oldStackValue = null;
                    twv.newStackValue = normalizeValue(savedVal, newVal.trim());
                    twv.savedValue = savedVal.trim();

                    if (!result.containsKey(entry.getKey())) {
                        result.put(entry.getKey(), new HashMap<String, ThreeWayValue>());
                    }

                    result.get(entry.getKey()).put(prop, twv);
                }
            }
        }

        Collection<String> common = CollectionUtils.intersection(newPairs.keySet(), oldPairs.keySet());

        for (String prop : common) {
            String oldStackVal = oldPairs.get(prop);
            String newStackVal = newPairs.get(prop);
            String savedVal = "";
            if (null != config) {
                savedVal = config.getProperties().get(prop);
            }

            // If values are not defined in stack (null), we skip them
            // Or if values in old stack and in new stack are the same, and value
            // in current config is different, skip it
            if (!(newStackVal == null && oldStackVal == null) && !newStackVal.equals(savedVal)
                    && (!oldStackVal.equals(newStackVal) || !oldStackVal.equals(savedVal))) {
                ThreeWayValue twv = new ThreeWayValue();
                twv.oldStackValue = normalizeValue(savedVal, oldStackVal.trim());
                twv.newStackValue = normalizeValue(savedVal, newStackVal.trim());
                twv.savedValue = (null == savedVal) ? null : savedVal.trim();

                if (!result.containsKey(entry.getKey())) {
                    result.put(entry.getKey(), new HashMap<String, ThreeWayValue>());
                }

                result.get(entry.getKey()).put(prop, twv);
            }
        }
    }

    return result;
}

From source file:org.apache.archiva.redback.rbac.AbstractRBACManager.java

/**
 * Returns the assignable roles that are not effectively assigned to the principal.
 *
 * @param principal the principal whose roles are examined
 * @return all assignable roles minus the effectively assigned ones
 * @throws RbacManagerException on a manager failure
 * @throws RbacObjectNotFoundException when the principal is unknown
 */
public Collection<Role> getEffectivelyUnassignedRoles(String principal)
        throws RbacManagerException, RbacObjectNotFoundException {
    final Collection<Role> effectivelyAssigned = getEffectivelyAssignedRoles(principal);
    final List<Role> assignable = getAllAssignableRoles();

    log.debug("UR: assigned {}", effectivelyAssigned.size());
    log.debug("UR: available {}", assignable.size());

    return CollectionUtils.subtract(assignable, effectivelyAssigned);
}

From source file:org.apache.archiva.redback.rbac.AbstractRBACManager.java

/**
 * Returns the assignable roles that are not directly assigned to the principal.
 *
 * @param principal the principal whose roles are examined
 * @return all assignable roles minus the directly assigned ones
 * @throws RbacManagerException on a manager failure
 * @throws RbacObjectNotFoundException when the principal is unknown
 */
public Collection<Role> getUnassignedRoles(String principal)
        throws RbacManagerException, RbacObjectNotFoundException {
    final Collection<Role> directlyAssigned = getAssignedRoles(principal);
    final List<Role> assignable = getAllAssignableRoles();

    log.debug("UR: assigned {}", directlyAssigned.size());
    log.debug("UR: available {}", assignable.size());

    return CollectionUtils.subtract(assignable, directlyAssigned);
}

From source file:org.apache.atlas.repository.store.graph.v1.EntityGraphMapper.java

/**
 * Deletes array-edge entries that are present in {@code currentEntries} but not in
 * {@code newEntries}. Edges whose delete handler declines to remove them are returned
 * so the caller can retain them as additional elements.
 *
 * @param attribute      the array attribute whose entries are being reconciled
 * @param currentEntries edges currently stored for the attribute
 * @param newEntries     edges that should remain after the update
 * @return edges that could not be deleted and must be kept; never {@code null}
 * @throws AtlasBaseException if an edge deletion fails
 */
private List<AtlasEdge> removeUnusedArrayEntries(AtlasAttribute attribute, List<AtlasEdge> currentEntries,
        List<AtlasEdge> newEntries) throws AtlasBaseException {
    if (CollectionUtils.isNotEmpty(currentEntries)) {
        // Removed unused local: entityType (attribute.getDefinedInType()) was never read.
        AtlasType entryType = ((AtlasArrayType) attribute.getAttributeType()).getElementType();

        // Only reference (edge-backed) element types require explicit edge removal.
        if (AtlasGraphUtilsV1.isReference(entryType)) {
            Collection<AtlasEdge> edgesToRemove = CollectionUtils.subtract(currentEntries, newEntries);

            if (CollectionUtils.isNotEmpty(edgesToRemove)) {
                List<AtlasEdge> additionalElements = new ArrayList<>();

                for (AtlasEdge edge : edgesToRemove) {
                    boolean deleted = deleteHandler.deleteEdgeReference(edge, entryType.getTypeCategory(),
                            attribute.isOwnedRef(), true);

                    // Edges the handler refused to delete are kept as-is.
                    if (!deleted) {
                        additionalElements.add(edge);
                    }
                }

                return additionalElements;
            }
        }
    }

    return Collections.emptyList();
}

From source file:org.apache.bookkeeper.replication.Auditor.java

@VisibleForTesting
synchronized Future<?> submitAuditTask() {
    // Submits one audit pass to the executor: reconciles the auditor's view of
    // known bookies against the currently available ones, then either starts an
    // audit of lost bookies immediately or delays it by the configured
    // lost-bookie recovery delay. Mutates shared auditor state (knownBookies,
    // bookiesToBeAudited, auditTask) — presumably confined to the executor
    // thread plus this synchronized entry point; confirm against other callers.
    if (executor.isShutdown()) {
        // Executor already stopped: return a pre-failed future instead of submitting.
        SettableFuture<Void> f = SettableFuture.<Void>create();
        f.setException(new BKAuditException("Auditor shutting down"));
        return f;
    }
    return executor.submit(new Runnable() {
        @SuppressWarnings("unchecked")
        public void run() {
            try {
                waitIfLedgerReplicationDisabled();

                List<String> availableBookies = getAvailableBookies();

                // casting to String, as knownBookies and availableBookies
                // contains only String values
                // find new bookies(if any) and update the known bookie list
                Collection<String> newBookies = CollectionUtils.subtract(availableBookies, knownBookies);
                knownBookies.addAll(newBookies);
                if (!bookiesToBeAudited.isEmpty() && knownBookies.containsAll(bookiesToBeAudited)) {
                    // the bookie, which went down earlier and had an audit scheduled for,
                    // has come up. So let us stop tracking it and cancel the audit. Since
                    // we allow delaying of audit when there is only one failed bookie,
                    // bookiesToBeAudited should just have 1 element and hence containsAll
                    // check should be ok
                    if (auditTask != null && auditTask.cancel(false)) {
                        auditTask = null;
                        numDelayedBookieAuditsCancelled.inc();
                    }
                    bookiesToBeAudited.clear();
                }

                // find lost bookies(if any)
                bookiesToBeAudited.addAll(CollectionUtils.subtract(knownBookies, availableBookies));
                if (bookiesToBeAudited.size() == 0) {
                    // Nothing lost: nothing to audit this round.
                    return;
                }

                // Stop treating the lost bookies as known until they reappear.
                knownBookies.removeAll(bookiesToBeAudited);
                if (conf.getLostBookieRecoveryDelay() == 0) {
                    // No delay configured: audit immediately.
                    startAudit(false);
                    bookiesToBeAudited.clear();
                    return;
                }
                if (bookiesToBeAudited.size() > 1) {
                    // if more than one bookie is down, start the audit immediately;
                    LOG.info("Multiple bookie failure; not delaying bookie audit. Bookies lost now: "
                            + CollectionUtils.subtract(knownBookies, availableBookies) + "; All lost bookies: "
                            + bookiesToBeAudited.toString());
                    if (auditTask != null && auditTask.cancel(false)) {
                        auditTask = null;
                        numDelayedBookieAuditsCancelled.inc();
                    }
                    startAudit(false);
                    bookiesToBeAudited.clear();
                    return;
                }
                if (auditTask == null) {
                    // if there is no scheduled audit, schedule one
                    auditTask = executor.schedule(new Runnable() {
                        public void run() {
                            startAudit(false);
                            auditTask = null;
                            bookiesToBeAudited.clear();
                        }
                    }, conf.getLostBookieRecoveryDelay(), TimeUnit.SECONDS);
                    numBookieAuditsDelayed.inc();
                    LOG.info("Delaying bookie audit by " + conf.getLostBookieRecoveryDelay() + "secs for "
                            + bookiesToBeAudited.toString());
                }
            } catch (BKException bke) {
                LOG.error("Exception getting bookie list", bke);
            } catch (InterruptedException ie) {
                // Restore the interrupt status so the executor/callers can observe it.
                Thread.currentThread().interrupt();
                LOG.error("Interrupted while watching available bookies ", ie);
            } catch (UnavailableException ue) {
                LOG.error("Exception while watching available bookies", ue);
            }
        }
    });
}

From source file:org.apache.bookkeeper.replication.Auditor.java

@SuppressWarnings("unchecked")
private void auditBookies() throws BKAuditException, KeeperException, InterruptedException, BKException {
    try {//from  w ww . j  a  v  a2s  . co  m
        waitIfLedgerReplicationDisabled();
    } catch (UnavailableException ue) {
        LOG.error("Underreplication unavailable, skipping audit." + "Will retry after a period");
        return;
    }

    Stopwatch stopwatch = new Stopwatch().start();
    // put exit cases here
    Map<String, Set<Long>> ledgerDetails = generateBookie2LedgersIndex();
    try {
        if (!ledgerUnderreplicationManager.isLedgerReplicationEnabled()) {
            // has been disabled while we were generating the index
            // discard this run, and schedule a new one
            executor.submit(BOOKIE_CHECK);
            return;
        }
    } catch (UnavailableException ue) {
        LOG.error("Underreplication unavailable, skipping audit." + "Will retry after a period");
        return;
    }

    List<String> availableBookies = getAvailableBookies();
    // find lost bookies
    Set<String> knownBookies = ledgerDetails.keySet();
    Collection<String> lostBookies = CollectionUtils.subtract(knownBookies, availableBookies);

    bookieToLedgersMapCreationTime.registerSuccessfulEvent(stopwatch.elapsedMillis(), TimeUnit.MILLISECONDS);
    if (lostBookies.size() > 0) {
        handleLostBookies(lostBookies, ledgerDetails);
        uRLPublishTimeForLostBookies.registerSuccessfulEvent(stopwatch.stop().elapsedMillis(),
                TimeUnit.MILLISECONDS);
    }

}