Example usage for org.apache.commons.collections CollectionUtils containsAny

List of usage examples for org.apache.commons.collections CollectionUtils containsAny

Introduction

On this page you can find an example usage for org.apache.commons.collections CollectionUtils containsAny.

Prototype

public static boolean containsAny(final Collection coll1, final Collection coll2) 

Source Link

Document

Returns true iff at least one element is in both collections.

Usage

From source file:nl.surfnet.coin.api.service.PersonARPEnforcer.java

/**
 * Mangle a given Person using the given ARP.
 *
 * @param person Person to mangle/*from   w  w  w  .j  a va  2s  . co  m*/
 * @param arp    ARP to use
 * @return A copy of the given person, mangled
 */
public Person enforceARP(Person person, ARP arp) {
    Assert.notNull(person);

    // No arp at all: allow everything
    if (arp == null || arp.isNoArp()) {
        return person;
    }

    // Start with an empty person.
    Person newP = new Person();

    if (arp.isNoAttrArp()) {
        // Empty arp: allow nothing
        return newP;
    }

    // Name attributes: allow all in case any name attribute is allowed (simplicity/usability sake)
    final Set<String> arpAttributeNames = arp.getAttributes().keySet();
    if (CollectionUtils.containsAny(nameAttributes, arpAttributeNames)) {
        newP.setDisplayName(person.getDisplayName());
        newP.setName(person.getName());
        newP.setNickname(person.getNickname());
    }

    // organization attributes: allow all in case any name attribute is allowed (simplicity/usability sake)
    if (CollectionUtils.containsAny(organizationAttributes, arpAttributeNames)) {
        newP.setOrganizations(person.getOrganizations());
    }

    // Email
    if (arpAttributeNames.contains(Attribute.EMAIL.name)) {
        newP.setEmails(person.getEmails());
    }

    if (CollectionUtils.containsAny(idAttributes, arpAttributeNames)) {
        newP.setTags(person.getTags());
        newP.setAccounts(person.getAccounts());
        newP.setId(person.getId());
        newP.setVoot_membership_role(person.getVoot_membership_role());
    }

    return newP;
}

From source file:ome.services.sharing.BlobShareStore.java

/**
 * Returns true when any of the given image IDs belongs to the given instrument.
 * Query results are memoized per instrument ID in the supplied cache.
 */
@SuppressWarnings("unchecked")
boolean imagesContainsInstrument(Session s, List<Long> images, Instrument instr, Map<Long, List<Long>> cache) {
    if (instr == null) {
        return false;
    }

    Long instrID = instr.getId();

    // Populate the cache on first lookup for this instrument.
    if (!cache.containsKey(instrID)) {
        List<Long> queried = (List<Long>) s.createQuery("select id from Image where instrument.id = ?")
                .setParameter(0, instrID).list();
        cache.put(instrID, queried);
    }

    return CollectionUtils.containsAny(images, cache.get(instrID));
}

From source file:ome.services.sharing.BlobShareStore.java

/**
 * Returns true when any of the given image IDs uses the given objective settings.
 * Query results are memoized per objective-settings ID in the supplied cache.
 */
@SuppressWarnings("unchecked")
boolean imagesContainsObjectiveSettings(Session s, List<Long> images, long osID, Map<Long, List<Long>> cache) {
    // Populate the cache on first lookup for these objective settings.
    if (!cache.containsKey(osID)) {
        List<Long> queried = (List<Long>) s.createQuery("select id from Image where objectiveSettings.id = ?")
                .setParameter(0, osID).list();
        cache.put(osID, queried);
    }

    return CollectionUtils.containsAny(images, cache.get(osID));
}

From source file:org.apache.atlas.discovery.EntityDiscoveryService.java

/**
 * Executes a basic entity search combining an optional free-text query with optional
 * type, classification and attribute-prefix filters.
 *
 * When a full-text query is present (or assembled from the attribute filter), an index
 * query is executed and results are filtered in memory for type/classification; otherwise
 * a gremlin script is assembled from the configured filter fragments and executed.
 *
 * @param query                  free-text query; may be null/empty
 * @param typeName               entity type to restrict results to; may be null/empty
 * @param classification         classification to restrict results to; may be null/empty
 * @param attrName               attribute to search on; may be null/empty
 * @param attrValuePrefix        attribute value prefix to match; may be null/empty
 * @param excludeDeletedEntities whether DELETED entities are skipped
 * @param limit                  maximum number of results; validated by validateSearchParams
 * @param offset                 number of leading results to skip; validated by validateSearchParams
 * @return the populated search result; never null
 * @throws AtlasBaseException if the type, classification or attribute is unknown, or the query fails
 */
@Override
public AtlasSearchResult searchUsingBasicQuery(String query, String typeName, String classification,
        String attrName, String attrValuePrefix, boolean excludeDeletedEntities, int limit, int offset)
        throws AtlasBaseException {

    AtlasSearchResult ret = new AtlasSearchResult(AtlasQueryType.BASIC);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Executing basic search query: {} with type: {} and classification: {}", query, typeName,
                classification);
    }

    final QueryParams params = validateSearchParams(limit, offset);
    Set<String> typeNames = null;
    Set<String> classificationNames = null;
    String attrQualifiedName = null;

    // Resolve the type filter to the type plus all of its sub-types.
    if (StringUtils.isNotEmpty(typeName)) {
        AtlasEntityType entityType = typeRegistry.getEntityTypeByName(typeName);

        if (entityType == null) {
            throw new AtlasBaseException(UNKNOWN_TYPENAME, typeName);
        }

        typeNames = entityType.getTypeAndAllSubTypes();

        ret.setType(typeName);
    }

    // Resolve the classification filter to the classification plus all of its sub-types.
    if (StringUtils.isNotEmpty(classification)) {
        AtlasClassificationType classificationType = typeRegistry.getClassificationTypeByName(classification);

        if (classificationType == null) {
            throw new AtlasBaseException(CLASSIFICATION_NOT_FOUND, classification);
        }

        classificationNames = classificationType.getTypeAndAllSubTypes();

        ret.setClassification(classification);
    }

    boolean isAttributeSearch = StringUtils.isNotEmpty(attrName) || StringUtils.isNotEmpty(attrValuePrefix);
    boolean isGuidPrefixSearch = false;

    if (isAttributeSearch) {
        AtlasEntityType entityType = typeRegistry.getEntityTypeByName(typeName);

        ret.setQueryType(AtlasQueryType.ATTRIBUTE);

        if (entityType != null) {
            AtlasAttribute attribute = null;

            if (StringUtils.isNotEmpty(attrName)) {
                attribute = entityType.getAttribute(attrName);

                if (attribute == null) {
                    throw new AtlasBaseException(AtlasErrorCode.UNKNOWN_ATTRIBUTE, attrName, typeName);
                }

            } else {
                // if attrName is null|empty iterate defaultAttrNames to get attribute value
                final List<String> defaultAttrNames = new ArrayList<>(Arrays.asList("qualifiedName", "name"));
                Iterator<String> iter = defaultAttrNames.iterator();

                while (iter.hasNext() && attribute == null) {
                    attrName = iter.next();
                    attribute = entityType.getAttribute(attrName);
                }
            }

            if (attribute == null) {
                // for guid prefix search use gremlin and nullify query to avoid using fulltext
                // (guids cannot be searched in fulltext)
                isGuidPrefixSearch = true;
                query = null;

            } else {
                attrQualifiedName = attribute.getQualifiedName();

                // Fold the attribute prefix into the full-text query; dots would be
                // tokenized by the index, so they are replaced with spaces.
                String attrQuery = String.format("%s AND (%s *)", attrName,
                        attrValuePrefix.replaceAll("\\.", " "));

                query = StringUtils.isEmpty(query) ? attrQuery
                        : String.format("(%s) AND (%s)", query, attrQuery);
            }
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("Executing attribute search attrName: {} and attrValue: {}", attrName, attrValuePrefix);
        }
    }

    // if query was provided, perform indexQuery and filter for typeName & classification in memory; this approach
    // results in a faster and accurate results than using CONTAINS/CONTAINS_PREFIX filter on entityText property
    if (StringUtils.isNotEmpty(query)) {
        final String idxQuery = getQueryForFullTextSearch(query, typeName, classification);
        final int startIdx = params.offset();
        final int resultSize = params.limit();
        int resultIdx = 0;

        // Page through the index in chunks of getMaxResultSetSize() until enough
        // post-filter results have been collected or the index is exhausted.
        for (int indexQueryOffset = 0;; indexQueryOffset += getMaxResultSetSize()) {
            final Iterator<Result<?, ?>> qryResult = graph
                    .indexQuery(Constants.FULLTEXT_INDEX, idxQuery, indexQueryOffset).vertices();

            if (LOG.isDebugEnabled()) {
                LOG.debug("indexQuery: query=" + idxQuery + "; offset=" + indexQueryOffset);
            }

            if (!qryResult.hasNext()) {
                break;
            }

            while (qryResult.hasNext()) {
                AtlasVertex<?, ?> vertex = qryResult.next().getVertex();
                String vertexTypeName = GraphHelper.getTypeName(vertex);

                // skip non-entity vertices
                if (StringUtils.isEmpty(vertexTypeName) || StringUtils.isEmpty(GraphHelper.getGuid(vertex))) {
                    continue;
                }

                if (typeNames != null && !typeNames.contains(vertexTypeName)) {
                    continue;
                }

                // Classification filter: the vertex must carry at least one of the
                // requested classifications (or their sub-types).
                if (classificationNames != null) {
                    List<String> traitNames = GraphHelper.getTraitNames(vertex);

                    if (CollectionUtils.isEmpty(traitNames)
                            || !CollectionUtils.containsAny(classificationNames, traitNames)) {
                        continue;
                    }
                }

                if (isAttributeSearch) {
                    String vertexAttrValue = vertex.getProperty(attrQualifiedName, String.class);

                    if (StringUtils.isNotEmpty(vertexAttrValue)
                            && !vertexAttrValue.startsWith(attrValuePrefix)) {
                        continue;
                    }
                }

                if (skipDeletedEntities(excludeDeletedEntities, vertex)) {
                    continue;
                }

                resultIdx++;

                // Apply the requested offset AFTER filtering, so paging counts only
                // vertices that survived all filters.
                if (resultIdx <= startIdx) {
                    continue;
                }

                AtlasEntityHeader header = entityRetriever.toAtlasEntityHeader(vertex);

                ret.addEntity(header);

                if (ret.getEntities().size() == resultSize) {
                    break;
                }
            }

            if (ret.getEntities() != null && ret.getEntities().size() == resultSize) {
                break;
            }
        }
    } else {
        // No full-text query: build a gremlin traversal from the configured filter
        // fragments, binding filter values as script parameters.
        final Map<String, Object> bindings = new HashMap<>();
        String basicQuery = "g.V()";

        if (classificationNames != null) {
            bindings.put("traitNames", classificationNames);

            basicQuery += gremlinQueryProvider.getQuery(AtlasGremlinQuery.BASIC_SEARCH_CLASSIFICATION_FILTER);
        }

        if (typeNames != null) {
            bindings.put("typeNames", typeNames);

            basicQuery += gremlinQueryProvider.getQuery(AtlasGremlinQuery.BASIC_SEARCH_TYPE_FILTER);
        }

        if (excludeDeletedEntities) {
            bindings.put("state", Status.ACTIVE.toString());

            basicQuery += gremlinQueryProvider.getQuery(AtlasGremlinQuery.BASIC_SEARCH_STATE_FILTER);
        }

        if (isGuidPrefixSearch) {
            bindings.put("guid", attrValuePrefix + ".*");

            basicQuery += gremlinQueryProvider.getQuery(AtlasGremlinQuery.GUID_PREFIX_FILTER);
        }

        bindings.put("startIdx", params.offset());
        bindings.put("endIdx", params.offset() + params.limit());

        basicQuery += gremlinQueryProvider.getQuery(AtlasGremlinQuery.TO_RANGE_LIST);

        ScriptEngine scriptEngine = graph.getGremlinScriptEngine();

        try {
            Object result = graph.executeGremlinScript(scriptEngine, bindings, basicQuery, false);

            if (result instanceof List && CollectionUtils.isNotEmpty((List) result)) {
                List queryResult = (List) result;
                Object firstElement = queryResult.get(0);

                if (firstElement instanceof AtlasVertex) {
                    for (Object element : queryResult) {
                        if (element instanceof AtlasVertex) {

                            ret.addEntity(entityRetriever.toAtlasEntityHeader((AtlasVertex) element));
                        } else {
                            LOG.warn(
                                    "searchUsingBasicQuery({}): expected an AtlasVertex; found unexpected entry in result {}",
                                    basicQuery, element);
                        }
                    }
                }
            }
        } catch (ScriptException e) {
            throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, basicQuery);
        } finally {
            // Script engines are pooled; always return them to the graph.
            graph.releaseGremlinScriptEngine(scriptEngine);
        }
    }

    return ret;
}

From source file:org.apache.directory.studio.ldapbrowser.core.model.schema.SchemaUtils.java

/**
 * An attribute type is marked as operational if either
 * <ul>/*  w w w.j  ava 2 s  . c  o  m*/
 * <li>the usage differs from userApplications or</li>
 * <li>it is a well-known operational attribute or 
 *     (we need this because M$ AD and Samba4 don't set the USAGE flag)</li>
 * <li>it is not declared in the schema and contains the dummy extension</li>
 * </ul>
 * 
 * @param atd the attribute type description
 * 
 * @return true, if is operational
 */
public static boolean isOperational(AttributeType atd) {
    return !UsageEnum.USER_APPLICATIONS.equals(atd.getUsage())
            || Schema.DUMMY_EXTENSIONS.equals(atd.getExtensions())
            || CollectionUtils.containsAny(OPERATIONAL_ATTRIBUTES_OIDS_AND_NAMES, getLowerCaseIdentifiers(atd));
}

From source file:org.apache.directory.studio.ldapbrowser.core.model.schema.SchemaUtils.java

/**
 * Checks whether values of the given attribute type may be modified by the user.
 *
 * Besides the schema's NO-USER-MODIFICATION flag, some default no-user-modification
 * attributes are checked because e.g. Siemens DirX doesn't provide a good schema.
 * TODO: make default no-user-modification attributes configurable
 *
 * @param atd the attribute type description; may be null
 * @return true if the attribute type is modifiable
 */
public static boolean isModifiable(AttributeType atd) {
    return atd != null
            && atd.isUserModifiable()
            && !CollectionUtils.containsAny(NON_MODIFIABLE_ATTRIBUTE_OIDS_AND_NAMES,
                    getLowerCaseIdentifiers(atd));
}

From source file:org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryCrashDetectionSelfTest.java

/**
 * Verifies that the server endpoint gets the correct exception and cleans up shared
 * memory segments after its client is killed.
 *
 * @throws Exception If failed.
 */
public void testIgfsServerClientInteractionsUponClientKilling() throws Exception {
    // Run server endpoint.
    IpcSharedMemoryServerEndpoint srv = new IpcSharedMemoryServerEndpoint(U.defaultWorkDirectory());

    new IgniteTestResources().inject(srv);

    try {
        srv.start();

        info("Check that server gets correct exception upon client's killing.");

        info("Shared memory IDs before starting client endpoint: " + IpcSharedMemoryUtils.sharedMemoryIds());

        Collection<Integer> idsCreated = interactWithClient(srv, true);

        Collection<Integer> idsRemaining = null;

        // Poll while the server endpoint garbage-collects its resources.
        // See IpcSharedMemoryServerEndpoint.GC_FREQ.
        int attempt = 0;

        while (attempt++ < 12) {
            idsRemaining = IpcSharedMemoryUtils.sharedMemoryIds();

            info("Shared memory IDs created within interaction: " + idsCreated);
            info("Shared memory IDs after killing client endpoint: " + idsRemaining);

            if (!CollectionUtils.containsAny(idsRemaining, idsCreated))
                break;

            U.sleep(1000);
        }

        assertFalse(
                "List of shared memory IDs after killing client endpoint should not include IDs created "
                        + "within server-client interactions.",
                CollectionUtils.containsAny(idsRemaining, idsCreated));
    } finally {
        srv.close();
    }
}

From source file:org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryCrashDetectionSelfTest.java

/**
 * @throws Exception If failed./*from  w w w  .  ja va 2s  . co  m*/
 */
public void testIgfsClientServerInteractionsUponServerKilling() throws Exception {
    fail("https://issues.apache.org/jira/browse/IGNITE-1386");

    Collection<Integer> shmemIdsBeforeInteractions = IpcSharedMemoryUtils.sharedMemoryIds();

    info("Shared memory IDs before starting server-client interactions: " + shmemIdsBeforeInteractions);

    Collection<Integer> shmemIdsWithinInteractions = interactWithServer();

    Collection<Integer> shmemIdsAfterInteractions = IpcSharedMemoryUtils.sharedMemoryIds();

    info("Shared memory IDs created within interaction: " + shmemIdsWithinInteractions);
    info("Shared memory IDs after server and client killing: " + shmemIdsAfterInteractions);

    if (!U.isLinux())
        assertTrue(
                "List of shared memory IDs after server-client interactions should include IDs created within "
                        + "client-server interactions.",
                shmemIdsAfterInteractions.containsAll(shmemIdsWithinInteractions));
    else
        assertFalse(
                "List of shared memory IDs after server-client interactions should not include IDs created "
                        + "(on Linux): within client-server interactions.",
                CollectionUtils.containsAny(shmemIdsAfterInteractions, shmemIdsWithinInteractions));

    ProcessStartResult srvStartRes = startSharedMemoryTestServer();

    try {
        // Give server endpoint some time to make resource clean up. See IpcSharedMemoryServerEndpoint.GC_FREQ.
        for (int i = 0; i < 12; i++) {
            shmemIdsAfterInteractions = IpcSharedMemoryUtils.sharedMemoryIds();

            info("Shared memory IDs after server restart: " + shmemIdsAfterInteractions);

            if (CollectionUtils.containsAny(shmemIdsAfterInteractions, shmemIdsWithinInteractions))
                U.sleep(1000);
            else
                break;
        }

        assertFalse(
                "List of shared memory IDs after server endpoint restart should not include IDs created: "
                        + "within client-server interactions.",
                CollectionUtils.containsAny(shmemIdsAfterInteractions, shmemIdsWithinInteractions));
    } finally {
        srvStartRes.proc().kill();

        srvStartRes.isKilledLatch().await();
    }
}

From source file:org.apache.ignite.internal.util.ipc.shmem.IpcSharedMemoryCrashDetectionSelfTest.java

/**
 * @throws Exception If failed./*w w  w  .j  a  va  2s.co m*/
 */
public void testClientThrowsCorrectExceptionUponServerKilling() throws Exception {
    info("Shared memory IDs before starting server-client interactions: "
            + IpcSharedMemoryUtils.sharedMemoryIds());

    Collection<Integer> shmemIdsWithinInteractions = checkClientThrowsCorrectExceptionUponServerKilling();

    Collection<Integer> shmemIdsAfterInteractions = IpcSharedMemoryUtils.sharedMemoryIds();

    info("Shared memory IDs created within interaction: " + shmemIdsWithinInteractions);
    info("Shared memory IDs after server killing and client graceful termination: "
            + shmemIdsAfterInteractions);

    assertFalse(
            "List of shared memory IDs after killing server endpoint should not include IDs created "
                    + "within server-client interactions.",
            CollectionUtils.containsAny(shmemIdsAfterInteractions, shmemIdsWithinInteractions));
}

From source file:org.apache.kylin.cube.model.CubeDesc.java

/**
 * Validates every aggregation group of this cube design.
 *
 * For each group it checks that 'includes' and 'select_rule' are set, that mandatory,
 * hierarchy and joint dimensions are all contained in 'includes', that hierarchy and
 * joint dimensions do not overlap, that each hierarchy/joint has at least 2 dimensions,
 * and that no dimension appears in more than one hierarchy or more than one joint.
 * Mandatory dimensions overlapping hierarchy/joint dimensions only produce a warning.
 *
 * @throws IllegalStateException if any aggregation group is invalid
 */
public void validateAggregationGroups() {
    int index = 0;

    for (AggregationGroup agg : getAggregationGroups()) {
        if (agg.getIncludes() == null) {
            logger.error("Aggregation group " + index + " 'includes' field not set");
            throw new IllegalStateException("Aggregation group " + index + " includes field not set");
        }

        if (agg.getSelectRule() == null) {
            logger.error("Aggregation group " + index + " 'select_rule' field not set");
            throw new IllegalStateException("Aggregation group " + index + " select rule field not set");
        }

        // Dimension names are compared case-insensitively throughout.
        Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(includeDims, agg.getIncludes());

        Set<String> mandatoryDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(mandatoryDims, agg.getSelectRule().mandatoryDims);

        ArrayList<Set<String>> hierarchyDimsList = Lists.newArrayList();
        Set<String> hierarchyDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(hierarchyDimsList, hierarchyDims, agg.getSelectRule().hierarchyDims);

        ArrayList<Set<String>> jointDimsList = Lists.newArrayList();
        Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        getDims(jointDimsList, jointDims, agg.getSelectRule().jointDims);

        // Every dimension used by any select rule must appear in 'includes'.
        if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
                || !includeDims.containsAll(jointDims)) {
            List<String> notIncluded = Lists.newArrayList();
            final Iterable<String> all = Iterables
                    .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
            for (String dim : all) {
                if (!includeDims.contains(dim)) {
                    notIncluded.add(dim);
                }
            }
            Collections.sort(notIncluded);
            logger.error("Aggregation group " + index
                    + " Include dimensions not containing all the used dimensions");
            throw new IllegalStateException("Aggregation group " + index
                    + " 'includes' dimensions not include all the dimensions:" + notIncluded);
        }

        // Mandatory dims overlapping hierarchy/joint dims is tolerated with a warning.
        if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
            logger.warn(
                    "Aggregation group " + index + " mandatory dimensions overlap with hierarchy dimensions: "
                            + ensureOrder(CollectionUtils.intersection(mandatoryDims, hierarchyDims)));
        }
        if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
            logger.warn("Aggregation group " + index + " mandatory dimensions overlap with joint dimensions: "
                    + ensureOrder(CollectionUtils.intersection(mandatoryDims, jointDims)));
        }

        // A dimension cannot be both in a hierarchy and a joint.
        if (CollectionUtils.containsAny(hierarchyDims, jointDims)) {
            logger.error("Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions");
            throw new IllegalStateException(
                    "Aggregation group " + index + " hierarchy dimensions overlap with joint dimensions: "
                            + ensureOrder(CollectionUtils.intersection(hierarchyDims, jointDims)));
        }

        // Hierarchies and joints of 0 or 1 dimensions are meaningless.
        if (hasSingleOrNone(hierarchyDimsList)) {
            logger.error("Aggregation group " + index + " require at least 2 dimensions in a hierarchy");
            throw new IllegalStateException(
                    "Aggregation group " + index + " require at least 2 dimensions in a hierarchy.");
        }
        if (hasSingleOrNone(jointDimsList)) {
            logger.error("Aggregation group " + index + " require at least 2 dimensions in a joint");
            throw new IllegalStateException(
                    "Aggregation group " + index + " require at least 2 dimensions in a joint");
        }

        // Each dimension may belong to at most one hierarchy...
        Pair<Boolean, Set<String>> overlap = hasOverlap(hierarchyDimsList, hierarchyDims);
        if (overlap.getFirst()) {
            logger.error("Aggregation group " + index + " a dimension exist in more than one hierarchy: "
                    + ensureOrder(overlap.getSecond()));
            throw new IllegalStateException("Aggregation group " + index
                    + " a dimension exist in more than one hierarchy: " + ensureOrder(overlap.getSecond()));
        }

        // ...and at most one joint.
        overlap = hasOverlap(jointDimsList, jointDims);
        if (overlap.getFirst()) {
            logger.error("Aggregation group " + index + " a dimension exist in more than one joint: "
                    + ensureOrder(overlap.getSecond()));
            throw new IllegalStateException("Aggregation group " + index
                    + " a dimension exist in more than one joint: " + ensureOrder(overlap.getSecond()));
        }

        index++;
    }
}