List of usage examples for `org.apache.commons.collections.CollectionUtils#containsAny`.
Signature: `public static boolean containsAny(final Collection coll1, final Collection coll2)`
Returns `true` iff at least one element is contained in both collections.
From source file: org.apache.kylin.cube.model.CubeDesc.java
/**
 * Scans the given dimension sets in order and collects every dimension that
 * appears in more than one set.
 *
 * @param dimsList dimension sets to inspect, in declaration order
 * @param Dims     not read by this method — presumably kept for the caller's
 *                 convenience; TODO confirm it can be dropped
 * @return a pair of (whether any overlap exists, the set of overlapping dimensions)
 */
private Pair<Boolean, Set<String>> hasOverlap(ArrayList<Set<String>> dimsList, Set<String> Dims) {
    Set<String> seen = new HashSet<>();
    Set<String> shared = new HashSet<>();
    for (Set<String> dims : dimsList) {
        if (CollectionUtils.containsAny(seen, dims)) {
            // record the duplicates, normalized through ensureOrder
            shared.addAll(ensureOrder(CollectionUtils.intersection(seen, dims)));
        }
        seen.addAll(dims);
    }
    return new Pair<>(!shared.isEmpty(), shared);
}
From source file:org.apache.kylin.cube.model.validation.rule.AggregationGroupRule.java
/**
 * Validates every aggregation group declared on the cube: 'includes' coverage,
 * mandatory/hierarchy/joint overlap rules, joint-dimension sanity, and the
 * cuboid-combination cap. Each violation is reported on {@code context} as an
 * ERROR and the offending group is skipped via {@code continue}.
 *
 * NOTE(review): the continue statements skip the trailing {@code index++}, so
 * after the first invalid group subsequent error messages reuse the same group
 * index — confirm whether this is intended upstream.
 */
private void inner(CubeDesc cube, ValidateContext context) {
    // a cube without any aggregation group is invalid outright
    if (cube.getAggregationGroups() == null || cube.getAggregationGroups().size() == 0) {
        context.addResult(ResultLevel.ERROR, "Cube should have at least one Aggregation group.");
        return;
    }
    int index = 0;
    for (AggregationGroup agg : cube.getAggregationGroups()) {
        // both 'includes' and 'select rule' are required fields
        if (agg.getIncludes() == null) {
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'includes' field not set");
            continue;
        }
        if (agg.getSelectRule() == null) {
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index + " 'select rule' field not set");
            continue;
        }

        // flatten the declared dimension lists into case-insensitive sets
        Set<String> includeDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getIncludes() != null) {
            for (String include : agg.getIncludes()) {
                includeDims.add(include);
            }
        }

        Set<String> mandatoryDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().mandatoryDims != null) {
            for (String m : agg.getSelectRule().mandatoryDims) {
                mandatoryDims.add(m);
            }
        }

        Set<String> hierarchyDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().hierarchyDims != null) {
            for (String[] ss : agg.getSelectRule().hierarchyDims) {
                for (String s : ss) {
                    hierarchyDims.add(s);
                }
            }
        }

        Set<String> jointDims = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        if (agg.getSelectRule().jointDims != null) {
            for (String[] ss : agg.getSelectRule().jointDims) {
                for (String s : ss) {
                    jointDims.add(s);
                }
            }
        }

        // every mandatory/hierarchy/joint dimension must be listed in 'includes'
        if (!includeDims.containsAll(mandatoryDims) || !includeDims.containsAll(hierarchyDims)
                || !includeDims.containsAll(jointDims)) {
            List<String> notIncluded = Lists.newArrayList();
            final Iterable<String> all = Iterables
                    .unmodifiableIterable(Iterables.concat(mandatoryDims, hierarchyDims, jointDims));
            for (String dim : all) {
                if (includeDims.contains(dim) == false) {
                    notIncluded.add(dim);
                }
            }
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " 'includes' dimensions not include all the dimensions:" + notIncluded.toString());
            continue;
        }

        // mandatory dimensions may not overlap with hierarchy or joint dimensions
        if (CollectionUtils.containsAny(mandatoryDims, hierarchyDims)) {
            Set<String> intersection = new HashSet<>(mandatoryDims);
            intersection.retainAll(hierarchyDims);
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " mandatory dimension has overlap with hierarchy dimension: " + intersection.toString());
            continue;
        }
        if (CollectionUtils.containsAny(mandatoryDims, jointDims)) {
            Set<String> intersection = new HashSet<>(mandatoryDims);
            intersection.retainAll(jointDims);
            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                    + " mandatory dimension has overlap with joint dimension: " + intersection.toString());
            continue;
        }

        // per-joint checks: size >= 2, and limited interaction with hierarchies
        int jointDimNum = 0;
        if (agg.getSelectRule().jointDims != null) {
            for (String[] joints : agg.getSelectRule().jointDims) {
                Set<String> oneJoint = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                for (String s : joints) {
                    oneJoint.add(s);
                }
                if (oneJoint.size() < 2) {
                    // NOTE(review): this continue targets the joint loop, not the
                    // group loop — later checks for this group still run
                    context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                            + " require at least 2 dimensions in a joint: " + oneJoint.toString());
                    continue;
                }
                jointDimNum += oneJoint.size();

                // a joint may touch at most one hierarchy, and at most one
                // dimension within that hierarchy
                int overlapHierarchies = 0;
                if (agg.getSelectRule().hierarchyDims != null) {
                    for (String[] oneHierarchy : agg.getSelectRule().hierarchyDims) {
                        Set<String> share = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                        share.addAll(CollectionUtils.intersection(oneJoint, Arrays.asList(oneHierarchy)));
                        if (!share.isEmpty()) {
                            overlapHierarchies++;
                        }
                        if (share.size() > 1) {
                            context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                                    + " joint dimensions has overlap with more than 1 dimensions in same hierarchy: "
                                    + share.toString());
                            continue;
                        }
                    }
                    if (overlapHierarchies > 1) {
                        context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                                + " joint dimensions has overlap with more than 1 hierarchies");
                        continue;
                    }
                }
            }

            // summed joint sizes exceeding the distinct joint-dim count means
            // some dimension appears in more than one joint — find which
            if (jointDimNum != jointDims.size()) {
                Set<String> existing = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                Set<String> overlap = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                for (String[] joints : agg.getSelectRule().jointDims) {
                    Set<String> oneJoint = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
                    for (String s : joints) {
                        oneJoint.add(s);
                    }
                    if (CollectionUtils.containsAny(existing, oneJoint)) {
                        overlap.addAll(CollectionUtils.intersection(existing, oneJoint));
                    }
                    existing.addAll(oneJoint);
                }
                context.addResult(ResultLevel.ERROR, "Aggregation group " + index
                        + " a dimension exists in more than one joint: " + overlap.toString());
                continue;
            }
        }

        // finally, cap the cuboid combination count; a failed computation is
        // treated as exceeding the cap
        long combination = 0;
        try {
            combination = agg.calculateCuboidCombination();
        } catch (Exception ex) {
            combination = getMaxCombinations(cube) + 1;
        } finally {
            if (combination > getMaxCombinations(cube)) {
                String msg = "Aggregation group " + index + " has too many combinations, current combination is "
                        + combination + ", max allowed combination is " + getMaxCombinations(cube)
                        + "; use 'mandatory'/'hierarchy'/'joint' to optimize; or update 'kylin.cube.aggrgroup.max-combination' to a bigger value.";
                context.addResult(ResultLevel.ERROR, msg);
                continue;
            }
        }
        index++;
    }
}
From source file:org.apache.ranger.plugin.policyevaluator.RangerOptimizedPolicyEvaluator.java
@Override protected boolean hasMatchablePolicyItem(RangerAccessRequest request) { boolean ret = false; if (hasPublicGroup || hasCurrentUser || isOwnerMatch(request) || users.contains(request.getUser()) || CollectionUtils.containsAny(groups, request.getUserGroups())) { if (request.isAccessTypeDelegatedAdmin()) { ret = delegateAdmin;//from www. j a v a 2s .c o m } else if (hasAllPerms) { ret = true; } else { ret = request.isAccessTypeAny() || accessPerms.contains(request.getAccessType()); } } return ret; }
From source file:org.apache.ranger.plugin.policyevaluator.RangerOptimizedPolicyEvaluator.java
/**
 * Decides whether any policy item could match the given user/groups and
 * access type, using the evaluator's pre-computed summaries.
 *
 * @param user       user name being checked
 * @param userGroups groups the user belongs to
 * @param accessType requested access type; empty or ANY matches broadly
 * @return true if a policy item may match
 */
private boolean hasMatchablePolicyItem(String user, Set<String> userGroups, String accessType) {
    // first gate: user or one of the groups must be covered by this policy
    boolean principalMatches = hasPublicGroup || hasCurrentUser || users.contains(user)
            || CollectionUtils.containsAny(groups, userGroups);
    if (!principalMatches) {
        return false;
    }
    // admin access matches only when delegate-admin is granted
    if (StringUtils.equals(accessType, RangerPolicyEngine.ADMIN_ACCESS)) {
        return delegateAdmin;
    }
    if (hasAllPerms) {
        return true;
    }
    // empty/ANY access type matches anything; otherwise the type must be granted
    boolean anyAccess = StringUtils.isEmpty(accessType)
            || StringUtils.equals(accessType, RangerPolicyEngine.ANY_ACCESS);
    return anyAccess || accessPerms.contains(accessType);
}
From source file:org.apache.ranger.rest.ServiceRESTUtil.java
/**
 * Strips the given users and groups from every policy item of the policy,
 * dropping items that end up with neither users nor groups.
 *
 * @param policy policy to modify in place
 * @param users  user names to remove from each item
 * @param groups group names to remove from each item
 * @return true if the policy was changed in any way
 */
static private boolean removeUsersAndGroupsFromPolicy(RangerPolicy policy, Set<String> users,
        Set<String> groups) {
    boolean policyUpdated = false;
    List<RangerPolicy.RangerPolicyItem> policyItems = policy.getPolicyItems();
    int i = 0;
    while (i < policyItems.size()) {
        RangerPolicy.RangerPolicyItem policyItem = policyItems.get(i);
        if (CollectionUtils.containsAny(policyItem.getUsers(), users)) {
            policyItem.getUsers().removeAll(users);
            policyUpdated = true;
        }
        if (CollectionUtils.containsAny(policyItem.getGroups(), groups)) {
            policyItem.getGroups().removeAll(groups);
            policyUpdated = true;
        }
        // an item left with no users and no groups is useless — drop it;
        // only advance the index when nothing was removed at this position
        if (CollectionUtils.isEmpty(policyItem.getUsers()) && CollectionUtils.isEmpty(policyItem.getGroups())) {
            policyItems.remove(i);
            policyUpdated = true;
        } else {
            i++;
        }
    }
    return policyUpdated;
}
From source file:org.apache.ranger.security.context.RangerPreAuthSecurityHandler.java
/**
 * Checks whether the current session's user may access an API associated
 * with any of the given UI tabs/modules.
 *
 * @param associatedTabs module names the API belongs to
 * @return true when access is allowed
 * @throws Exception a 403 REST exception when the user is not permitted
 */
public boolean isAPIAccessible(Set<String> associatedTabs) throws Exception {
    boolean allowed = false;
    UserSessionBase userSession = ContextUtil.getCurrentUserSession();
    if (userSession != null) {
        // permissions may be stale — refresh before consulting them
        sessionMgr.refreshPermissionsIfNeeded(userSession);
        if (userSession.getRangerUserPermission() != null) {
            CopyOnWriteArraySet<String> accessibleModules = userSession.getRangerUserPermission()
                    .getUserPermissions();
            allowed = CollectionUtils.containsAny(accessibleModules, associatedTabs);
        }
    }
    if (allowed) {
        return true;
    }
    // no session, no permissions, or no overlapping module: forbidden
    throw restErrorUtil.createRESTException(HttpServletResponse.SC_FORBIDDEN,
            "User is not allowed to access the API", true);
}
From source file:org.apache.sysml.hops.codegen.opt.ReachabilityGraph.java
/**
 * Builds the reachability graph over the partition's materialization points,
 * derives disjoint candidate cutsets, and linearizes the search space
 * (cutset points first, remaining points sorted by decreasing size).
 *
 * @param part plan partition providing roots and materialization points
 * @param memo memo table used to resolve hop references
 */
public ReachabilityGraph(PlanPartition part, CPlanMemoTable memo) {
    //create repository of materialization points
    _matPoints = new HashMap<>();
    for (InterestingPoint p : part.getMatPointsExt())
        _matPoints.put(Pair.of(p._fromHopID, p._toHopID), new NodeLink(p));

    //create reachability graph rooted at a synthetic node over all partition roots
    _root = new NodeLink(null);
    HashSet<VisitMarkCost> visited = new HashSet<>();
    for (Long hopID : part.getRoots()) {
        Hop rootHop = memo.getHopRefs().get(hopID);
        addInputNodeLinks(rootHop, _root, part, memo, visited);
    }

    //create candidate cutsets (materialization points with inputs and a payload)
    List<NodeLink> tmpCS = _matPoints.values().stream().filter(p -> p._inputs.size() > 0 && p._p != null)
            .sorted().collect(Collectors.toList());

    //short-cut for partitions without cutsets
    if (tmpCS.isEmpty()) {
        _cutSets = new CutSet[0];
        //sort materialization points in decreasing order of their sizes
        //which can improve the pruning efficiency by skipping larger sub-spaces.
        _searchSpace = sortBySize(part.getMatPointsExt(), memo, false);
        return;
    }

    //create composite cutsets by grouping equal adjacent nodes of the sorted list
    ArrayList<ArrayList<NodeLink>> candCS = new ArrayList<>();
    ArrayList<NodeLink> current = new ArrayList<>();
    for (NodeLink node : tmpCS) {
        if (current.isEmpty())
            current.add(node);
        else if (current.get(0).equals(node))
            current.add(node);
        else {
            candCS.add(current);
            current = new ArrayList<>();
            current.add(node);
        }
    }
    if (!current.isEmpty())
        candCS.add(current);

    //evaluate cutsets (single, and duplicate pairs)
    ArrayList<ArrayList<NodeLink>> remain = new ArrayList<>();
    ArrayList<Pair<CutSet, Double>> cutSets = evaluateCutSets(candCS, remain);
    if (!remain.isEmpty() && remain.size() < 5) {
        //second chance: for pairs for remaining candidates
        ArrayList<ArrayList<NodeLink>> candCS2 = new ArrayList<>();
        for (int i = 0; i < remain.size() - 1; i++)
            for (int j = i + 1; j < remain.size(); j++) {
                ArrayList<NodeLink> tmp = new ArrayList<>();
                tmp.addAll(remain.get(i));
                tmp.addAll(remain.get(j));
                candCS2.add(tmp);
            }
        ArrayList<Pair<CutSet, Double>> cutSets2 = evaluateCutSets(candCS2, remain);
        //ensure constructed cutsets are disjoint (first-come, first-kept)
        HashSet<InterestingPoint> testDisjoint = new HashSet<>();
        for (Pair<CutSet, Double> cs : cutSets2) {
            if (!CollectionUtils.containsAny(testDisjoint, Arrays.asList(cs.getLeft().cut))) {
                cutSets.add(cs);
                CollectionUtils.addAll(testDisjoint, cs.getLeft().cut);
            }
        }
    }

    //sort and linearize search space according to scores (ascending, smaller is better)
    _cutSets = cutSets.stream().sorted(Comparator.comparing(p -> p.getRight())).map(p -> p.getLeft())
            .toArray(CutSet[]::new);

    //created sorted order of materialization points
    //(cut sets in predetermined order, other points sorted by size)
    HashMap<InterestingPoint, Integer> probe = new HashMap<>();
    ArrayList<InterestingPoint> lsearchSpace = new ArrayList<>();
    for (CutSet cs : _cutSets) {
        CollectionUtils.addAll(lsearchSpace, cs.cut);
        for (InterestingPoint p : cs.cut)
            probe.put(p, probe.size());
    }
    //sort materialization points in decreasing order of their sizes
    //which can improve the pruning efficiency by skipping larger sub-spaces.
    for (InterestingPoint p : sortBySize(part.getMatPointsExt(), memo, false))
        if (!probe.containsKey(p)) {
            lsearchSpace.add(p);
            probe.put(p, probe.size());
        }
    _searchSpace = lsearchSpace.toArray(new InterestingPoint[0]);

    //finalize cut sets (update positions wrt search space)
    for (CutSet cs : _cutSets)
        cs.updatePositions(probe);

    //final sanity check of interesting points
    if (_searchSpace.length != part.getMatPointsExt().length)
        throw new RuntimeException("Corrupt linearized search space: " + _searchSpace.length + " vs "
                + part.getMatPointsExt().length);
}
From source file:org.apache.sysml.hops.codegen.opt.ReachabilityGraph.java
private ArrayList<Pair<CutSet, Double>> evaluateCutSets(ArrayList<ArrayList<NodeLink>> candCS, ArrayList<ArrayList<NodeLink>> remain) { ArrayList<Pair<CutSet, Double>> cutSets = new ArrayList<>(); for (ArrayList<NodeLink> cand : candCS) { HashSet<NodeLink> probe = new HashSet<>(cand); //determine subproblems for cutset candidates HashSet<NodeLink> part1 = new HashSet<>(); rCollectInputs(_root, probe, part1); HashSet<NodeLink> part2 = new HashSet<>(); for (NodeLink rNode : cand) rCollectInputs(rNode, probe, part2); //select, score and create cutsets if (!CollectionUtils.containsAny(part1, part2) && !part1.isEmpty() && !part2.isEmpty()) { //score cutsets (smaller is better) double base = UtilFunctions.pow(2, _matPoints.size()); double numComb = UtilFunctions.pow(2, cand.size()); double score = (numComb - 1) / numComb * base + 1 / numComb * UtilFunctions.pow(2, part1.size()) + 1 / numComb * UtilFunctions.pow(2, part2.size()); //construct cutset cutSets.add(Pair.of(new CutSet(cand.stream().map(p -> p._p).toArray(InterestingPoint[]::new), part1.stream().map(p -> p._p).toArray(InterestingPoint[]::new), part2.stream().map(p -> p._p).toArray(InterestingPoint[]::new)), score)); } else {/*w w w .j a va 2s.c om*/ remain.add(cand); } } return cutSets; }
From source file:org.artifactory.build.BaseBuildPromoter.java
private void handleDependency(Collection<String> scopes, Set<RepoPath> itemsToMove, String buildName, String buildNumber, Dependency dependency) { List<String> dependencyScopes = dependency.getScopes(); if (org.artifactory.util.CollectionUtils.isNullOrEmpty(scopes) || (dependencyScopes != null && CollectionUtils.containsAny(dependencyScopes, scopes))) { Set<FileInfo> dependencyInfos = locateItems(buildName, buildNumber, dependency, false); for (FileInfo dependencyInfo : dependencyInfos) { itemsToMove.add(dependencyInfo.getRepoPath()); }//from ww w . j a v a 2 s .com } }
From source file:org.asqatasun.ruleimplementation.AbstractMarkerPageRuleImplementation.java
/** * @param id/*ww w. ja v a2s . c o m*/ * @param classNames * @param role * @return whether one of the string given as argument belongs to a marker * list */ private boolean checkAttributeBelongsToMarkerList(String id, Collection<String> classNames, String role, Collection<String> markerList) { if (CollectionUtils.isEmpty(markerList)) { return false; } Collection<String> elAttr = new ArrayList<>(); elAttr.add(id); elAttr.addAll(classNames); elAttr.add(role); return CollectionUtils.containsAny(markerList, elAttr); }