List of usage examples for org.apache.commons.collections ListUtils intersection
public static List intersection(final List list1, final List list2)
From source file:hydrograph.ui.dataviewer.filter.FilterHelper.java
/** * Checks if column is modifiable./*ww w . ja v a 2 s . com*/ * * @param groupSelectionMap * the group selection map * @param selectionList * the selection list * @return true, if is column modifiable */ public boolean isColumnModifiable(TreeMap<Integer, List<List<Integer>>> groupSelectionMap, List<Integer> selectionList) { boolean retValue = false; for (int i = groupSelectionMap.lastKey(); i >= 0; i--) { retValue = true; List<List<Integer>> groups = new ArrayList<>(groupSelectionMap.get(i)); for (List<Integer> grp : groups) { if (ListUtils.intersection(selectionList, grp).size() > 0) { retValue = false; } } if (retValue) { groupSelectionMap.get(i).add(selectionList); break; } } return retValue; }
From source file:de.iteratec.iteraplan.elasticeam.emfimpl.EMFMetamodel.java
/**
 * Filters the given candidate types down to those the current user may read.
 * <p>
 * Filtering is skipped entirely when access control is disabled or the current
 * user is a supervisor. Otherwise a type is kept when at least one of the
 * user's roles lists it as readable; data types are always included.
 * Relationship types with a required (lower bound &gt; 0) relationship end
 * that is invisible to the user are removed as well.
 *
 * @param candidateTypes the types to filter
 * @return the readable subset of {@code candidateTypes}
 */
@SuppressWarnings("unchecked")
private <T extends TypeExpression> List<T> filterTypes(List<T> candidateTypes) {
    if (disableAccessControl) {
        return candidateTypes;
    }
    if (ElasticeamContextUtil.getCurrentContext().isSupervisor()) {
        return candidateTypes;
    }
    // Linked set keeps a stable order while de-duplicating across roles.
    Set<T> result = Sets.newLinkedHashSet();
    for (Role role : ElasticeamContextUtil.getCurrentContext().getRoles()) {
        if (this.readableTypes.containsKey(role)) {
            result.addAll(ListUtils.intersection(this.readableTypes.get(role), candidateTypes));
        }
    }
    // Data types are not access-controlled; always include them.
    for (T type : candidateTypes) {
        if (type instanceof DataTypeExpression) {
            result.add(type);
        }
    }
    Set<RelationshipTypeExpression> invisibleRTEs = Sets.newHashSet();
    for (T type : result) {
        if (type instanceof EMFRelationshipType) {
            EMFRelationshipType rte = (EMFRelationshipType) type;
            for (EReference eRef : rte.getWrapped().getEAllReferences()) {
                if (eRef.getLowerBound() > 0
                        && rte.findRelationshipEndByPersistentName(eRef.getName()) == null) {
                    // required relEnd is invisible for current user => rte is invisible, too
                    invisibleRTEs.add(rte);
                }
            }
        }
    }
    result.removeAll(invisibleRTEs);
    return Lists.newArrayList(result);
}
From source file:hydrograph.ui.dataviewer.filter.FilterHelper.java
/**
 * Rearranges existing groups after the selection changed: whenever the current
 * selection is strictly smaller than a group it overlaps, the two swap
 * contents — the group shrinks to the selection and the selection grows to the
 * old group.
 *
 * @param groupSelectionMap group lists keyed by level
 * @param selectionList     the selected column indices (mutated in place)
 */
public void rearrangeGroups(TreeMap<Integer, List<List<Integer>>> groupSelectionMap,
        List<Integer> selectionList) {
    List<Integer> swapBuffer = new ArrayList<>();
    for (int level = groupSelectionMap.lastKey(); level >= 0; level--) {
        List<List<Integer>> groups = groupSelectionMap.get(level);
        for (int idx = 0; idx < groups.size(); idx++) {
            List<Integer> group = groups.get(idx);
            if (selectionList.size() < group.size()
                    && ListUtils.intersection(selectionList, group).size() > 0) {
                // Swap via the buffer: group takes the selection, selection
                // takes the old group contents.
                swapBuffer.addAll(group);
                group.clear();
                groups.set(idx, new ArrayList<Integer>(selectionList));
                selectionList.clear();
                selectionList.addAll(swapBuffer);
            }
            swapBuffer.clear();
        }
    }
}
From source file:oeg.rdflicense.RDFLicenseCheck.java
/** * This is to determine the compatibility between any two licenses */// w w w. j a v a2 s . com public static String compose(RDFLicense lic1, RDFLicense lic2) { String compatible = ""; String reason = ""; String source = ""; String resulting = ""; reason = "A computation has been made on the basis of the main permissions, prohibitions. The result has been computed automatically and no warranty exists on its reliability. Please check the legal text."; source = "computed"; List<String> per1 = RDFLicenseCheck.getPermissions(lic1); List<String> pro1 = RDFLicenseCheck.getProhibitions(lic1); List<String> dut1 = RDFLicenseCheck.getDuties(lic1); List<String> per2 = RDFLicenseCheck.getPermissions(lic2); List<String> pro2 = RDFLicenseCheck.getProhibitions(lic2); List<String> dut2 = RDFLicenseCheck.getDuties(lic2); /*System.out.print("Per1: "); for(String p : per1) System.out.print(p+" "); System.out.print("\nPro1: "); for(String p : pro1) System.out.print(p+" "); System.out.print("\nPer2: "); for(String p : per2) System.out.print(p+" "); System.out.print("\nPro2: "); for(String p : pro2) System.out.print(p+" "); System.out.print("\n");*/ if (lic1.getURI().equals(lic2.getURI())) { compatible = "compatible"; reason = "The licenses are the same."; } List<String> per3 = ListUtils.intersection(per1, per2); List<String> pro3 = ListUtils.union(pro1, pro2); List<String> dut3 = ListUtils.union(dut1, dut2); List<String> em1 = ListUtils.intersection(pro3, per3); if (!em1.isEmpty()) { compatible = "not compatible"; } else { compatible = "compatible"; } RDFLicense lic3 = RDFLicenseFactory.createLicense(per3, dut3, pro3); resulting = lic3.toTTL(); // System.out.println(lic3.toTTL()); String json = ""; try { JSONObject obj = new JSONObject(); obj.put("compatible", compatible); obj.put("reason", reason); obj.put("source", source); obj.put("resulting", resulting); json = obj.toString(); } catch (Exception e) { json = "error"; } return json; }
From source file:opennlp.tools.fca.BasicLevelMetrics.java
/**
 * Simple matching coefficient of two intents over the full attribute set:
 * (attributes shared by both + attributes absent from both) / total
 * attributes. Returns 0 when the attribute set is empty.
 *
 * @param intent1 first intent (attribute ids)
 * @param intent2 second intent (attribute ids)
 * @return the SMC similarity in [0, 1], or 0 for an empty attribute set
 */
public double simSMC(ArrayList<Integer> intent1, ArrayList<Integer> intent2) {
    int total = this.attributes.size();
    if (total == 0) {
        return 0;
    }
    int sharedCount = ListUtils.intersection(intent1, intent2).size();
    // Attributes that occur in neither intent ("true negatives").
    ArrayList<Integer> absentFromBoth = new ArrayList<Integer>(this.attributes);
    absentFromBoth.removeAll(ListUtils.union(intent1, intent2));
    return 1. * (sharedCount + absentFromBoth.size()) / total;
}
From source file:opennlp.tools.fca.BasicLevelMetrics.java
/**
 * Jaccard similarity of two intents: |intersection| / |union|.
 *
 * @param intent1 first intent (attribute ids)
 * @param intent2 second intent (attribute ids)
 * @return the Jaccard similarity in [0, 1]; defined as 0 when both intents
 *         are empty (the original divided 0.0 by 0, yielding NaN — 0 matches
 *         the empty-case convention used by simSMC)
 */
public double simJ(ArrayList<Integer> intent1, ArrayList<Integer> intent2) {
    int unionSize = ListUtils.union(intent1, intent2).size();
    if (unionSize == 0) {
        return 0.;
    }
    return 1. * ListUtils.intersection(intent1, intent2).size() / unionSize;
}
From source file:opennlp.tools.fca.ConceptLattice.java
/**
 * Inserts a concept with the given intent/extent into the lattice, starting
 * the search at the concept with index {@code generator}
 * (presumably the AddIntent lattice-construction algorithm — TODO confirm).
 *
 * @param intent    attribute set of the concept to insert
 * @param extent    object set to merge into the concept and its ancestors
 * @param generator index of the concept to start the search from
 * @return the index of the found or newly created concept for {@code intent}
 */
public int AddIntent(List<Integer> intent, LinkedHashSet<Integer> extent, int generator) {
    // Climb to the most specific existing concept for this intent.
    int generator_tmp = GetMaximalConcept(intent, generator);
    generator = generator_tmp;
    // Exact intent match: merge the extent into the existing concept and its
    // ancestors instead of creating a new node.
    if (conceptList.get(generator).getIntent().equals(intent)) {
        conceptList.get(generator).addExtents(extent);
        AddExtentToAncestors(extent, generator);
        return generator;
    }
    Set<Integer> generatorParents = conceptList.get(generator).getParents();
    Set<Integer> newParents = new HashSet<Integer>();
    for (int candidate : generatorParents) {
        // Parent intent not subsumed by the new intent: recurse on the
        // intersection so a common super-concept exists for both.
        if (!intent.containsAll(conceptList.get(candidate).getIntent())) {
            List<Integer> intersection = ListUtils.intersection(intent, conceptList.get(candidate).getIntent());
            LinkedHashSet<Integer> new_extent = new LinkedHashSet<Integer>();
            new_extent.addAll(conceptList.get(candidate).extent);
            new_extent.addAll(extent);
            candidate = AddIntent(intersection, new_extent, candidate);
        }
        // Keep only maximal parents: skip the candidate if an already
        // collected parent subsumes it, and evict collected parents that the
        // candidate subsumes.
        boolean addParents = true;
        Iterator<Integer> iterator = newParents.iterator();
        while (iterator.hasNext()) {
            Integer parent = iterator.next();
            if (conceptList.get(parent).getIntent().containsAll(conceptList.get(candidate).getIntent())) {
                addParents = false;
                break;
            } else {
                if (conceptList.get(candidate).getIntent().containsAll(conceptList.get(parent).getIntent())) {
                    iterator.remove();
                }
            }
        }
        if (addParents) {
            newParents.add(candidate);
        }
    }
    // Create the new concept and link it below the generator.
    FormalConcept newConcept = new FormalConcept();
    newConcept.setIntent(intent);
    LinkedHashSet<Integer> new_extent = new LinkedHashSet<Integer>();
    new_extent.addAll(conceptList.get(generator).extent);
    new_extent.addAll(extent);
    newConcept.addExtents(new_extent);
    newConcept.setPosition(conceptList.size());
    conceptList.add(newConcept);
    conceptList.get(generator).getParents().add(newConcept.position);
    conceptList.get(newConcept.position).childs.add(generator);
    // Rewire: the new concept sits between the generator and each new parent.
    for (int newParent : newParents) {
        if (conceptList.get(generator).getParents().contains(newParent)) {
            conceptList.get(generator).getParents().remove(newParent);
            // NOTE(review): generator is an int — if childs is a List this
            // removes by INDEX, not by value; likely should be
            // remove(Integer.valueOf(generator)). Confirm childs' type.
            conceptList.get(newParent).childs.remove(generator);
        }
        conceptList.get(newConcept.position).getParents().add(newParent);
        conceptList.get(newParent).addExtents(new_extent);
        AddExtentToAncestors(new_extent, newParent);
        conceptList.get(newParent).childs.add(newConcept.position);
    }
    return newConcept.position;
}
From source file:opennlp.tools.jsmlearning.JSMLearnerOnLatticeWithDeduction.java
private List<List<List<ParseTreeChunk>>> findClausesForListOfLemmas(List<String> lemmas) { for (String lemma : lemmas) { for (JSMDecision dec : accumulatedJSMResults) { String[] sepKeywords = dec.getSeparationKeywords(); // if all separation keywords occur in this phrase if (ListUtils.intersection(lemmas, Arrays.asList(sepKeywords)).size() == sepKeywords.length) { return dec.getPosHypotheses(); }//from ww w. ja v a 2s . co m } } return null; }
From source file:org.anyframe.iam.core.intercept.web.ReloadableRestrictedTimesFilterInvocationSecurityMetadataSource.java
/** * in case of roleCheck, return sum of restricted roles in case of * resourceCheck, return result of re-operation with Intersection of allowed * roles//ww w .j a v a 2 s. c o m * * @param isRoleCheck true if roleCheck * @param candidateFoundCadList list of candidate permissions that matches * the given time * @return List<ConfigAttribute> ConfigAttribute List */ private List<ConfigAttribute> recalculateCandidate(boolean isRoleCheck, List candidateFoundCadList) { List<ConfigAttribute> foundCad = new ArrayList<ConfigAttribute>(); List configList = null; List presentList = null; List nextList = null; for (int i = 0; i < candidateFoundCadList.size(); i++) { presentList = (List<ConfigAttribute>) candidateFoundCadList.get(i); if (i == 0) { configList = presentList; } if (i + 1 < candidateFoundCadList.size()) { nextList = (List<ConfigAttribute>) candidateFoundCadList.get(i + 1); // Role Check ? restricted Role ? add . if (isRoleCheck) { configList = ListUtils.sum(configList, nextList); } else { // Resource Check ? unrestricted Role ? // intersection configList = ListUtils.intersection(configList, nextList); } } } foundCad.addAll(configList); if (logger.isDebugEnabled()) logger.debug("candidateFoundCadList : " + candidateFoundCadList + (isRoleCheck ? ", summed List : " : ", intersected List : ") + foundCad); return foundCad; }
From source file:org.apache.archiva.configuration.DefaultArchivaConfiguration.java
/** * upgrade from 1.3//from w ww . j ava 2s . c o m */ private void handleUpgradeConfiguration() throws RegistryException, IndeterminateConfigurationException { List<String> dbConsumers = Arrays.asList("update-db-artifact", "update-db-repository-metadata"); // remove database consumers if here List<String> intersec = ListUtils.intersection(dbConsumers, configuration.getRepositoryScanning().getKnownContentConsumers()); if (!intersec.isEmpty()) { List<String> knowContentConsumers = new ArrayList<>( configuration.getRepositoryScanning().getKnownContentConsumers().size()); for (String knowContentConsumer : configuration.getRepositoryScanning().getKnownContentConsumers()) { if (!dbConsumers.contains(knowContentConsumer)) { knowContentConsumers.add(knowContentConsumer); } } configuration.getRepositoryScanning().setKnownContentConsumers(knowContentConsumers); } // ensure create-archiva-metadata is here if (!configuration.getRepositoryScanning().getKnownContentConsumers().contains("create-archiva-metadata")) { List<String> knowContentConsumers = new ArrayList<>( configuration.getRepositoryScanning().getKnownContentConsumers()); knowContentConsumers.add("create-archiva-metadata"); configuration.getRepositoryScanning().setKnownContentConsumers(knowContentConsumers); } // ensure duplicate-artifacts is here if (!configuration.getRepositoryScanning().getKnownContentConsumers().contains("duplicate-artifacts")) { List<String> knowContentConsumers = new ArrayList<>( configuration.getRepositoryScanning().getKnownContentConsumers()); knowContentConsumers.add("duplicate-artifacts"); configuration.getRepositoryScanning().setKnownContentConsumers(knowContentConsumers); } // save ?? //save( configuration ); }