Example usage for org.apache.commons.collections ListUtils removeAll

Introduction

This page collects example usages of org.apache.commons.collections ListUtils.removeAll from open-source projects.

Prototype

public static List removeAll(Collection collection, Collection remove) 

Document

Removes the elements in remove from collection. The result is a new list containing every element of collection that is not also in remove; neither input collection is modified.
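
For orientation, here is a minimal, self-contained sketch of the method's behavior (the class name and sample data are illustrative, not taken from the projects below). In commons-collections 3.x the method is not generified, so callers typically cast the result, as several of the examples below do; the call returns a new list and leaves both arguments untouched.

import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.ListUtils;

public class ListUtilsRemoveAllSketch {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        List<String> source = Arrays.asList("a", "b", "c", "b", "d");
        List<String> remove = Arrays.asList("b", "d");

        // Every occurrence of "b" and "d" is dropped; the result is a new list.
        List<String> kept = (List<String>) ListUtils.removeAll(source, remove);

        System.out.println(kept);   // [a, c]
        System.out.println(source); // [a, b, c, b, d] - unchanged
    }
}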

Usage

From source file:com.agiletec.plugins.jpldap.apsadmin.user.UserFinderAction.java

@Override
public List<String> getSearchResult() {
    List<String> mainSearchResult = super.getSearchResult();
    try {
        Integer userType = this.getUserType();
        if (null == userType || userType == 0) {
            return mainSearchResult;
        } else {
            Boolean entandoUser = (userType == 1);
            List<String> ldapUsernames = this.getLdapUsernames();
            List<String> newList = null;
            if (entandoUser) {
                newList = (List<String>) ListUtils.removeAll(mainSearchResult, ldapUsernames);
            } else {
                newList = (List<String>) ListUtils.intersection(mainSearchResult, ldapUsernames);
            }
            return newList;
        }
    } catch (Throwable t) {
        ApsSystemUtils.logThrowable(t, this, "getSearchResult");
        throw new RuntimeException("Error while searching users", t);
    }
}

From source file:com.wizecommerce.hecuba.HecubaCassandraManagerTestBase.java

public void testConstructor() {
    String clusterName = "My Awesome Cluster";
    String locationURL = "africa:northamerica:southamerica:asia:europe";
    String thriftPorts = "3726";
    String keyspace = "WizeCommerce";
    String cf = "Platform and Infra";
    String secondaryIndexColumns = "Column_1:Column_2:Column_3";

    CassandraParamsBean bean = new CassandraParamsBean();
    bean.setClustername(clusterName);
    bean.setLocationURLs(locationURL);
    bean.setThriftPorts(thriftPorts);
    bean.setKeyspace(keyspace);
    bean.setColumnFamily(cf);
    bean.setSiColumns(secondaryIndexColumns);

    assertEquals(clusterName, bean.getClustername());
    assertEquals(locationURL, bean.getLocationURLs());
    assertEquals(thriftPorts, bean.getThriftPorts());
    assertEquals(keyspace, bean.getKeyspace());
    assertEquals(cf, bean.getColumnFamily());
    assertEquals(secondaryIndexColumns, bean.getSiColumns());

    HecubaClientManager<Long> cassandraManager = getHecubaClientManager(bean);
    assertEquals(clusterName, cassandraManager.getClusterName());
    assertEquals(locationURL, cassandraManager.getLocationURL());
    assertEquals(thriftPorts, cassandraManager.getPort());
    assertEquals(keyspace, cassandraManager.getKeyspace());
    assertEquals(cf, cassandraManager.getColumnFamilyName());
    assertTrue(ListUtils.removeAll(Arrays.asList(secondaryIndexColumns.split(":")),
            cassandraManager.getColumnsToIndexOnColumnNameAndValue()).size() == 0);

}

From source file:edu.isistan.carcha.CarchaPipelineTest.java

/**
 * Test calculate metrics.
 *
 * @param output the result file from executing the annotator
 * @param golden the golden file annotated by experts
 * @throws UIMAException the UIMA exception
 * @throws IOException Signals that an I/O exception has occurred.
 */
private void testCalculateMetrics(String output, String golden) throws UIMAException, IOException {

    List<String> goldenDesignDecisions = Utils.extractCoveredTextAnnotations(golden, DesignDecision.class);
    List<String> goldenSentences = Utils.extractCoveredTextAnnotations(golden, Sentence.class);

    List<String> discoveredDesignDecisions = Utils.extractCoveredTextAnnotations(output, DesignDecision.class);
    List<String> discoveredSentences = Utils.extractCoveredTextAnnotations(output, Sentence.class);

    //Golden design decisions that were not discovered
    double fn = ListUtils.removeAll(goldenDesignDecisions, discoveredDesignDecisions).size();

    //Discovered design decisions that are not in the golden set
    double fp = ListUtils.removeAll(discoveredDesignDecisions, goldenDesignDecisions).size();

    //Design decisions that were both discovered and present in the golden set
    double tp = ListUtils.intersection(discoveredDesignDecisions, goldenDesignDecisions).size();

    //Non-design decisions that were not marked as design decisions
    double tn = ListUtils.intersection(discoveredSentences, goldenSentences).size();

    double precision = tp / (tp + fp);
    double recall = tp / (tp + fn);
    double fMeasure = 5 * ((precision * recall) / ((4 * precision) + recall));
    double accuracy = (tp + tn) / (fn + fp + tp + tn);

    NumberFormat df = new DecimalFormat("#0.00");

    logger.info("Golden DDs:           " + goldenDesignDecisions.size());
    logger.info("Golden Sentences:     " + goldenSentences.size());
    logger.info("Discovered DDs:       " + discoveredDesignDecisions.size());
    logger.info("Discovered Sentences: " + discoveredSentences.size());
    logger.info("------------------");
    logger.info("False Negative:   " + fn);
    logger.info("False Positive:   " + fp);
    logger.info("True  Negative:   " + tn);
    logger.info("True Positive:    " + tp);
    logger.info("------------------");
    logger.info("Presition: " + df.format(presicion * 100) + "%");
    logger.info("Recall:    " + df.format(recall * 100) + "%");
    logger.info("Acurracy:  " + df.format(accuracy * 100) + "%");
    logger.info("F-Measure: " + df.format(fMeasure * 100) + "%");
}
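
A note on the fMeasure expression above: 5 * ((precision * recall) / ((4 * precision) + recall)) is the general F-beta score with beta = 2, i.e. F_beta = (1 + beta^2) * precision * recall / (beta^2 * precision + recall), which weights recall more heavily than precision. The conventional F1 score would instead be 2 * precision * recall / (precision + recall).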

From source file:edu.isistan.carcha.lsa.LSARunnerTest.java

/**
 * Test calculate metrics.
 * 
 * @param result
 *            the result
 * @param golden
 *            the golden
 */
public void calculateMetrics(TraceabilityDocument result, TraceabilityDocument golden) {

    double fn = ListUtils.removeAll(golden.getLinks(), result.getLinks()).size();
    double fp = ListUtils.removeAll(result.getLinks(), golden.getLinks()).size();
    double tn = Utils.calculateTrueNegatives(golden, result);
    double tp = ListUtils.intersection(result.getLinks(), golden.getLinks()).size();

    double precision = tp / (tp + fp + 0.0000000001);
    double recall = tp / (tp + fn);
    double fMeasure = 5 * ((precision * recall) / ((4 * precision) + recall + 0.0000000001));
    double accuracy = (tp + tn) / (fn + fp + tp + tn);

    NumberFormat df = new DecimalFormat("#0.00");

    logger.info("Golden TRAs:" + golden.getLinks().size());
    logger.info("Discovered TRAs:" + result.getLinks().size());
    logger.info("------------------");
    logger.info("False Negative:\t" + fn);
    logger.info("False Positive:\t" + fp);
    logger.info("True  Negative:\t" + tn);
    logger.info("True Positive:\t" + tp);
    logger.info("------------------");
    logger.info("Presition:\t" + df.format(presicion * 100) + "%");
    logger.info("Recall:\t" + df.format(recall * 100) + "%");
    logger.info("Acurracy:\t" + df.format(accuracy * 100) + "%");
    logger.info("F-Measure:\t" + df.format(fMeasure * 100) + "%");
    logger.info("");
    logger.info("");
}

From source file:com.qrmedia.commons.graph.traverser.AbstractNodeIteratingGraphTraverser.java

/**
 * See {@link GraphTraverser#addNode(Collection)}. 
 * <p>
 * The order in which these nodes will be visited is <u>not</u> guaranteed - they will,
 * of course, be visited after any node(s) registered in previous <code>addNode</code> calls,
 * and before any queued in subsequent calls.
 *  
 * @param nodes  the nodes to be queued for visiting
 * @see #addNode(Object)
 */
@SuppressWarnings("unchecked")
public void addNode(Collection<? extends T> nodes) {
    /*
     * Strip out all the nodes that have already been visited, *without* modifying the 
     * original. Can't call CollectionUtils.removeAll as the 3.2.1 version contains a bug!
     */
    Collection<T> unseenNodes = (Collection<T>) ListUtils.removeAll(nodes, visitedOrQueuedNodes);

    if (!unseenNodes.isEmpty()) {
        visitedOrQueuedNodes.addAll(unseenNodes);
        enqueueNodes(unseenNodes);
    }

}

From source file:com.doculibre.constellio.solr.handler.component.ConstellioAuthorizationComponent.java

@SuppressWarnings("unchecked")
@Override
public void prepare(ResponseBuilder rb) throws IOException {
    SolrQueryRequest req = rb.req;
    SolrIndexSearcher searcher = req.getSearcher();
    //IndexReader reader = req.getSearcher().getReader();
    SolrParams params = req.getParams();

    // A runtime param can skip
    if (!params.getBool(ENABLE, true)) {
        return;
    }

    Query query = rb.getQuery();
    String qstr = rb.getQueryString();
    if (query == null || qstr == null) {
        return;
    }

    ConstellioUser user;
    String userIdStr = params.get(ConstellioSolrQueryParams.USER_ID);
    if (userIdStr != null) {
        UserServices userServices = ConstellioSpringUtils.getUserServices();
        try {
            user = userServices.get(new Long(userIdStr));
        } catch (NumberFormatException e) {
            user = null;
        }
    } else {
        user = null;
    }

    String collectionName = params.get(ConstellioSolrQueryParams.COLLECTION_NAME);
    RecordCollectionServices collectionServices = ConstellioSpringUtils.getRecordCollectionServices();
    FederationServices federationServices = ConstellioSpringUtils.getFederationServices();
    RecordCollection collection = collectionServices.get(collectionName);

    List<TermQuery> restrictedCollectionQueries = new ArrayList<TermQuery>();
    if (collection.isFederationOwner()) {
        List<RecordCollection> includedCollections = federationServices.listIncludedCollections(collection);
        for (RecordCollection includedCollection : includedCollections) {
            if (includedCollection.hasSearchPermission()
                    && (user == null || !user.hasSearchPermission(includedCollection))) {
                restrictedCollectionQueries.add(new TermQuery(
                        new Term(IndexField.COLLECTION_ID_FIELD, "" + includedCollection.getId())));
            }
        }
    }

    // User must be logged in to see private records
    if (user != null) {
        String luceneQueryStr = params.get(ConstellioSolrQueryParams.LUCENE_QUERY);
        if (StringUtils.isBlank(luceneQueryStr)) {
            return;
        }

        IndexSchema schema = req.getSchema();
        SolrQueryParser queryParser = new SolrQueryParser(rb.getQparser(), IndexField.DEFAULT_SEARCH_FIELD);
        Query luceneQuery;
        try {
            luceneQuery = queryParser.parse(luceneQueryStr);
        } catch (SyntaxError e) {
            log.error("Error parsing lucene query " + luceneQueryStr, e);
            return;
        }
        // Create a new query which will only include private records
        BooleanQuery privateRecordQuery = new BooleanQuery(true);
        privateRecordQuery.add(luceneQuery, BooleanClause.Occur.MUST);
        for (TermQuery restrictionCollectionQuery : restrictedCollectionQueries) {
            privateRecordQuery.add(restrictionCollectionQuery, BooleanClause.Occur.MUST_NOT);
        }

        TermQuery privateRecordTQ = new TermQuery(new Term(IndexField.PUBLIC_RECORD_FIELD, "F"));
        privateRecordQuery.add(privateRecordTQ, BooleanClause.Occur.MUST);

        DocSet privateRecordIdDocSet = searcher.getDocSet(privateRecordQuery);

        if (privateRecordIdDocSet.size() > 0) {
            RecordServices recordServices = ConstellioSpringUtils.getRecordServices();
            ACLServices aclServices = ConstellioSpringUtils.getACLServices();
            ConnectorManagerServices connectorManagerServices = ConstellioSpringUtils
                    .getConnectorManagerServices();

            List<Record> privateRecords = new ArrayList<Record>();
            DocIterator docIt = privateRecordIdDocSet.iterator();
            while (docIt.hasNext()) {
                int docId = docIt.nextDoc();
                Document luceneDoc = searcher.doc(docId);
                Long recordId = new Long(luceneDoc.get(IndexField.RECORD_ID_FIELD));
                Record record = recordServices.get(recordId, collection);
                privateRecords.add(record);
            }
            // First pass : Remove ACL authorized records
            List<Record> unevaluatedPrivateRecords = aclServices.removeAuthorizedRecords(privateRecords, user);
            if (!unevaluatedPrivateRecords.isEmpty()) {
                Set<UserCredentials> userCredentials = user.getUserCredentials();
                // Second pass : Ask the connector manager
                ConnectorManager connectorManager = connectorManagerServices.getDefaultConnectorManager();
                List<Record> authorizedRecords = connectorManagerServices
                        .authorizeByConnector(unevaluatedPrivateRecords, userCredentials, connectorManager);
                List<Record> unauthorizedRecords = ListUtils.removeAll(unevaluatedPrivateRecords,
                        authorizedRecords);

                if (!unauthorizedRecords.isEmpty()) {
                    // Create a new query which will exclude unauthorized records
                    BooleanQuery authorizedRecordQuery = new BooleanQuery(true);
                    authorizedRecordQuery.add(query, BooleanClause.Occur.MUST);
                    for (Record unauthorizedRecord : unauthorizedRecords) {
                        TermQuery unauthorizedRecordTQ = new TermQuery(
                                new Term(IndexField.RECORD_ID_FIELD, "" + unauthorizedRecord.getId()));
                        authorizedRecordQuery.add(unauthorizedRecordTQ, BooleanClause.Occur.MUST_NOT);
                    }
                    rb.setQuery(authorizedRecordQuery);
                }
            }
        }
    } else {
        BooleanQuery publicRecordQuery = new BooleanQuery(true);
        publicRecordQuery.add(query, BooleanClause.Occur.MUST);
        TermQuery publicRecordTQ = new TermQuery(new Term(IndexField.PUBLIC_RECORD_FIELD, "T"));
        publicRecordQuery.add(publicRecordTQ, BooleanClause.Occur.MUST);
        for (TermQuery restrictionCollectionQuery : restrictedCollectionQueries) {
            publicRecordQuery.add(restrictionCollectionQuery, BooleanClause.Occur.MUST_NOT);
        }
        rb.setQuery(publicRecordQuery);
    }
}

From source file:org.ensembl.healthcheck.testcase.generic.Karyotype.java

protected boolean checkKaryotype(DatabaseRegistryEntry dbre) {

    Connection con = dbre.getConnection();
    boolean result = true;

    String[] seqRegionNames = DBUtils.getColumnValues(con,
            "SELECT s.name FROM seq_region s, coord_system cs WHERE s.coord_system_id=cs.coord_system_id AND cs.name='chromosome' AND cs.attrib='default_version' AND s.name NOT LIKE 'LRG%' AND s.name != 'MT'");

    String[] patches = DBUtils.getColumnValues(con,
            "SELECT sr.name FROM seq_region sr, assembly_exception ae WHERE sr.seq_region_id=ae.seq_region_id AND ae.exc_type IN ('PATCH_NOVEL', 'PATCH_FIX', 'HAP')");
    List<String> patchList = Arrays.asList(patches);
    List<String> nonPatchSeqRegions = ListUtils.removeAll(Arrays.asList(seqRegionNames), patchList);

    int count = 0;
    try {
        PreparedStatement stmt = con.prepareStatement(
                "SELECT sr.name, MAX(kar.seq_region_end), sr.length FROM seq_region sr, karyotype kar WHERE sr.seq_region_id=kar.seq_region_id AND sr.name = ? GROUP BY kar.seq_region_id");

        for (String seqRegion : seqRegionNames) {
            stmt.setString(1, seqRegion);
            ResultSet rs = stmt.executeQuery();
            boolean hasKaryotype = false;
            while (rs.next() && count < 50) {
                hasKaryotype = true;
                if (patchList.contains(seqRegion)) {
                    continue;
                }
                String chrName = rs.getString(1);
                int karLen = rs.getInt(2);
                int chrLen = rs.getInt(3);
                String prob = "";
                int bp = 0;
                if (karLen > chrLen) {
                    bp = karLen - chrLen;
                    prob = "longer";
                } else {
                    bp = chrLen - karLen;
                    prob = "shorter";
                }
                if (bp > 0) {
                    result = false;
                    count++;
                    ReportManager.problem(this, con, "Chromosome " + chrName + " is " + bp + "bp " + prob
                            + " in the karyotype table than " + "in the seq_region table");
                }
            }
            if (!hasKaryotype) {
                result = false;
                ReportManager.problem(this, con, "Chromosome " + seqRegion + " has no karyotype data");
            }
        }
    } catch (SQLException e) {
        e.printStackTrace();
    }
    if (count == 0) {
        ReportManager.correct(this, con,
                "Chromosome lengths are the same" + " in karyotype and seq_region tables");
    }

    return result;

}

From source file:org.jtalks.jcommune.service.transactional.TransactionalLastReadPostService.java

/**
 * {@inheritDoc}
 */
@Override
public List<Topic> fillLastReadPostForTopics(List<Topic> topics) {
    JCUser currentUser = userService.getCurrentUser();
    if (!currentUser.isAnonymous()) {
        List<Topic> notModifiedTopics = extractNotModifiedTopicsSinceForumMarkedAsRead(currentUser, topics);
        for (Topic notModifiedTopic : notModifiedTopics) {
            Post lastPost = notModifiedTopic.getLastPost();
            notModifiedTopic.setLastReadPostDate(lastPost.getCreationDate());
        }
        @SuppressWarnings("unchecked")
        List<Topic> modifiedTopics = ListUtils.removeAll(topics, notModifiedTopics);
        fillLastReadPostsForModifiedTopics(modifiedTopics, currentUser);
    }
    return topics;
}

From source file:org.kuali.rice.krad.datadictionary.DataDictionary.java

/**
 * Populates and processes the dictionary bean factory based on the configured files
 *
 * @param beans - The bean factory for the dictionary bean
 * @param moduleDictionaryFiles - List of bean xml files
 * @param index - Index of the data dictionary beans
 * @param validationFiles - The List of bean xml files loaded into the bean file
 */
public void loadDictionaryBeans(DefaultListableBeanFactory beans,
        Map<String, List<String>> moduleDictionaryFiles, DataDictionaryIndex index,
        ArrayList<String> validationFiles) {
    // expand configuration locations into files
    timer.start("XML File Loading");
    LOG.info("Starting DD XML File Load");

    List<String> allBeanNames = new ArrayList<String>();
    for (String namespaceCode : moduleLoadOrder) {
        LOG.info("Processing Module: " + namespaceCode);
        List<String> moduleDictionaryLocations = moduleDictionaryFiles.get(namespaceCode);
        if (LOG.isDebugEnabled()) {
            LOG.debug("DD Locations in Module: " + moduleDictionaryLocations);
        }

        if (moduleDictionaryLocations == null) {
            continue;
        }

        XmlBeanDefinitionReader xmlReader = new XmlBeanDefinitionReader(beans);

        String configFileLocationsArray[] = new String[moduleDictionaryLocations.size()];
        configFileLocationsArray = moduleDictionaryLocations.toArray(configFileLocationsArray);
        for (int i = 0; i < configFileLocationsArray.length; i++) {
            validationFiles.add(configFileLocationsArray[i]);
        }

        try {
            xmlReader.loadBeanDefinitions(configFileLocationsArray);

            // get updated bean names from factory and compare to our previous list to get those that
            // were added by the last namespace
            List<String> addedBeanNames = Arrays.asList(beans.getBeanDefinitionNames());
            addedBeanNames = ListUtils.removeAll(addedBeanNames, allBeanNames);
            index.addBeanNamesToNamespace(namespaceCode, addedBeanNames);

            allBeanNames.addAll(addedBeanNames);
        } catch (Exception e) {
            throw new DataDictionaryException("Error loading bean definitions: " + e.getLocalizedMessage(), e);
        }
    }

    LOG.info("Completed DD XML File Load");
    timer.stop();
}

From source file:org.kuali.rice.krad.devtools.datadictionary.ReloadingDataDictionary.java

/**
 * Call back when a dictionary file is changed. Calls the spring bean reader
 * to reload the file (which will override beans as necessary and destroy
 * singletons) and runs the indexer.
 *
 * @see no.geosoft.cc.io.FileListener#fileChanged(java.io.File)
 */
public void fileChanged(File file) {
    LOG.info("reloading dictionary configuration for " + file.getName());
    try {
        List<String> beforeReloadBeanNames = Arrays.asList(ddBeans.getBeanDefinitionNames());

        Resource resource = new FileSystemResource(file);
        xmlReader.loadBeanDefinitions(resource);

        List<String> afterReloadBeanNames = Arrays.asList(ddBeans.getBeanDefinitionNames());

        List<String> addedBeanNames = ListUtils.removeAll(afterReloadBeanNames, beforeReloadBeanNames);
        String namespace = KRADConstants.DEFAULT_NAMESPACE;
        if (fileToNamespaceMapping.containsKey(file.getAbsolutePath())) {
            namespace = fileToNamespaceMapping.get(file.getAbsolutePath());
        }

        ddIndex.addBeanNamesToNamespace(namespace, addedBeanNames);

        performDictionaryPostProcessing(true);
    } catch (Exception e) {
        LOG.info("Exception in dictionary hot deploy: " + e.getMessage(), e);
    }
}