Example usage for org.apache.commons.collections CollectionUtils select

Introduction

This page collects example usages of org.apache.commons.collections CollectionUtils.select, gathered from open-source projects.

Prototype

public static Collection select(Collection inputCollection, Predicate predicate) 

Document

Selects all elements from input collection which match the given predicate into an output collection.
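
Before the project examples below, a minimal self-contained sketch of the call. It assumes Commons Collections 3.x, whose API is untyped, so the result comes back as a raw Collection:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;

public class SelectExample {
    public static void main(String[] args) {
        List<Integer> numbers = new ArrayList<Integer>();
        for (int i = 1; i <= 10; i++) {
            numbers.add(i);
        }

        // select() copies every element the predicate accepts into a new
        // collection; the input collection itself is never modified.
        Collection<?> evens = CollectionUtils.select(numbers, new Predicate() {
            public boolean evaluate(Object object) {
                return ((Integer) object) % 2 == 0;
            }
        });

        System.out.println(evens); // prints [2, 4, 6, 8, 10]
    }
}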

Usage

From source file:com.kcs.service.impl.GenerateXmlServiceImpl.java

private List<Datasetirf> filterRepeatChild(final Datasetirf main, List<Datasetirf> childList) {
    List<Datasetirf> result = new ArrayList<Datasetirf>();
    final Predicate childRepeatPredicate = new Predicate() {
        @Override
        public boolean evaluate(Object o) {
            if (Utility.isNotNull(o)) {
                Datasetirf child = (Datasetirf) o;
                boolean result = (main.getArrgmentTye().equals(child.getArrgmentTye())
                        && main.getInvPartyTye().equals(child.getInvPartyTye())
                        && main.getCurrCode().equals(child.getCurrCode())
                        && main.getDepsitTerm().equals(child.getDepsitTerm())
                        && main.getDepsitTermUnt().equals(child.getDepsitTermUnt())
                        && main.getEffectiveDate().equals(child.getEffectiveDate())
                        && objEquals(main.getEndDate(), child.getEndDate()));
                return result;
            }
            return false;
        }
    };
    result = (List<Datasetirf>) CollectionUtils.select(childList, childRepeatPredicate);
    return result;
}
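
Note the unchecked cast on the result: in Commons Collections 3.x, select happens to return an ArrayList, so the cast to List succeeds at runtime, but that is an implementation detail rather than part of the method's contract. A slightly more defensive variant (a sketch reusing the predicate above) copies the raw result into a typed list instead:

@SuppressWarnings("unchecked")
List<Datasetirf> selected = new ArrayList<Datasetirf>(
        CollectionUtils.select(childList, childRepeatPredicate));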

From source file:net.sourceforge.fenixedu.domain.Person.java

public static Collection<Person> findInternalPersonByNameAndRole(final String name, final RoleType roleType) {
    final Role role = Role.getRoleByRoleType(roleType);
    return CollectionUtils.select(findInternalPerson(name), new Predicate() {

        @Override
        public boolean evaluate(final Object arg0) {
            return ((Person) arg0).hasPersonRoles(role);
        }

    });
}

From source file:com.kcs.service.impl.GenerateXmlServiceImpl.java

private List<DfFxm> filterRelateFiGroupInfoChild(final DfFxm main, List<DfFxm> childList) {
    List<DfFxm> result = new ArrayList<DfFxm>();
    final Predicate childRepeatPredicate = new Predicate() {
        @Override
        public boolean evaluate(Object o) {
            if (Utility.isNotNull(o)) {
                DfFxm child = (DfFxm) o;
                boolean result = (objEquals(main.getCustomerCode(), child.getCustomerCode())
                        && objEquals(main.getSeq(), child.getSeq()));
                return result;
            }
            return false;
        }
    };
    result = (List<DfFxm>) CollectionUtils.select(childList, childRepeatPredicate);
    return result;
}

From source file:com.kcs.service.impl.GenerateXmlServiceImpl.java

private List<DfOlb> filterRelateFiGroupInfoOlbChild(final DfOlb main, List<DfOlb> childList) {
    List<DfOlb> result = new ArrayList<DfOlb>();
    final Predicate childRepeatPredicate = new Predicate() {
        @Override
        public boolean evaluate(Object o) {
            if (Utility.isNotNull(o)) {
                DfOlb child = (DfOlb) o;
                boolean result = (objEquals(main.getCustCode(), child.getCustCode())
                        && objEquals(main.getSeq(), child.getSeq()));
                return result;
            }
            return false;
        }
    };
    result = (List<DfOlb>) CollectionUtils.select(childList, childRepeatPredicate);
    return result;
}

From source file:net.sourceforge.squirrel_sql.fw.gui.action.wikiTable.CopyWikiTableActionFactory.java

/**
 * @see net.sourceforge.squirrel_sql.fw.gui.action.wikiTable.ICopyWikiTableActionFactory#createMenueItem(net.sourceforge.squirrel_sql.fw.gui.action.wikiTable.ITableActionCallback)
 */
@Override
public JMenuItem createMenueItem(ITableActionCallback callback) {
    @SuppressWarnings("unchecked")
    Collection<IWikiTableConfiguration> configurations = CollectionUtils
            .select(configurationFactory.getConfigurations(), new EnabledConfigurationPredicate());
    if (configurations.isEmpty()) {
        return createMenuForEmptyConfigurations();
    } else if (configurations.size() == 1) {
        return createMenuForExactlyOneConfiguration(callback, configurations.iterator().next());
    } else {
        return createMenuForAListOfConfiguration(callback, configurations);
    }
}
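
EnabledConfigurationPredicate is defined elsewhere in SQuirreL SQL and is not reproduced on this page. A hypothetical sketch of such a reusable predicate class, assuming IWikiTableConfiguration exposes an isEnabled() accessor, might look like this:

public class EnabledConfigurationPredicate implements Predicate {
    @Override
    public boolean evaluate(Object object) {
        // Keep only the configurations the user has enabled.
        return ((IWikiTableConfiguration) object).isEnabled();
    }
}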

From source file:nz.org.take.DefaultKnowledgeBase.java

/**
 * Retrieve knowledge by predicate.
 * @param p the predicate
 * @return knowledge elements
 */
public synchronized List<KnowledgeElement> getElements(final Predicate p) {
    org.apache.commons.collections.Predicate filter = new org.apache.commons.collections.Predicate() {
        public boolean evaluate(Object arg) {
            KnowledgeElement e = (KnowledgeElement) arg;
            return p.equals(e.getPredicate());
        }
    };
    return (List<KnowledgeElement>) CollectionUtils.select(this.elements, filter);
}
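
Note the fully qualified org.apache.commons.collections.Predicate in this example: the method parameter p is take's own Predicate type, so qualifying the anonymous class avoids a name clash between the two interfaces. As in the earlier examples, the cast of the result to List<KnowledgeElement> is unchecked.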

From source file:org.andromda.cartridges.gui.metafacades.GuiUseCaseLogicImpl.java

/**
 * @return navigationParents
 * @see org.andromda.cartridges.gui.metafacades.GuiUseCase#getNavigationParents()
 */
@Override
protected Collection<FrontEndUseCase> handleGetNavigationParents() {

    final GuiUseCase theUseCase = this;

    return CollectionUtils.select(this.getAllUseCases(), new Predicate() {

        @Override
        @SuppressWarnings("synthetic-access")
        public boolean evaluate(final Object o) {

            final GuiUseCase useCase = (GuiUseCase) o;

            if (theUseCase.equals(useCase)) {

                return false;

            }

            return GuiUseCaseLogicImpl.isParent(theUseCase, useCase);

        }

    });

}

From source file:org.apache.ambari.server.controller.AmbariManagementControllerImplTest.java

@Test
public void testScheduleSmokeTest() throws Exception {

    final String HOST1 = "host1";
    final String OS_TYPE = "centos5";
    final String STACK_ID = "HDP-2.0.1";
    final String CLUSTER_NAME = "c1";
    final String HDFS_SERVICE_CHECK_ROLE = "HDFS_SERVICE_CHECK";
    final String MAPREDUCE2_SERVICE_CHECK_ROLE = "MAPREDUCE2_SERVICE_CHECK";
    final String YARN_SERVICE_CHECK_ROLE = "YARN_SERVICE_CHECK";

    Map<String, String> mapRequestProps = Collections.<String, String>emptyMap();
    Injector injector = Guice.createInjector(new AbstractModule() {
        @Override
        protected void configure() {
            Properties properties = new Properties();
            properties.setProperty(Configuration.SERVER_PERSISTENCE_TYPE_KEY, "in-memory");

            properties.setProperty(Configuration.METADETA_DIR_PATH, "src/test/resources/stacks");
            properties.setProperty(Configuration.SERVER_VERSION_FILE, "../version");
            properties.setProperty(Configuration.OS_VERSION_KEY, OS_TYPE);
            try {
                install(new ControllerModule(properties));
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    });
    injector.getInstance(GuiceJpaInitializer.class);

    try {
        AmbariManagementController amc = injector.getInstance(AmbariManagementController.class);
        Clusters clusters = injector.getInstance(Clusters.class);

        clusters.addHost(HOST1);
        Host host = clusters.getHost(HOST1);
        host.setOsType(OS_TYPE);
        host.persist();

        ClusterRequest clusterRequest = new ClusterRequest(null, CLUSTER_NAME, STACK_ID, null);
        amc.createCluster(clusterRequest);

        Set<ServiceRequest> serviceRequests = new HashSet<ServiceRequest>();
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", null, null));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", null, null));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", null, null));

        amc.createServices(serviceRequests);

        Set<ServiceComponentRequest> serviceComponentRequests = new HashSet<ServiceComponentRequest>();
        serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "NAMENODE", null, null));
        serviceComponentRequests
                .add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "SECONDARY_NAMENODE", null, null));
        serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "DATANODE", null, null));
        serviceComponentRequests
                .add(new ServiceComponentRequest(CLUSTER_NAME, "MAPREDUCE2", "HISTORYSERVER", null, null));
        serviceComponentRequests
                .add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "RESOURCEMANAGER", null, null));
        serviceComponentRequests
                .add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "NODEMANAGER", null, null));

        amc.createComponents(serviceComponentRequests);

        Set<HostRequest> hostRequests = new HashSet<HostRequest>();
        hostRequests.add(new HostRequest(HOST1, CLUSTER_NAME, null));

        amc.createHosts(hostRequests);

        Set<ServiceComponentHostRequest> componentHostRequests = new HashSet<ServiceComponentHostRequest>();
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "DATANODE", HOST1, null, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "NAMENODE", HOST1, null, null));
        componentHostRequests.add(
                new ServiceComponentHostRequest(CLUSTER_NAME, null, "SECONDARY_NAMENODE", HOST1, null, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "HISTORYSERVER", HOST1, null, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "RESOURCEMANAGER", HOST1, null, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "NODEMANAGER", HOST1, null, null));

        amc.createHostComponents(componentHostRequests);

        //Install services
        serviceRequests.clear();
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", null, State.INSTALLED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", null, State.INSTALLED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", null, State.INSTALLED.name()));

        amc.updateServices(serviceRequests, mapRequestProps, true, false);

        Cluster cluster = clusters.getCluster(CLUSTER_NAME);

        for (String serviceName : cluster.getServices().keySet()) {

            for (String componentName : cluster.getService(serviceName).getServiceComponents().keySet()) {

                Map<String, ServiceComponentHost> serviceComponentHosts = cluster.getService(serviceName)
                        .getServiceComponent(componentName).getServiceComponentHosts();

                for (Map.Entry<String, ServiceComponentHost> entry : serviceComponentHosts.entrySet()) {
                    ServiceComponentHost cHost = entry.getValue();
                    cHost.handleEvent(new ServiceComponentHostInstallEvent(cHost.getServiceComponentName(),
                            cHost.getHostName(), System.currentTimeMillis(), STACK_ID));
                    cHost.handleEvent(new ServiceComponentHostOpSucceededEvent(cHost.getServiceComponentName(),
                            cHost.getHostName(), System.currentTimeMillis()));
                }
            }
        }

        //Start services
        serviceRequests.clear();
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", null, State.STARTED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", null, State.STARTED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", null, State.STARTED.name()));

        RequestStatusResponse response = amc.updateServices(serviceRequests, mapRequestProps, true, false);

        Collection<?> hdfsSmokeTasks = CollectionUtils.select(response.getTasks(),
                new RolePredicate(HDFS_SERVICE_CHECK_ROLE));
        //Ensure that smoke test task was created for HDFS
        assertEquals(1, hdfsSmokeTasks.size());

        Collection<?> mapreduce2SmokeTasks = CollectionUtils.select(response.getTasks(),
                new RolePredicate(MAPREDUCE2_SERVICE_CHECK_ROLE));
        //Ensure that smoke test task was created for MAPREDUCE2
        assertEquals(1, mapreduce2SmokeTasks.size());

        Collection<?> yarnSmokeTasks = CollectionUtils.select(response.getTasks(),
                new RolePredicate(YARN_SERVICE_CHECK_ROLE));
        //Ensure that smoke test task was created for YARN
        assertEquals(1, yarnSmokeTasks.size());
    } finally {
        injector.getInstance(PersistService.class).stop();
    }
}
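
RolePredicate is a helper defined inside the test class and is not reproduced on this page. A hypothetical sketch, assuming the tasks in the response are ShortTaskStatus objects exposing a getRole() accessor, might look like this:

private static class RolePredicate implements Predicate {
    private final String role;

    public RolePredicate(String role) {
        this.role = role;
    }

    public boolean evaluate(Object obj) {
        // Match tasks whose role name equals the requested service-check role.
        ShortTaskStatus task = (ShortTaskStatus) obj;
        return role.equals(task.getRole());
    }
}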

From source file:org.apache.ambari.server.controller.AmbariManagementControllerTest.java

@Test
public void testScheduleSmokeTest() throws Exception {

    final String HOST1 = "host1";
    final String OS_TYPE = "centos5";
    final String STACK_ID = "HDP-2.0.1";
    final String CLUSTER_NAME = "c1";
    final String HDFS_SERVICE_CHECK_ROLE = "HDFS_SERVICE_CHECK";
    final String MAPREDUCE2_SERVICE_CHECK_ROLE = "MAPREDUCE2_SERVICE_CHECK";
    final String YARN_SERVICE_CHECK_ROLE = "YARN_SERVICE_CHECK";

    Map<String, String> mapRequestProps = Collections.emptyMap();
    Injector injector = Guice.createInjector(new AbstractModule() {
        @Override
        protected void configure() {
            Properties properties = new Properties();
            properties.setProperty(Configuration.SERVER_PERSISTENCE_TYPE_KEY, "in-memory");

            properties.setProperty(Configuration.METADETA_DIR_PATH, "src/test/resources/stacks");
            properties.setProperty(Configuration.SERVER_VERSION_FILE, "../version");
            properties.setProperty(Configuration.OS_VERSION_KEY, OS_TYPE);
            properties.setProperty(Configuration.SHARED_RESOURCES_DIR_KEY, "src/test/resources/");
            try {
                install(new ControllerModule(properties));
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    });
    injector.getInstance(GuiceJpaInitializer.class);

    try {
        AmbariManagementController amc = injector.getInstance(AmbariManagementController.class);
        Clusters clusters = injector.getInstance(Clusters.class);

        clusters.addHost(HOST1);
        Host host = clusters.getHost(HOST1);
        setOsFamily(host, "redhat", "5.9");
        host.persist();

        ClusterRequest clusterRequest = new ClusterRequest(null, CLUSTER_NAME, STACK_ID, null);
        amc.createCluster(clusterRequest);

        Set<ServiceRequest> serviceRequests = new HashSet<ServiceRequest>();
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", null));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", null));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", null));

        ServiceResourceProviderTest.createServices(amc, serviceRequests);

        Set<ServiceComponentRequest> serviceComponentRequests = new HashSet<ServiceComponentRequest>();
        serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "NAMENODE", null));
        serviceComponentRequests
                .add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "SECONDARY_NAMENODE", null));
        serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "HDFS", "DATANODE", null));
        serviceComponentRequests
                .add(new ServiceComponentRequest(CLUSTER_NAME, "MAPREDUCE2", "HISTORYSERVER", null));
        serviceComponentRequests
                .add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "RESOURCEMANAGER", null));
        serviceComponentRequests.add(new ServiceComponentRequest(CLUSTER_NAME, "YARN", "NODEMANAGER", null));

        ComponentResourceProviderTest.createComponents(amc, serviceComponentRequests);

        Set<HostRequest> hostRequests = new HashSet<HostRequest>();
        hostRequests.add(new HostRequest(HOST1, CLUSTER_NAME, null));

        HostResourceProviderTest.createHosts(amc, hostRequests);

        Set<ServiceComponentHostRequest> componentHostRequests = new HashSet<ServiceComponentHostRequest>();
        componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "DATANODE", HOST1, null));
        componentHostRequests.add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "NAMENODE", HOST1, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "SECONDARY_NAMENODE", HOST1, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "HISTORYSERVER", HOST1, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "RESOURCEMANAGER", HOST1, null));
        componentHostRequests
                .add(new ServiceComponentHostRequest(CLUSTER_NAME, null, "NODEMANAGER", HOST1, null));

        amc.createHostComponents(componentHostRequests);

        //Install services
        serviceRequests.clear();
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", State.INSTALLED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", State.INSTALLED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", State.INSTALLED.name()));

        ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, true, false);

        Cluster cluster = clusters.getCluster(CLUSTER_NAME);

        for (String serviceName : cluster.getServices().keySet()) {

            for (String componentName : cluster.getService(serviceName).getServiceComponents().keySet()) {

                Map<String, ServiceComponentHost> serviceComponentHosts = cluster.getService(serviceName)
                        .getServiceComponent(componentName).getServiceComponentHosts();

                for (Map.Entry<String, ServiceComponentHost> entry : serviceComponentHosts.entrySet()) {
                    ServiceComponentHost cHost = entry.getValue();
                    cHost.handleEvent(new ServiceComponentHostInstallEvent(cHost.getServiceComponentName(),
                            cHost.getHostName(), System.currentTimeMillis(), STACK_ID));
                    cHost.handleEvent(new ServiceComponentHostOpSucceededEvent(cHost.getServiceComponentName(),
                            cHost.getHostName(), System.currentTimeMillis()));
                }
            }
        }

        //Start services
        serviceRequests.clear();
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", State.STARTED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", State.STARTED.name()));
        serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", State.STARTED.name()));

        RequestStatusResponse response = ServiceResourceProviderTest.updateServices(amc, serviceRequests,
                mapRequestProps, true, false);

        Collection<?> hdfsSmokeTasks = CollectionUtils.select(response.getTasks(),
                new RolePredicate(HDFS_SERVICE_CHECK_ROLE));
        //Ensure that smoke test task was created for HDFS
        org.junit.Assert.assertEquals(1, hdfsSmokeTasks.size());

        Collection<?> mapreduce2SmokeTasks = CollectionUtils.select(response.getTasks(),
                new RolePredicate(MAPREDUCE2_SERVICE_CHECK_ROLE));
        //Ensure that smoke test task was created for MAPREDUCE2
        org.junit.Assert.assertEquals(1, mapreduce2SmokeTasks.size());

        Collection<?> yarnSmokeTasks = CollectionUtils.select(response.getTasks(),
                new RolePredicate(YARN_SERVICE_CHECK_ROLE));
        //Ensure that smoke test task was created for YARN
        org.junit.Assert.assertEquals(1, yarnSmokeTasks.size());
    } finally {
        injector.getInstance(PersistService.class).stop();
    }
}

From source file:org.apache.cayenne.modeler.editor.ObjEntityTab.java

/**
 * Updates the view from the current model state. Invoked when a currently displayed
 * ObjEntity is changed.
 * 
 * @param entity current entity
 */
private void initFromModel(final ObjEntity entity) {
    // TODO: this is a hack until we implement a real MVC
    qualifier.getComponent().setBackground(Color.WHITE);

    name.setText(entity.getName());
    superClassName.setText(entity.getSuperClassName());
    className.setText(entity.getClassName());
    readOnly.setSelected(entity.isReadOnly());

    isAbstract.setSelected(entity.isAbstract());
    serverOnly.setSelected(entity.isServerOnly());
    clientClassName.setText(entity.getClientClassName());
    clientSuperClassName.setText(entity.getClientSuperClassName());

    qualifier.setText(new ExpressionConvertor().valueAsString(entity.getDeclaredQualifier()));

    // TODO: fix inheritance - we should allow selecting optimistic
    // lock only if the superclass is not already locked;
    // otherwise this must stay checked but not be editable.
    optimisticLocking.setSelected(entity.getDeclaredLockType() == ObjEntity.LOCK_TYPE_OPTIMISTIC);
    excludeSuperclassListeners.setSelected(entity.isExcludingSuperclassListeners());
    excludeDefaultListeners.setSelected(entity.isExcludingDefaultListeners());

    // init DbEntities
    EntityResolver resolver = mediator.getEntityResolver();
    DataMap map = mediator.getCurrentDataMap();
    Object[] dbEntities = resolver.getDbEntities().toArray();
    Arrays.sort(dbEntities, Comparators.getDataMapChildrenComparator());

    DefaultComboBoxModel dbModel = new DefaultComboBoxModel(dbEntities);
    dbModel.setSelectedItem(entity.getDbEntity());
    dbEntityCombo.setRenderer(CellRenderers.entityListRendererWithIcons(map));
    dbEntityCombo.setModel(dbModel);

    boolean isUsedInheritance = entity.getSuperEntity() != null;
    dbEntityCombo.setEnabled(!isUsedInheritance);

    // toggle visibility and editability rules

    toggleClientFieldsVisible(map.isClientSupported());
    toggleEnabled(entity.getSuperEntityName() == null, !entity.isServerOnly());

    // init ObjEntities for inheritance
    Predicate inheritanceFilter = new Predicate() {

        public boolean evaluate(Object object) {
            // do not show this entity or any of the subentities
            if (entity == object) {
                return false;
            }

            if (object instanceof ObjEntity) {
                return !((ObjEntity) object).isSubentityOf(entity);
            }

            return false;
        }
    };

    Object[] objEntities = CollectionUtils.select(map.getObjEntities(), inheritanceFilter).toArray();
    Arrays.sort(objEntities, Comparators.getDataMapChildrenComparator());
    Object[] finalObjEntities = new Object[objEntities.length + 1];
    finalObjEntities[0] = noInheritance;
    System.arraycopy(objEntities, 0, finalObjEntities, 1, objEntities.length);

    DefaultComboBoxModel superEntityModel = new DefaultComboBoxModel(finalObjEntities);
    superEntityModel.setSelectedItem(entity.getSuperEntity());
    superEntityCombo.setRenderer(CellRenderers.entityListRendererWithIcons(map));
    superEntityCombo.setModel(superEntityModel);
}