Example usage for org.apache.commons.collections CollectionUtils forAllDo

List of usage examples for org.apache.commons.collections CollectionUtils forAllDo

Introduction

On this page you can find example usage for org.apache.commons.collections CollectionUtils forAllDo.

Prototype

public static void forAllDo(Collection collection, Closure closure) 

Source Link

Document

Executes the given closure on each element in the collection.

Usage

From source file:com.projity.server.data.Serializer.java

/**
 * Visits every element of a project in bottom-up (reverse-dependency) order,
 * handing each one to the supplied closure: a task's assignments and
 * predecessors before the task itself, each resource's enterprise resource
 * before the resource, the project calendar (when present), and the project last.
 *
 * @param project the project whose elements are traversed
 * @param c       the closure invoked once per element
 */
public static void forProjectDataReversedDo(ProjectData project, Closure c) {
    for (Object taskObject : project.getTasks()) {
        TaskData task = (TaskData) taskObject;
        CollectionUtils.forAllDo(task.getAssignments(), c);
        CollectionUtils.forAllDo(task.getPredecessors(), c);
        c.execute(task);
        // calendars? (task-level calendars are not visited here)
    }
    for (Object resourceObject : project.getResources()) {
        ResourceData resource = (ResourceData) resourceObject;
        c.execute(resource.getEnterpriseResource());
        c.execute(resource);
        // calendars? (resource-level calendars are not visited here)
    }
    if (project.getCalendar() != null) {
        c.execute(project.getCalendar());
        // base calendars to handle?
    }
    c.execute(project);
}

From source file:org.andromda.cartridges.gui.metafacades.GuiManageableEntityLogicImpl.java

/**
 * Retrieves the roles associated with this manageable entity.
 * <p>
 * Filters this element's target dependencies down to those whose source
 * element is a {@code Role}, maps each dependency to that source role, and
 * then widens the result with every specialization of each role found.
 *
 * @return allRoles an insertion-ordered, duplicate-free collection of roles
 * @see org.andromda.cartridges.gui.metafacades.GuiManageableEntity#getRoles()
 */
@Override
protected Collection<Role> handleGetRoles() {

    // copied from the Service <<Metafacade>>
    final Collection roles = new ArrayList(this.getTargetDependencies());

    // keep only non-null dependencies whose source element is a Role
    CollectionUtils.filter(roles, new Predicate() {

        @Override
        public boolean evaluate(final Object object) {

            final DependencyFacade dependency = (DependencyFacade) object;

            return (dependency != null) && (dependency.getSourceElement() instanceof Role);

        }

    });
    // replace each surviving dependency with its source element (the Role itself);
    // the collection stays raw because the element type changes in place here
    CollectionUtils.transform(roles, new Transformer() {

        @Override
        public Object transform(final Object object) {

            return ((DependencyFacade) object).getSourceElement();

        }

    });

    final Collection allRoles = new LinkedHashSet(roles);

    // add the specializations of every role found
    // NOTE(review): the original comment said "generalizations" but the code
    // calls getAllSpecializations() - confirm which direction is intended
    CollectionUtils.forAllDo(roles, new Closure() {

        @Override
        public void execute(final Object object) {

            allRoles.addAll(((Role) object).getAllSpecializations());

        }

    });

    return allRoles;

}

From source file:org.apache.archiva.repository.scanner.RepositoryContentConsumers.java

/**
 * A convenience method to execute all of the active selected consumers for a
 * particular arbitrary file.
 * NOTE: Make sure that there is no repository scanning task executing before invoking this so as to prevent
 * the index writer/reader of the current index-content consumer executing from getting closed. For an example,
 * see ArchivaDavResource#executeConsumers( File ).
 *
 * @param repository             the repository configuration to use.
 * @param localFile              the local file to execute the consumers against.
 * @param updateRelatedArtifacts when {@code false}, the "create-missing-checksums" and
 *                               "metadata-updater" consumers are skipped (see MRM-1212/MRM-1197 below)
 * @throws RepositoryAdminException if the selected consumers cannot be resolved
 */
public void executeConsumers(ManagedRepository repository, File localFile, boolean updateRelatedArtifacts)
        throws RepositoryAdminException {
    List<KnownRepositoryContentConsumer> selectedKnownConsumers = null;
    // Run the repository consumers
    try {
        Closure triggerBeginScan = new TriggerBeginScanClosure(repository, getStartTime(), false);

        selectedKnownConsumers = getSelectedKnownConsumers();

        // MRM-1212/MRM-1197
        // - do not create missing/fix invalid checksums and update metadata when deploying from webdav since these are uploaded by maven
        if (!updateRelatedArtifacts) {
            // iterate over a copy so removing from the live list is safe
            List<KnownRepositoryContentConsumer> clone = new ArrayList<>();
            clone.addAll(selectedKnownConsumers);

            for (KnownRepositoryContentConsumer consumer : clone) {
                if (consumer.getId().equals("create-missing-checksums")
                        || consumer.getId().equals("metadata-updater")) {
                    selectedKnownConsumers.remove(consumer);
                }
            }
        }

        List<InvalidRepositoryContentConsumer> selectedInvalidConsumers = getSelectedInvalidConsumers();
        // notify every consumer (known and invalid) that a scan is beginning
        CollectionUtils.forAllDo(selectedKnownConsumers, triggerBeginScan);
        CollectionUtils.forAllDo(selectedInvalidConsumers, triggerBeginScan);

        // yuck. In case you can't read this, it says
        // "process the file if the consumer has it in the includes list, and not in the excludes list"
        BaseFile baseFile = new BaseFile(repository.getLocation(), localFile);
        ConsumerWantsFilePredicate predicate = new ConsumerWantsFilePredicate(repository);
        predicate.setBasefile(baseFile);
        predicate.setCaseSensitive(false);

        ConsumerProcessFileClosure closure = new ConsumerProcessFileClosure();
        closure.setBasefile(baseFile);
        closure.setExecuteOnEntireRepo(false);

        Closure processIfWanted = IfClosure.getInstance(predicate, closure);

        CollectionUtils.forAllDo(selectedKnownConsumers, processIfWanted);

        // the predicate counts how many known consumers accepted the file
        if (predicate.getWantedFileCount() <= 0) {
            // Nothing known processed this file.  It is invalid!
            CollectionUtils.forAllDo(selectedInvalidConsumers, closure);
        }

        TriggerScanCompletedClosure scanCompletedClosure = new TriggerScanCompletedClosure(repository, false);

        // NOTE(review): only the known consumers receive the scan-completed
        // trigger here; the invalid consumers do not - confirm this is intended
        CollectionUtils.forAllDo(selectedKnownConsumers, scanCompletedClosure);
    } finally {
        /* TODO: This is never called by the repository scanner instance, so not calling here either - but it probably should be?
                CollectionUtils.forAllDo( availableKnownConsumers, triggerCompleteScan );
                CollectionUtils.forAllDo( availableInvalidConsumers, triggerCompleteScan );
        */
        // always release the known consumers, even when processing throws
        releaseSelectedKnownConsumers(selectedKnownConsumers);
    }
}

From source file:org.apache.archiva.repository.scanner.RepositoryScannerInstance.java

/**
 * Creates a scanner instance for a single managed repository: wires up the
 * per-file processing closure and acceptance predicate, initializes the scan
 * statistics, and fires the begin-scan trigger on every supplied consumer.
 *
 * @param repository          the repository to be scanned
 * @param knownConsumerList   consumers for recognized repository content
 * @param invalidConsumerList consumers for unrecognized repository content
 */
public RepositoryScannerInstance(ManagedRepository repository,
        List<KnownRepositoryContentConsumer> knownConsumerList,
        List<InvalidRepositoryContentConsumer> invalidConsumerList) {
    this.repository = repository;
    this.knownConsumers = knownConsumerList;
    this.invalidConsumers = invalidConsumerList;

    // per-consumer timing and count maps, shared with the process-file closure
    consumerTimings = new HashMap<>();
    consumerCounts = new HashMap<>();

    this.consumerProcessFile = new ConsumerProcessFileClosure();
    consumerProcessFile.setExecuteOnEntireRepo(true);
    consumerProcessFile.setConsumerTimings(consumerTimings);
    consumerProcessFile.setConsumerCounts(consumerCounts);

    this.consumerWantsFile = new ConsumerWantsFilePredicate(repository);

    stats = new RepositoryScanStatistics();
    stats.setRepositoryId(repository.getId());

    Closure triggerBeginScan = new TriggerBeginScanClosure(repository, new Date(System.currentTimeMillis()),
            true);

    // notify all consumers (known and invalid) that the scan is starting
    CollectionUtils.forAllDo(knownConsumerList, triggerBeginScan);
    CollectionUtils.forAllDo(invalidConsumerList, triggerBeginScan);

    // Windows filesystems are case-insensitive, so match file names accordingly
    if (SystemUtils.IS_OS_WINDOWS) {
        consumerWantsFile.setCaseSensitive(false);
    }
}

From source file:org.apache.archiva.repository.scanner.RepositoryScannerInstance.java

/**
 * Callback invoked for every file encountered during the directory walk.
 * Updates the scan statistics and routes the file to the consumers.
 *
 * @param percentage walk progress as reported by the walker
 * @param file       the file currently being visited
 */
@Override
public void directoryWalkStep(int percentage, File file) {
    log.debug("Walk Step: {}, {}", percentage, file);

    stats.increaseFileCount();

    // consume files regardless - the predicate will check the timestamp
    BaseFile basefile = new BaseFile(repository.getLocation(), file);

    // Timestamp finished points to the last successful scan, not this current one.
    if (file.lastModified() >= changesSince) {
        stats.increaseNewFileCount();
    }

    // point the shared closure and predicate at the current file
    consumerProcessFile.setBasefile(basefile);
    consumerWantsFile.setBasefile(basefile);

    // run each known consumer only if its include/exclude rules accept the file
    Closure processIfWanted = IfClosure.getInstance(consumerWantsFile, consumerProcessFile);
    CollectionUtils.forAllDo(this.knownConsumers, processIfWanted);

    if (consumerWantsFile.getWantedFileCount() <= 0) {
        // Nothing known processed this file.  It is invalid!
        CollectionUtils.forAllDo(this.invalidConsumers, consumerProcessFile);
    }
}

From source file:org.apache.archiva.repository.scanner.RepositoryScannerInstance.java

/**
 * Callback invoked once the directory walk completes: fires the scan-completed
 * trigger on every consumer, publishes the collected per-consumer timings and
 * counts into the statistics, and finalizes the scan report.
 */
@Override
public void directoryWalkFinished() {
    // notify all consumers (known and invalid) that the scan has finished
    TriggerScanCompletedClosure scanCompletedClosure = new TriggerScanCompletedClosure(repository, true);
    CollectionUtils.forAllDo(knownConsumers, scanCompletedClosure);
    CollectionUtils.forAllDo(invalidConsumers, scanCompletedClosure);

    stats.setConsumerTimings(consumerTimings);
    stats.setConsumerCounts(consumerCounts);

    log.info("Walk Finished: [{}] {}", this.repository.getId(), this.repository.getLocation());
    stats.triggerFinished();
}

From source file:org.apache.archiva.rest.services.DefaultArchivaAdministrationService.java

/**
 * Builds the list of "known content" consumers as admin views, marking which
 * ones are currently enabled, sorted with the shared comparator.
 *
 * @return sorted admin views of every available known-content consumer
 * @throws ArchivaRestServiceException when the repository administration layer fails
 */
@Override
public List<AdminRepositoryConsumer> getKnownContentAdminRepositoryConsumers()
        throws ArchivaRestServiceException {
    try {
        final AddAdminRepoConsumerClosure collector = new AddAdminRepoConsumerClosure(
                archivaAdministration.getKnownContentConsumers());
        CollectionUtils.forAllDo(repoConsumerUtil.getAvailableKnownConsumers(), collector);

        final List<AdminRepositoryConsumer> result = collector.getList();
        Collections.sort(result, AdminRepositoryConsumerComparator.getInstance());
        return result;
    } catch (RepositoryAdminException e) {
        // surface the admin failure as a REST-layer exception, preserving the cause
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }
}

From source file:org.apache.archiva.rest.services.DefaultArchivaAdministrationService.java

/**
 * Builds the list of "invalid content" consumers as admin views, marking which
 * ones are currently enabled, sorted with the shared comparator.
 *
 * @return sorted admin views of every available invalid-content consumer
 * @throws ArchivaRestServiceException when the repository administration layer fails
 */
@Override
public List<AdminRepositoryConsumer> getInvalidContentAdminRepositoryConsumers()
        throws ArchivaRestServiceException {
    try {
        final AddAdminRepoConsumerClosure collector = new AddAdminRepoConsumerClosure(
                archivaAdministration.getInvalidContentConsumers());
        CollectionUtils.forAllDo(repoConsumerUtil.getAvailableInvalidConsumers(), collector);

        final List<AdminRepositoryConsumer> result = collector.getList();
        Collections.sort(result, AdminRepositoryConsumerComparator.getInstance());
        return result;
    } catch (RepositoryAdminException e) {
        // surface the admin failure as a REST-layer exception, preserving the cause
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }
}

From source file:org.betaconceptframework.astroboa.engine.jcr.io.SerializationBean.java

/**
 * Serializes the objects matching the given criteria together with every entity
 * they depend on, in a fixed order: repository users, taxonomies, topics
 * (root-to-leaf), and finally the objects themselves. Expected totals are
 * recorded on the report before any serialization starts.
 *
 * @param contentObjectCriteria query criteria selecting the objects; a limit of 0
 *                              means nothing is serialized
 * @param serializer            the serializer receiving the nodes
 * @param session               the JCR session used to resolve nodes
 * @param serializationReport   report updated with expected entity totals
 * @throws Exception if querying or serialization fails
 */
private void serializeObjectsAndTheirDependencies(ContentObjectCriteria contentObjectCriteria,
        Serializer serializer, Session session, SerializationReport serializationReport) throws Exception {

    serializer.setDefinitionServiceDao(definitionServiceDao);

    CmsQueryResult cmsQueryResult = cmsQueryHandler.getNodesFromXPathQuery(session, contentObjectCriteria,
            false);

    CmsScoreNodeIterator nodeIterator = new CmsScoreNodeIteratorUsingJcrRangeIterator(
            cmsQueryResult.getRowIterator());

    if (contentObjectCriteria.getLimit() != 0) { // 0 denotes that no resource will be serialized

        // dependency accumulators, keyed by entity identifier
        Map<String, Node> repositoryUserNodes = new HashMap<String, Node>();
        Map<String, Node> taxonomyNodes = new HashMap<String, Node>();
        Map<String, Node> objectNodes = new HashMap<String, Node>();

        //We use a list because we serialize all topic hierarchy up to its taxonomy
        //Therefore we must serialize all root topics and then all first level topics etc. 
        // (TreeMap keyed by depth keeps the levels in ascending order)
        Map<Integer, Map<String, Node>> topicNodesStack = new TreeMap<Integer, Map<String, Node>>();

        // first pass: collect every matched object plus its dependencies
        while (nodeIterator.hasNext()) {

            Node objectNode = retrieveNextObjectNode(contentObjectCriteria, nodeIterator);

            findObjectDependencies(objectNode, repositoryUserNodes, taxonomyNodes, topicNodesStack, objectNodes,
                    session);

            objectNodes.put(cmsRepositoryEntityUtils.getCmsIdentifier(objectNode), objectNode);
        }

        //Set total numbers first and then serialize
        if (!repositoryUserNodes.isEmpty()) {
            ((SerializationReportImpl) serializationReport)
                    .setTotalNumberOfRepositoryUsersToBeSerialized(repositoryUserNodes.size());
            ((SerializationReportImpl) serializationReport)
                    .setTotalNumberOfTaxonomiesToBeSerialized(repositoryUserNodes.size()); //Each user has a folksonomy
            ((SerializationReportImpl) serializationReport)
                    .setTotalNumberOfSpacesToBeSerialized(repositoryUserNodes.size()); //Each user has one root private space
        }

        if (!taxonomyNodes.isEmpty()) {
            //If at least one repositoryUser is serialized then at least one folksonomy is serialized as well
            // (adds to the folksonomy count set above, so ordering of these two blocks matters)
            ((SerializationReportImpl) serializationReport).setTotalNumberOfTaxonomiesToBeSerialized(
                    serializationReport.getTotalNumberOfTaxonomies() + taxonomyNodes.size());
        }

        if (!topicNodesStack.isEmpty()) {
            // count topics across all depth levels of the stack
            TopicCounterClosure topicCounter = new TopicCounterClosure();

            CollectionUtils.forAllDo(topicNodesStack.values(), topicCounter);

            ((SerializationReportImpl) serializationReport)
                    .setTotalNumberOfTopicsToBeSerialized(topicCounter.getTotalNumberOfTopicsToSerialize());
        }

        if (!objectNodes.isEmpty()) {
            ((SerializationReportImpl) serializationReport)
                    .setTotalNumberOfObjectsToBeSerialized(objectNodes.size());
        }

        //Serialize all nodes in THIS ORDER
        if (!repositoryUserNodes.isEmpty()) {
            serializeNodesRepresentingRepositoryUsers(repositoryUserNodes, serializer, session);
        }

        if (!taxonomyNodes.isEmpty()) {
            serializeNodesRepresentingTaxonomies(taxonomyNodes, serializer, session);
        }

        if (!topicNodesStack.isEmpty()) {
            serializeNodesRepresentingTopics(topicNodesStack, serializer, session);
        }

        if (!objectNodes.isEmpty()) {
            serializeNodesRepresentingObjects(objectNodes, serializer, session);
        }
    }
}

From source file:org.dasein.cloud.azure.platform.AzureSqlDatabaseSupport.java

/**
 * List supported database products/*from  ww  w .ja  va  2s .  co m*/
 *
 * @param forEngine database engine, e.g. MySQL, SQL Server EE, etc.
 * @return iteration of the database products supported by the engine
 * @throws org.dasein.cloud.CloudException
 * @throws org.dasein.cloud.InternalException
 * @since 2014.08 for consistency
 */
@Nonnull
@Override
public Iterable<DatabaseProduct> listDatabaseProducts(@Nonnull DatabaseEngine forEngine)
        throws CloudException, InternalException {
    if (forEngine == null)
        throw new InternalException(
                "Please specify the DatabaseEngine for which you want to retrieve the products.");

    if (!forEngine.name().toString().equalsIgnoreCase("sqlserver_ee"))
        return Arrays.asList();

    ServerServiceResourceModel.Version versionResult = getSubscriptionVersionProducts();

    final ArrayList<DatabaseProduct> products = new ArrayList<DatabaseProduct>();
    CollectionUtils.forAllDo(versionResult.getEditions(), new Closure() {
        @Override
        public void execute(Object input) {
            ServerServiceResourceModel.Edition edition = (ServerServiceResourceModel.Edition) input;
            for (ServerServiceResourceModel.ServiceLevelObjective serviceLevelObjective : edition
                    .getServiceLevelObjectives()) {
                DatabaseProduct product = new DatabaseProduct(serviceLevelObjective.getName(),
                        edition.getName());
                product.setProviderDataCenterId(provider.getDataCenterId(provider.getContext().getRegionId()));
                product.setEngine(DatabaseEngine.SQLSERVER_EE);
                product.setLicenseModel(DatabaseLicenseModel.LICENSE_INCLUDED);
                products.add(product);
            }
        }
    });

    return products;
}