Example usage for com.google.common.base Predicates.and

Introduction

On this page you can find example usage for com.google.common.base Predicates.and.

Prototype

public static <T> Predicate<T> and(Predicate<? super T> first, Predicate<? super T> second) 

Document

Returns a predicate that evaluates to true if both of its components evaluate to true.
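
A minimal, self-contained sketch of the two-argument overload is shown below; the class name, sample data, and component predicates are illustrative and not taken from any of the projects listed under Usage.

import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

import java.util.Arrays;
import java.util.List;

public class PredicatesAndExample {
    public static void main(String[] args) {
        // First component: accepts non-null, non-empty strings.
        Predicate<String> nonEmpty = new Predicate<String>() {
            @Override
            public boolean apply(String input) {
                return input != null && !input.isEmpty();
            }
        };
        // Second component: accepts strings starting with "a".
        Predicate<String> startsWithA = new Predicate<String>() {
            @Override
            public boolean apply(String input) {
                return input != null && input.startsWith("a");
            }
        };

        // The composed predicate is true only when both components are true.
        Predicate<String> both = Predicates.and(nonEmpty, startsWithA);

        List<String> values = Arrays.asList("apple", "", "banana", "avocado");
        // Keeps "apple" and "avocado"; rejects "" and "banana".
        System.out.println(Lists.newArrayList(Iterables.filter(values, both)));
    }
}

As in the project examples below, the composed predicate short-circuits: the second component is not evaluated once the first one returns false.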

Usage

From source file:com.eucalyptus.cluster.callback.reporting.CloudWatchHelper.java

private static Predicate<AbsoluteMetricQueueItem> withMetric(final String namespace, final String name,
        final String dimensionName, final String dimensionValue) {
    return new Predicate<AbsoluteMetricQueueItem>() {
        private final Predicate<MetricDatum> metricDatumPredicate = Predicates.and(
                name == null ? Predicates.<MetricDatum>alwaysTrue() : withMetric(name),
                withMetricDimension(dimensionName, dimensionValue));

        @Override
        public boolean apply(@Nullable final AbsoluteMetricQueueItem queueItem) {
            return queueItem != null && namespace.equals(queueItem.getNamespace())
                    && queueItem.getMetricDatum() != null
                    && metricDatumPredicate.apply(queueItem.getMetricDatum());
        }
    };
}

From source file:org.eclipse.sirius.diagram.ui.tools.api.figure.locator.DBorderItemLocator.java

/**
 * Get the figures of the brother's border nodes of
 * <code>targetBorderItem</code>.
 *
 * @param targetBorderItem
 *            Contextual border item.
 * @return The list of figures of the brother border nodes.
 */
protected List<IFigure> getBrotherFigures(final IFigure targetBorderItem) {
    @SuppressWarnings("unchecked")
    Iterable<IFigure> brotherFigures = Iterables.filter(targetBorderItem.getParent().getChildren(), Predicates
            .and(Predicates.instanceOf(IFigure.class), Predicates.not(Predicates.equalTo(targetBorderItem))));
    return Lists.newArrayList(brotherFigures);
}

From source file:com.eucalyptus.compute.vpc.VpcManager.java

public CreateSubnetResponseType createSubnet(final CreateSubnetType request) throws EucalyptusCloudException {
    final CreateSubnetResponseType reply = request.getReply();
    final Context ctx = Contexts.lookup();
    final AccountFullName accountFullName = ctx.getUserFullName().asAccountFullName();
    final String vpcId = Identifier.vpc.normalize(request.getVpcId());
    final Optional<String> availabilityZone = Iterables.tryFind(Clusters.getInstance().listValues(),
            Predicates.and(
                    request.getAvailabilityZone() == null ? Predicates.<RestrictedType>alwaysTrue()
                            : CollectionUtils.propertyPredicate(request.getAvailabilityZone(),
                                    CloudMetadatas.toDisplayName()),
                    RestrictedTypes.filterPrivilegedWithoutOwner()))
            .transform(CloudMetadatas.toDisplayName());
    final Optional<Cidr> subnetCidr = Cidr.parse().apply(request.getCidrBlock());
    if (!subnetCidr.isPresent()) {
        throw new ClientComputeException("InvalidParameterValue", "Cidr invalid: " + request.getCidrBlock());
    }
    if (!availabilityZone.isPresent()) {
        throw new ClientComputeException("InvalidParameterValue",
                "Availability zone invalid: " + request.getAvailabilityZone());
    }
    final Supplier<Subnet> allocator = new Supplier<Subnet>() {
        @Override
        public Subnet get() {
            try {
                final Vpc vpc = vpcs.lookupByName(accountFullName, vpcId, Functions.<Vpc>identity());
                final Iterable<Subnet> subnetsInVpc = subnets.listByExample(
                        Subnet.exampleWithOwner(accountFullName), CollectionUtils
                                .propertyPredicate(vpc.getDisplayName(), Subnets.FilterStringFunctions.VPC_ID),
                        Functions.<Subnet>identity());
                if (Iterables.size(subnetsInVpc) >= VpcConfiguration.getSubnetsPerVpc()) {
                    throw new ClientComputeException("SubnetLimitExceeded",
                            "Subnet limit exceeded for " + vpc.getDisplayName());
                }
                if (!Cidr.parse(vpc.getCidr()).contains(subnetCidr.get())) {
                    throw new ClientComputeException("InvalidParameterValue",
                            "Cidr not valid for vpc " + request.getCidrBlock());
                }
                final Iterable<Cidr> existingCidrs = Iterables.transform(subnetsInVpc,
                        Functions.compose(Cidr.parseUnsafe(), Subnets.FilterStringFunctions.CIDR));
                if (Iterables.any(existingCidrs, subnetCidr.get().contains())
                        || Iterables.any(existingCidrs, subnetCidr.get().containedBy())) {
                    throw new ClientComputeException("InvalidSubnet.Conflict",
                            "Cidr conflict for " + request.getCidrBlock());
                }
                final NetworkAcl networkAcl = networkAcls.lookupDefault(vpc.getDisplayName(),
                        Functions.<NetworkAcl>identity());
                return subnets.save(Subnet.create(ctx.getUserFullName(), vpc, networkAcl,
                        Identifier.subnet.generate(), request.getCidrBlock(), availabilityZone.get()));
            } catch (VpcMetadataNotFoundException ex) {
                throw Exceptions.toUndeclared(new ClientComputeException("InvalidVpcID.NotFound",
                        "Vpc not found '" + request.getVpcId() + "'"));
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        }
    };
    reply.setSubnet(allocate(allocator, Subnet.class, SubnetType.class));
    invalidate(reply.getSubnet().getSubnetId());
    return reply;
}

From source file:com.eucalyptus.vm.VmInstances.java

/**
 * List instances that are not done and match the given owner/predicate.
 *
 * @param ownerFullName The owning user or account
 * @param predicate The predicate to match
 * @return The matching instances
 * @see VmStateSet#DONE
 */
public static List<VmInstance> list(@Nullable OwnerFullName ownerFullName,
        @Nullable Predicate<? super VmInstance> predicate) {
    return list(ownerFullName, Restrictions.not(criterion(VmStateSet.DONE.array())),
            Collections.<String, String>emptyMap(), Predicates.and(VmStateSet.DONE.not(), predicate));
}

From source file:com.eucalyptus.vm.VmInstances.java

/**
 * List instances in any state that match the given parameters.
 */
public static List<VmInstance> list(@Nullable final OwnerFullName ownerFullName, final Criterion criterion,
        final Map<String, String> aliases, @Nullable final Predicate<? super VmInstance> predicate) {
    return list(new Supplier<List<VmInstance>>() {
        @Override
        public List<VmInstance> get() {
            return Entities.query(VmInstance.named(ownerFullName, null), false, criterion, aliases);
        }
    }, Predicates.and(RestrictedTypes.filterByOwner(ownerFullName), checkPredicate(predicate)));
}

From source file:org.eclipse.papyrus.uml.diagram.activity.activitygroup.GroupRequestAdvisor.java

protected Multimap<EReference, IGroupNotifier> fillReqestWithReferendedElement(IGroupRequest request,
        boolean lookingForParent, boolean onlyContainment) {
    final Rectangle newBounds = getInitalTargetRequestNewBounds(request);
    final Multimap<EReference, IGroupNotifier> result = ArrayListMultimap.create();
    if (request.getNodeDescpitor() == null) {
        return result;
    }
    List<EReference> references = null;
    if (lookingForParent) {
        references = request.getNodeDescpitor().getParentReferences();
    } else {
        references = request.getNodeDescpitor().getChildrenReferences();
    }
    final Multimap<EReference, IGroupNotifier> auxResult = ArrayListMultimap.create();
    final Multimap<EReference, Element> eReferenceLookedForMap = ArrayListMultimap.create();
    getReferenceElements(request, newBounds, references, eReferenceLookedForMap, auxResult, lookingForParent,
            onlyContainment,
            lookingForParent ? request.getNodeDescpitor().getParentEOppositeReferences() : null);
    /*
     * Filter ancestors
     */
    for (EReference ref : eReferenceLookedForMap.keySet()) {
        /*
         * Filter descendant
         * Example :
         * 1 - ActPart1 include in Act1 then Act1 disappear
         * 2 - ActPart1 include in ActPart2 then ActPart1 disappear
         */
        Object adapter = request.getTargetElement().getAdapter(EObject.class);
        if (adapter instanceof Element) {
            Element element = (Element) adapter;
            Predicate<Element> composedPredicate = Predicates.and(new SameContainerFilter(element),
                    lookingForParent ? new DescendantsFilter(eReferenceLookedForMap.values())
                            : new AncestorFilter(eReferenceLookedForMap.values()));
            Collection<Element> filteredCollection = Collections2.filter(eReferenceLookedForMap.get(ref),
                    composedPredicate);
            if (lookingForParent) {
                request.getParentEReferenceMap().putAll(ref, filteredCollection);
            } else {
                request.getChildrenEReferenceMap().putAll(ref, filteredCollection);
            }
        }
    }
    for (EReference ref : auxResult.keySet()) {
        /*
         * Filter descendant
         * Example :
         * 1 - ActPart1 include in Act1 then Act1 disappear
         * 2 - ActPart1 include in ActPart2 then ActPart1 disappear
         */
        Iterable<IGroupNotifier> resultCollection = Iterables.filter(auxResult.get(ref),
                new DescendantsFilterIGroupNotifier(auxResult.values()));
        result.putAll(ref, resultCollection);
    }
    return result;
}

From source file:ezbake.security.service.processor.EzSecurityHandler.java

protected Set<String> filterCommunityAuthorizations(Set<String> auths, Set<String> appAuths,
        Set<String> targetAppAuths, List<String> chain) throws AppNotRegisteredException {

    // Set up the filter by intersecting all apps
    if (targetAppAuths != null) {
        appAuths.retainAll(targetAppAuths);
    }
    if (chain != null) {
        for (String securityIdLink : chain) {
            appAuths.addAll(Sets.newHashSet(
                    fetchApplication(securityIdLink).getRegistration().getCommunityAuthorizations()));
        }
    }

    auths = Sets.intersection(auths, appAuths);
    return Sets.filter(auths, Predicates.and(Predicates.notNull(), Predicates.not(Predicates.equalTo(""))));
}

From source file:ezbake.security.service.processor.EzSecurityHandler.java

protected Set<String> filterAuthorizations(Set<String> auths, Set<String> appAuths, Set<String> targetAppAuths,
        Set<String> preFiltered, List<String> chain) throws AppNotRegisteredException {

    if (preFiltered != null) {
        appAuths = Sets.intersection(appAuths, preFiltered);
    } else if (chain != null && !chain.isEmpty()) {
        appAuths = getAuthorizationsFromChain(Sets.newHashSet(chain), appAuths);
    }

    auths = Sets.intersection(auths, appAuths);
    if (targetAppAuths != null) {
        auths = Sets.intersection(auths, targetAppAuths);
    }

    return Sets.filter(auths, Predicates.and(Predicates.notNull(), Predicates.not(Predicates.equalTo(""))));
}

From source file:edu.byu.nlp.al.app.CrowdsourcingActiveMeasurement.java

private static Dataset readData(RandomGenerator rnd, int featureNormalizationConstant) throws IOException {
    // transforms per dataset
    Function<String, String> docTransform = null;
    Function<String, String> tokenTransform = null;
    switch (datasetType) {
    // simulated datasets need no transform
    case NB2:
    case NB20:
        // pre-processed datasets need no transform
    case R8:
    case R52:
    case CADE12:
    case WEBKB:
    case NG:
    case JSON_VEC:
    case INDEXED_VEC:
        break;
    // Web Pages
    case COMPANIES:
        tokenTransform = Functions2.compose(new ShortWordFilter(2), new PorterStemmer(),
                StopWordRemover.malletStopWordRemover());
        break;
    // tweets
    case TWITTER:
        // preserve tweeted emoticons as text
        docTransform = new EmoticonTransformer();
        // order of ops is from bottom up
        tokenTransform = Functions2.compose(new ShortWordFilter(1), new PorterStemmer(),
                StopWordRemover.twitterStopWordRemover());
        break;
    case WEATHER:
        // preserve tweeted emoticons as text
        docTransform = new EmoticonTransformer();
        // order of ops is from bottom up
        tokenTransform = Functions2.compose(new ShortWordFilter(1), new PorterStemmer(),
                StopWordRemover.twitterStopWordRemover()
        //          StopWordRemover.fromWords(Sets.newHashSet("weather"))
        );
        break;
    // email 
    case ENRON:
    case NEWSGROUPS:
    case CFGROUPS1000:
    case REUTERS:
        docTransform = new EmailHeaderStripper();
        // order of ops is from bottom up
        tokenTransform = Functions2.compose(new ShortWordFilter(2), new PorterStemmer(),
                StopWordRemover.malletStopWordRemover());
        break;
    default:
        throw new IllegalStateException("unknown dataset type: " + datasetType);
    }
    // -1 => null
    Integer featureNormalizer = featureNormalizationConstant < 0 ? null : featureNormalizationConstant;

    // data reader pipeline per dataset
    // build a dataset, doing all the tokenizing, stopword removal, and feature normalization
    Dataset data;
    switch (datasetType) {
    // json annotation stream
    case CFGROUPS1000:
    case WEATHER:
    case TWITTER:
    case COMPANIES:
        data = new JSONDocumentDatasetBuilder(basedir, dataset, docTransform,
                DocPipes.opennlpSentenceSplitter(), DocPipes.McCallumAndNigamTokenizer(), tokenTransform,
                FeatureSelectorFactories.conjoin(
                        new CountCutoffFeatureSelectorFactory<String>(featureCountCutoff),
                        (topNFeaturesPerDocument < 0) ? null
                                : new TopNPerDocumentFeatureSelectorFactory(topNFeaturesPerDocument)),
                featureNormalizer).dataset();
        break;
    case ENRON:
    case NB2:
    case NB20:
    case R8:
    case R52:
    case NG:
    case WEBKB:
    case CADE12:
    case NEWSGROUPS:
    case REUTERS:
        data = new DocumentDatasetBuilder(basedir, dataset, split, docTransform,
                DocPipes.opennlpSentenceSplitter(), DocPipes.McCallumAndNigamTokenizer(), tokenTransform,
                FeatureSelectorFactories.conjoin(
                        new CountCutoffFeatureSelectorFactory<String>(featureCountCutoff),
                        (topNFeaturesPerDocument < 0) ? null
                                : new TopNPerDocumentFeatureSelectorFactory(topNFeaturesPerDocument)),
                featureNormalizer).dataset();
        break;
    case INDEXED_VEC:
        data = new VectorDocumentDatasetBuilder(basedir, dataset, split).dataset();
        break;
    case JSON_VEC:
        data = new JSONVectorDocumentDatasetBuilder(basedir, dataset).dataset();
        break;
    default:
        throw new IllegalStateException("unknown dataset type: " + datasetType);
    }

    //    // randomize order 
    //    data.shuffle(rnd);

    // Postprocessing: remove all documents with duplicate sources or empty features
    data = Datasets.filteredDataset(data,
            Predicates.and(Datasets.filterDuplicateSources(), Datasets.filterNonEmpty()));

    logger.info("\nDataset on import: \n" + Datasets.summaryOf(data, 1));

    //    for (DatasetInstance inst: data){
    //      Preconditions.checkState(inst.asFeatureVector().sum()>0,"document "+inst.getInfo().getSource()+" was empty");
    //      
    //      // print document data to make sure import didn't mess things up
    //      System.out.println(inst.getInfo().getSource()+": "+Datasets.wordsIn(inst, data.getInfo().getFeatureIndexer()));
    //    }

    return data;
}

From source file:org.apache.flex.compiler.internal.scopes.ASScope.java

/**
 * Version of findProperty that determines the results based on the namespace set passed in,
 * along with any additional constraints passed in via the {@link Predicate}.
 *
 *
 * @param project       The {@link CompilerProject} to resolve things in
 * @param baseName      The name to find
 * @param additional    Any additional constraints on the lookup.  This predicate will
 *                      run before any namespace checking occurs.
 * @param namespaceSet  The Namespace set to use for the lookup
 * @param dt            The dependency type to introduce if this resolves to something from
 *                      another compilation unit
 * @return              a List of IDefinition that matched the name, namespace set, and any
 *                      additional constraints specified by the predicate.
 */
private List<IDefinition> findProperty(CompilerProject project, String baseName,
        Predicate<IDefinition> additional, Set<INamespaceDefinition> namespaceSet, DependencyType dt,
        boolean findAll) {
    ArrayList<IDefinition> defs = new ArrayList<IDefinition>(1);
    NamespaceSetPredicate nsPred = new NamespaceSetPredicate(project, namespaceSet);
    Predicate<IDefinition> pred = Predicates.and(additional, nsPred);
    FilteredCollection<IDefinition> filteredCollection = new FilteredCollection<IDefinition>(pred, defs);
    findProperty(filteredCollection, project, baseName, nsPred, dt, findAll);
    return defs;
}