Example usage for com.google.common.base Predicates.compose

List of usage examples for com.google.common.base Predicates.compose

Introduction

On this page you can find example usage for com.google.common.base Predicates.compose.

Prototype

public static <A, B> Predicate<A> compose(Predicate<B> predicate, Function<A, ? extends B> function) 

Document

Returns the composition of a function and a predicate. For every x, the generated predicate returns predicate.apply(function.apply(x)).
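
A minimal sketch of the behavior (hypothetical names, not taken from any of the projects below): a predicate on Integer is composed with a String-to-Integer function, yielding a predicate on String.

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;

public class ComposeSketch {
    public static void main(String[] args) {
        // Function<A, B>: extract the length of a string.
        Function<String, Integer> length = new Function<String, Integer>() {
            @Override
            public Integer apply(String input) {
                return input.length();
            }
        };

        // Predicate<B>: true when the integer equals 3.
        Predicate<Integer> isThree = Predicates.equalTo(3);

        // compose(predicate, function) applies the function first, then the
        // predicate: isThree(length(s)).
        Predicate<String> hasLengthThree = Predicates.compose(isThree, length);

        System.out.println(hasLengthThree.apply("foo"));  // true
        System.out.println(hasLengthThree.apply("four")); // false
    }
}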

Usage

From source file:org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState.java
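
In this example, compose pairs the GET_NAME extraction function with a set-exclusion predicate, so base-state properties that have been overridden in the map are skipped before the modified values are appended.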

static Iterable<? extends PropertyState> getProperties(NodeState base, Map<String, PropertyState> properties,
        boolean copy) {
    if (!base.exists()) {
        return emptyList();
    } else if (properties.isEmpty()) {
        return base.getProperties(); // shortcut
    } else {
        if (copy) {
            properties = newHashMap(properties);
        }
        Predicate<PropertyState> predicate = Predicates.compose(not(in(properties.keySet())), GET_NAME);
        return concat(filter(base.getProperties(), predicate), filter(properties.values(), notNull()));
    }
}

From source file:net.derquinse.common.meta.MetaProperty.java
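
MetaProperty is itself a Function, so here compose is exposed directly: the property function (this) is paired with a predicate on the property value to produce a predicate on the containing type.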

/**
 * Returns the composition of this property function and a predicate on the value.
 * @return The composition of this function and the provided predicate.
 */
public final Predicate<C> compose(Predicate<? super T> predicate) {
    return Predicates.compose(predicate, this);
}

From source file:com.eucalyptus.autoscaling.tags.Tags.java
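
The lookup composes an equality check on the example tag's resource id with the Tags.resourceId() extraction function to filter the persisted tags.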

private static Tag lookup(final Tag example) throws AutoScalingMetadataNotFoundException {
    try {
        final List<Tag> result = Transactions.filter(example,
                Predicates.compose(Predicates.equalTo(example.getResourceId()), Tags.resourceId()));
        if (result.size() == 1) {
            return result.get(0);
        }
    } catch (Exception e) {
        throw new AutoScalingMetadataNotFoundException(
                "Failed to find tag: " + example.getKey() + " for " + example.getOwner(), e);
    }

    throw new AutoScalingMetadataNotFoundException(
            "Failed to find unique tag: " + example.getKey() + " for " + example.getOwner());
}

From source file:com.eucalyptus.tags.Tags.java
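
The same pattern as above in the general tagging module: the resource-id equality predicate is composed with Tags.resourceId() to narrow the candidate tags.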

private static Tag lookup(final Tag example) throws NoSuchMetadataException {
    try {
        final List<Tag> result = Transactions.filter(example,
                Predicates.compose(Predicates.equalTo(example.getResourceId()), Tags.resourceId()));
        if (result.size() == 1) {
            return result.get(0);
        }
    } catch (Exception e) {
        throw new NoSuchMetadataException(
                "Failed to find tag: " + example.getKey() + " for " + example.getOwner(), e);
    }

    throw new NoSuchMetadataException(
            "Failed to find unique tag: " + example.getKey() + " for " + example.getOwner());
}

From source file:com.isotrol.impe3.api.DevicesInPortal.java
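
A predicate on DeviceInPortal is built by composing equalTo(use) with the DeviceInPortal.USE extraction function; it is then used both to test for matches and to construct the filtered view.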

/**
 * Filters the collection by device name use.
 * @param use Use to filter.
 * @return The filtered collection.
 */
public final DevicesInPortal filterByUse(DeviceNameUse use) {
    checkNotNull(use, "Device name use");
    Predicate<DeviceInPortal> f = Predicates.compose(equalTo(use), DeviceInPortal.USE);
    if (Iterables.any(values(), f)) {
        return new Filter(this, f);
    } else {
        return EMPTY;
    }
}

From source file:com.android.builder.internal.packaging.IncrementalPackager.java
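
compose is used several times here to lift predicates on a file's status (REMOVED, NEW, CHANGED) or on its base file into predicates on PackagedFileUpdate by routing them through the corresponding extraction functions.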

/**
 * Updates files in the archive.
 *
 * @param updates the updates to perform
 * @throws IOException failed to update the archive
 */
private void updateFiles(@NonNull Set<PackagedFileUpdate> updates) throws IOException {
    Preconditions.checkNotNull(mApkCreator, "mApkCreator == null");

    Iterable<String> deletedPaths = Iterables.transform(
            Iterables.filter(updates,
                    Predicates.compose(Predicates.equalTo(FileStatus.REMOVED),
                            PackagedFileUpdate.EXTRACT_STATUS)),
            PackagedFileUpdate.EXTRACT_NAME);

    for (String deletedPath : deletedPaths) {
        mApkCreator.deleteFile(deletedPath);
    }

    Predicate<PackagedFileUpdate> isNewOrChanged = Predicates.compose(
            Predicates.or(Predicates.equalTo(FileStatus.NEW), Predicates.equalTo(FileStatus.CHANGED)),
            PackagedFileUpdate.EXTRACT_STATUS);

    Function<PackagedFileUpdate, File> extractBaseFile = Functions.compose(RelativeFile.EXTRACT_BASE,
            PackagedFileUpdate.EXTRACT_SOURCE);

    Iterable<PackagedFileUpdate> newOrChangedNonArchiveFiles = Iterables.filter(updates,
            Predicates.and(isNewOrChanged, Predicates.compose(Files.isDirectory(), extractBaseFile)));

    for (PackagedFileUpdate rf : newOrChangedNonArchiveFiles) {
        mApkCreator.writeFile(rf.getSource().getFile(), rf.getName());
    }

    Iterable<PackagedFileUpdate> newOrChangedArchiveFiles = Iterables.filter(updates,
            Predicates.and(isNewOrChanged, Predicates.compose(Files.isFile(), extractBaseFile)));

    Iterable<File> archives = Iterables.transform(newOrChangedArchiveFiles, extractBaseFile);
    Set<String> names = Sets
            .newHashSet(Iterables.transform(newOrChangedArchiveFiles, PackagedFileUpdate.EXTRACT_NAME));

    /*
     * Build the name map. The name of the file in the filesystem (or zip file) may not
     * match the name we want to package it as. See PackagedFileUpdate for more information.
     */
    Map<String, String> pathNameMap = Maps.newHashMap();
    for (PackagedFileUpdate archiveUpdate : newOrChangedArchiveFiles) {
        pathNameMap.put(archiveUpdate.getSource().getOsIndependentRelativePath(), archiveUpdate.getName());
    }

    for (File arch : Sets.newHashSet(archives)) {
        mApkCreator.writeZip(arch, pathNameMap::get, name -> !names.contains(name));
    }
}

From source file:brooklyn.entity.nosql.couchbase.CouchbaseClusterImpl.java
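
compose turns a numeric lessThan predicate on the configured quorum size into a predicate on the list of up-node addresses by going through a collection-size function; the result decides whether a connection URL is published.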

public void init() {
    log.info("Initializing the Couchbase cluster...");
    super.init();

    addEnricher(Enrichers.builder().transforming(COUCHBASE_CLUSTER_UP_NODES).from(this)
            .publishing(COUCHBASE_CLUSTER_UP_NODE_ADDRESSES).computing(new ListOfHostAndPort()).build());
    addEnricher(Enrichers.builder()
            .transforming(COUCHBASE_CLUSTER_UP_NODE_ADDRESSES)
            .from(this)
            .publishing(COUCHBASE_CLUSTER_CONNECTION_URL)
            .computing(IfFunctions
                    .<List<String>>ifPredicate(Predicates.compose(
                            MathPredicates.lessThan(getConfig(CouchbaseCluster.INITIAL_QUORUM_SIZE)),
                            CollectionFunctionals.sizeFunction(0)))
                    .value((String) null)
                    .defaultApply(Functionals.chain(
                            CollectionFunctionals.<String, List<String>>limit(4),
                            StringFunctions.joiner(","),
                            StringFunctions.formatter("http://%s/"))))
            .build());

    Map<? extends AttributeSensor<? extends Number>, ? extends AttributeSensor<? extends Number>> enricherSetup = ImmutableMap
            .<AttributeSensor<? extends Number>, AttributeSensor<? extends Number>>builder()
            .put(CouchbaseNode.OPS, CouchbaseCluster.OPS_PER_NODE)
            .put(CouchbaseNode.COUCH_DOCS_DATA_SIZE, CouchbaseCluster.COUCH_DOCS_DATA_SIZE_PER_NODE)
            .put(CouchbaseNode.COUCH_DOCS_ACTUAL_DISK_SIZE,
                    CouchbaseCluster.COUCH_DOCS_ACTUAL_DISK_SIZE_PER_NODE)
            .put(CouchbaseNode.EP_BG_FETCHED, CouchbaseCluster.EP_BG_FETCHED_PER_NODE)
            .put(CouchbaseNode.MEM_USED, CouchbaseCluster.MEM_USED_PER_NODE)
            .put(CouchbaseNode.COUCH_VIEWS_ACTUAL_DISK_SIZE,
                    CouchbaseCluster.COUCH_VIEWS_ACTUAL_DISK_SIZE_PER_NODE)
            .put(CouchbaseNode.CURR_ITEMS, CouchbaseCluster.CURR_ITEMS_PER_NODE)
            .put(CouchbaseNode.VB_REPLICA_CURR_ITEMS, CouchbaseCluster.VB_REPLICA_CURR_ITEMS_PER_NODE)
            .put(CouchbaseNode.COUCH_VIEWS_DATA_SIZE, CouchbaseCluster.COUCH_VIEWS_DATA_SIZE_PER_NODE)
            .put(CouchbaseNode.GET_HITS, CouchbaseCluster.GET_HITS_PER_NODE)
            .put(CouchbaseNode.CMD_GET, CouchbaseCluster.CMD_GET_PER_NODE)
            .put(CouchbaseNode.CURR_ITEMS_TOT, CouchbaseCluster.CURR_ITEMS_TOT_PER_NODE).build();

    for (AttributeSensor<? extends Number> nodeSensor : enricherSetup.keySet()) {
        addSummingMemberEnricher(nodeSensor);
        addAveragingMemberEnricher(nodeSensor, enricherSetup.get(nodeSensor));
    }

    addEnricher(Enrichers.builder().updatingMap(Attributes.SERVICE_NOT_UP_INDICATORS)
            .from(IS_CLUSTER_INITIALIZED).computing(IfFunctions.ifNotEquals(true)
                    .value("The cluster is not yet completely initialized").defaultValue(null).build())
            .build());
}

From source file:com.isotrol.impe3.api.DevicesInPortal.java
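
The exclusion filter composes not(equalTo(device)) with the DeviceInPortal.DEVICE extraction function.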

/**
 * Excludes one device from the collection.
 * @param device Device to exclude.
 * @return The filtered collection.
 */
public final DevicesInPortal excludeDevice(Device device) {
    checkNotNull(device, "Null devices not allowed");
    if (!containsDevice(device)) {
        return this;
    }
    if (size() == 1) {
        return EMPTY;
    }
    return new Filter(this, Predicates.compose(not(equalTo(device)), DeviceInPortal.DEVICE));
}

From source file:org.apache.aurora.scheduler.sla.MetricCalculator.java
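
Production and non-production tasks are separated by composing predicates on ITaskConfig with the Tasks::getConfig extraction function.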

@Timed("sla_stats_computation")
@Override
public void run() {
    FluentIterable<IScheduledTask> tasks = FluentIterable
            .from(Storage.Util.fetchTasks(storage, Query.unscoped()));

    List<IScheduledTask> prodTasks = tasks
            .filter(Predicates.compose(Predicates.and(ITaskConfig::isProduction, IS_SERVICE), Tasks::getConfig))
            .toList();

    List<IScheduledTask> nonProdTasks = tasks.filter(Predicates
            .compose(Predicates.and(Predicates.not(ITaskConfig::isProduction), IS_SERVICE), Tasks::getConfig))
            .toList();

    long nowMs = clock.nowMillis();
    Range<Long> timeRange = Range.closedOpen(nowMs - settings.refreshRateMs, nowMs);

    runAlgorithms(prodTasks, settings.prodMetrics, timeRange, NAME_QUALIFIER_PROD);
    runAlgorithms(nonProdTasks, settings.nonProdMetrics, timeRange, NAME_QUALIFIER_NON_PROD);
}

From source file:org.apache.brooklyn.util.text.StringPredicates.java
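
An Object-level predicate is obtained by composing startsWith(prefix) with toStringFunction(), guarded by an instanceOf(CharSequence.class) check.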

/** true if the object *is* a {@link CharSequence} starting with the given prefix */
public static Predicate<Object> isStringStartingWith(final String prefix) {
    return Predicates.<Object>and(Predicates.instanceOf(CharSequence.class),
            Predicates.compose(startsWith(prefix), StringFunctions.toStringFunction()));
}