Example usage for com.google.common.base.Predicates.not

List of usage examples for com.google.common.base.Predicates.not

Introduction

On this page you can find example usages of com.google.common.base.Predicates.not.

Prototype

public static <T> Predicate<T> not(Predicate<T> predicate) 

Document

Returns a predicate that evaluates to true if the given predicate evaluates to false.
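
As a quick illustration of the prototype above, here is a minimal, self-contained sketch that pairs Predicates.not with Iterables.filter to keep only the elements the original predicate rejects. The class name and the sample data are hypothetical and serve only to demonstrate the call.

import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

public class PredicatesNotExample {
    public static void main(String[] args) {
        // A predicate that matches empty strings.
        Predicate<String> isEmpty = new Predicate<String>() {
            @Override
            public boolean apply(String input) {
                return input.isEmpty();
            }
        };

        // Predicates.not inverts isEmpty, so the filter keeps only non-empty strings.
        Iterable<String> nonEmpty = Iterables.filter(ImmutableList.of("a", "", "b"),
                Predicates.not(isEmpty));

        System.out.println(ImmutableList.copyOf(nonEmpty)); // prints [a, b]
    }
}

A composition that recurs in the examples below is Predicates.not(Predicates.in(collection)), which accepts exactly the elements not contained in the given collection.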

Usage

From source file:com.google.devtools.build.lib.rules.objc.ObjcProvider.java

@SuppressWarnings("unchecked")
private <T> void addTransitiveAndAvoid(ObjcProvider.Builder objcProviderBuilder, Key<T> key,
        Iterable<ObjcProvider> avoidProviders) {
    HashSet<T> avoidItemsSet = new HashSet<T>();
    for (ObjcProvider avoidProvider : avoidProviders) {
        avoidItemsSet.addAll(avoidProvider.getPropagable(key).toList());
    }
    addTransitiveAndFilter(objcProviderBuilder, key, Predicates.not(Predicates.in(avoidItemsSet)));
}

From source file:com.eucalyptus.autoscaling.AutoScalingService.java

public CreateAutoScalingGroupResponseType createAutoScalingGroup(final CreateAutoScalingGroupType request)
        throws EucalyptusCloudException {
    final CreateAutoScalingGroupResponseType reply = request.getReply();

    final Context ctx = Contexts.lookup();

    if (request.getTags() != null) {
        for (final TagType tagType : request.getTags().getMember()) {
            final String key = tagType.getKey();
            if (com.google.common.base.Strings.isNullOrEmpty(key) || key.trim().length() > 128
                    || isReserved(key)) {
                throw new ValidationErrorException(
                        "Invalid key (max length 128, must not be empty, reserved prefixes " + reservedPrefixes
                                + "): " + key);
            }
        }

        if (request.getTags().getMember().size() >= MAX_TAGS_PER_RESOURCE) {
            throw Exceptions.toUndeclared(new LimitExceededException("Tag limit exceeded"));
        }
    }

    final Supplier<AutoScalingGroup> allocator = new Supplier<AutoScalingGroup>() {
        @Override
        public AutoScalingGroup get() {
            try {
                final Integer minSize = Numbers.intValue(request.getMinSize());
                final Integer maxSize = Numbers.intValue(request.getMaxSize());
                final Integer desiredCapacity = Numbers.intValue(request.getDesiredCapacity());

                if (desiredCapacity != null && desiredCapacity < minSize) {
                    throw Exceptions.toUndeclared(
                            new ValidationErrorException("DesiredCapacity must not be less than MinSize"));
                }
                if (desiredCapacity != null && desiredCapacity > maxSize) {
                    throw Exceptions.toUndeclared(
                            new ValidationErrorException("DesiredCapacity must not be greater than MaxSize"));
                }

                final List<String> referenceErrors = activityManager.validateReferences(ctx.getUserFullName(),
                        request.availabilityZones(), request.loadBalancerNames());
                verifyUnsupportedReferences(referenceErrors, request.getPlacementGroup(),
                        request.getVpcZoneIdentifier());

                if (!referenceErrors.isEmpty()) {
                    throw Exceptions.toUndeclared(
                            new ValidationErrorException("Invalid parameters " + referenceErrors));
                }

                final AutoScalingGroups.PersistingBuilder builder = autoScalingGroups
                        .create(ctx.getUserFullName(), request.getAutoScalingGroupName(),
                                launchConfigurations.lookup(ctx.getUserFullName().asAccountFullName(),
                                        request.getLaunchConfigurationName(),
                                        Functions.<LaunchConfiguration>identity()),
                                minSize, maxSize)
                        .withAvailabilityZones(request.availabilityZones())
                        .withDefaultCooldown(Numbers.intValue(request.getDefaultCooldown()))
                        .withDesiredCapacity(desiredCapacity)
                        .withHealthCheckGracePeriod(Numbers.intValue(request.getHealthCheckGracePeriod()))
                        .withHealthCheckType(request.getHealthCheckType() == null ? null
                                : HealthCheckType.valueOf(request.getHealthCheckType()))
                        .withLoadBalancerNames(
                                request.loadBalancerNames())
                        .withTerminationPolicyTypes(request.terminationPolicies() == null ? null
                                : Collections2.filter(
                                        Collections2.transform(request.terminationPolicies(),
                                                Enums.valueOfFunction(TerminationPolicyType.class)),
                                        Predicates.not(Predicates.isNull())))
                        .withTags(request.getTags() == null ? null
                                : Iterables.transform(request.getTags().getMember(),
                                        TypeMappers.lookup(TagType.class, AutoScalingGroupTag.class)));

                return builder.persist();
            } catch (AutoScalingMetadataNotFoundException e) {
                throw Exceptions.toUndeclared(new ValidationErrorException(
                        "Launch configuration not found: " + request.getLaunchConfigurationName()));
            } catch (IllegalArgumentException e) {
                throw Exceptions.toUndeclared(new ValidationErrorException(
                        "Invalid health check type: " + request.getHealthCheckType()));
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
        }
    };

    try {
        RestrictedTypes.allocateUnitlessResource(allocator);
    } catch (Exception e) {
        handleException(e, true);
    }

    return reply;
}

From source file:com.facebook.buck.cxx.CxxLibraryDescription.java

public <A extends Arg> BuildRule createBuildRule(final BuildRuleParams params, final BuildRuleResolver resolver,
        final A args, Optional<Linker.LinkableDepType> linkableDepType, final Optional<SourcePath> bundleLoader,
        ImmutableSet<BuildTarget> blacklist) throws NoSuchBuildTargetException {
    BuildTarget buildTarget = params.getBuildTarget();
    // See if we're building a particular "type" and "platform" of this library, and if so, extract
    // them from the flavors attached to the build target.
    Optional<Map.Entry<Flavor, Type>> type = getLibType(buildTarget);
    Optional<CxxPlatform> platform = cxxPlatforms.getValue(buildTarget);

    if (params.getBuildTarget().getFlavors().contains(CxxCompilationDatabase.COMPILATION_DATABASE)) {
        // XXX: This needs bundleLoader for tests..
        CxxPlatform cxxPlatform = platform.orElse(defaultCxxPlatform);
        SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
        SourcePathResolver sourcePathResolver = new SourcePathResolver(ruleFinder);
        return CxxDescriptionEnhancer.createCompilationDatabase(params, resolver, sourcePathResolver,
                ruleFinder, cxxBuckConfig, cxxPlatform, args);
    } else if (params.getBuildTarget().getFlavors()
            .contains(CxxCompilationDatabase.UBER_COMPILATION_DATABASE)) {
        return CxxDescriptionEnhancer.createUberCompilationDatabase(
                platform.isPresent() ? params : params.withFlavor(defaultCxxPlatform.getFlavor()), resolver);
    } else if (params.getBuildTarget().getFlavors().contains(CxxInferEnhancer.InferFlavors.INFER.get())) {
        return CxxInferEnhancer.requireInferAnalyzeAndReportBuildRuleForCxxDescriptionArg(params, resolver,
                new SourcePathResolver(new SourcePathRuleFinder(resolver)), cxxBuckConfig,
                platform.orElse(defaultCxxPlatform), args, inferBuckConfig,
                new CxxInferSourceFilter(inferBuckConfig));
    } else if (params.getBuildTarget().getFlavors()
            .contains(CxxInferEnhancer.InferFlavors.INFER_ANALYZE.get())) {
        return CxxInferEnhancer.requireInferAnalyzeBuildRuleForCxxDescriptionArg(params, resolver,
                new SourcePathResolver(new SourcePathRuleFinder(resolver)), cxxBuckConfig,
                platform.orElse(defaultCxxPlatform), args, inferBuckConfig,
                new CxxInferSourceFilter(inferBuckConfig));
    } else if (params.getBuildTarget().getFlavors()
            .contains(CxxInferEnhancer.InferFlavors.INFER_CAPTURE_ALL.get())) {
        return CxxInferEnhancer.requireAllTransitiveCaptureBuildRules(params, resolver, cxxBuckConfig,
                platform.orElse(defaultCxxPlatform), inferBuckConfig, new CxxInferSourceFilter(inferBuckConfig),
                args);
    } else if (params.getBuildTarget().getFlavors()
            .contains(CxxInferEnhancer.InferFlavors.INFER_CAPTURE_ONLY.get())) {
        return CxxInferEnhancer.requireInferCaptureAggregatorBuildRuleForCxxDescriptionArg(params, resolver,
                new SourcePathResolver(new SourcePathRuleFinder(resolver)), cxxBuckConfig,
                platform.orElse(defaultCxxPlatform), args, inferBuckConfig,
                new CxxInferSourceFilter(inferBuckConfig));
    } else if (type.isPresent() && platform.isPresent()) {
        // If we *are* building a specific type of this lib, call into the type specific
        // rule builder methods.

        BuildRuleParams untypedParams = getUntypedParams(params);
        switch (type.get().getValue()) {
        case HEADERS:
            return createHeaderSymlinkTreeBuildRule(untypedParams, resolver, platform.get(), args);
        case EXPORTED_HEADERS:
            return createExportedHeaderSymlinkTreeBuildRule(untypedParams, resolver, platform.get(), args);
        case SHARED:
            return createSharedLibraryBuildRule(untypedParams, resolver, cxxBuckConfig, platform.get(), args,
                    Linker.LinkType.SHARED, linkableDepType.orElse(Linker.LinkableDepType.SHARED),
                    Optional.empty(), blacklist);
        case SHARED_INTERFACE:
            return createSharedLibraryInterface(untypedParams, resolver, platform.get());
        case MACH_O_BUNDLE:
            return createSharedLibraryBuildRule(untypedParams, resolver, cxxBuckConfig, platform.get(), args,
                    Linker.LinkType.MACH_O_BUNDLE, linkableDepType.orElse(Linker.LinkableDepType.SHARED),
                    bundleLoader, blacklist);
        case STATIC:
            return createStaticLibraryBuildRule(untypedParams, resolver, cxxBuckConfig, platform.get(), args,
                    CxxSourceRuleFactory.PicType.PDC);
        case STATIC_PIC:
            return createStaticLibraryBuildRule(untypedParams, resolver, cxxBuckConfig, platform.get(), args,
                    CxxSourceRuleFactory.PicType.PIC);
        case SANDBOX_TREE:
            return CxxDescriptionEnhancer.createSandboxTreeBuildRule(resolver, args, platform.get(),
                    untypedParams);
        }
        throw new RuntimeException("unhandled library build type");
    }

    boolean hasObjectsForAnyPlatform = !args.srcs.isEmpty();
    Predicate<CxxPlatform> hasObjects;
    if (hasObjectsForAnyPlatform) {
        hasObjects = x -> true;
    } else {
        hasObjects = input -> !args.platformSrcs.getMatchingValues(input.getFlavor().toString()).isEmpty();
    }

    Predicate<CxxPlatform> hasExportedHeaders;
    if (!args.exportedHeaders.isEmpty()) {
        hasExportedHeaders = x -> true;
    } else {
        hasExportedHeaders = input -> !args.exportedPlatformHeaders
                .getMatchingValues(input.getFlavor().toString()).isEmpty();
    }

    // Otherwise, we return the generic placeholder of this library, which dependents can use
    // to get the real build rules by querying the action graph.
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    final SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    return new CxxLibrary(params, resolver, pathResolver,
            FluentIterable.from(args.exportedDeps).transform(resolver::getRule), hasExportedHeaders,
            Predicates.not(hasObjects),
            input -> CxxFlags.getLanguageFlags(args.exportedPreprocessorFlags,
                    args.exportedPlatformPreprocessorFlags, args.exportedLangPreprocessorFlags, input),
            input -> {
                ImmutableList<String> flags = CxxFlags.getFlags(args.exportedLinkerFlags,
                        args.exportedPlatformLinkerFlags, input);
                return Iterables.transform(flags, MacroArg.toMacroArgFunction(MACRO_HANDLER,
                        params.getBuildTarget(), params.getCellRoots(), resolver));
            }, cxxPlatform -> {
                try {
                    return getSharedLibraryNativeLinkTargetInput(params, resolver, pathResolver, ruleFinder,
                            cxxBuckConfig, cxxPlatform, args,
                            CxxFlags.getFlags(args.linkerFlags, args.platformLinkerFlags, cxxPlatform),
                            CxxFlags.getFlags(args.exportedLinkerFlags, args.exportedPlatformLinkerFlags,
                                    cxxPlatform),
                            args.frameworks, args.libraries);
                } catch (NoSuchBuildTargetException e) {
                    throw new RuntimeException(e);
                }
            }, args.supportedPlatformsRegex, args.frameworks, args.libraries,
            args.forceStatic.orElse(false) ? NativeLinkable.Linkage.STATIC
                    : args.preferredLinkage.orElse(NativeLinkable.Linkage.ANY),
            args.linkWhole.orElse(false), args.soname, args.tests, args.canBeAsset.orElse(false));
}

From source file:io.crate.test.integration.CrateTestCluster.java

/**
 * Stops any of the current nodes but not the master node.
 */
public void stopRandomNonMasterNode() {
    NodeAndClient nodeAndClient = getRandomNodeAndClient(
            Predicates.not(new MasterNodePredicate(getMasterName())));
    if (nodeAndClient != null) {
        logger.info("Closing random non master node [{}] current master [{}] ", nodeAndClient.name,
                getMasterName());
        nodes.remove(nodeAndClient.name);
        nodeAndClient.close();
    }
}

From source file:com.google.devtools.build.lib.rules.objc.CompilationSupport.java

/**
 * Sets compilation-related Xcode project information on the given provider builder.
 *
 * @param common common information about this rule's attributes and its dependencies
 * @return this compilation support
 */
CompilationSupport addXcodeSettings(Builder xcodeProviderBuilder, ObjcCommon common) {
    for (CompilationArtifacts artifacts : common.getCompilationArtifacts().asSet()) {
        xcodeProviderBuilder.setCompilationArtifacts(artifacts);
    }

    // The include directory options ("-I") are parsed out of copts. The include directories are
    // added as non-propagated header search paths local to the associated Xcode target.
    Iterable<String> copts = Iterables.concat(objcConfiguration.getCopts(), attributes.copts());
    Iterable<String> includeDirOptions = Iterables.filter(copts, INCLUDE_DIR_OPTION_IN_COPTS);
    Iterable<String> coptsWithoutIncludeDirs = Iterables.filter(copts,
            Predicates.not(INCLUDE_DIR_OPTION_IN_COPTS));
    ImmutableList.Builder<PathFragment> nonPropagatedHeaderSearchPaths = new ImmutableList.Builder<>();
    for (String includeDirOption : includeDirOptions) {
        nonPropagatedHeaderSearchPaths.add(new PathFragment(includeDirOption.substring(2)));
    }

    // We also need to add the -isystem directories from the CC header providers. ObjcCommon
    // adds these to the objcProvider, so let's just get them from there.
    Iterable<PathFragment> includeSystemPaths = common.getObjcProvider().get(INCLUDE_SYSTEM);

    xcodeProviderBuilder.addHeaders(attributes.hdrs()).addHeaders(attributes.textualHdrs())
            .addUserHeaderSearchPaths(ObjcCommon.userHeaderSearchPaths(buildConfiguration))
            .addHeaderSearchPaths("$(WORKSPACE_ROOT)",
                    attributes.headerSearchPaths(buildConfiguration.getGenfilesFragment()))
            .addHeaderSearchPaths("$(WORKSPACE_ROOT)", includeSystemPaths)
            .addHeaderSearchPaths("$(SDKROOT)/usr/include", attributes.sdkIncludes())
            .addNonPropagatedHeaderSearchPaths("$(WORKSPACE_ROOT)", nonPropagatedHeaderSearchPaths.build())
            .addCompilationModeCopts(objcConfiguration.getCoptsForCompilationMode())
            .addCopts(coptsWithoutIncludeDirs);

    return this;
}

From source file:com.google.devtools.build.lib.rules.objc.LegacyCompilationSupport.java

private CommandLine linkCommandLine(ExtraLinkArgs extraLinkArgs, ObjcProvider objcProvider,
        Artifact linkedBinary, Optional<Artifact> dsymBundleZip, Iterable<Artifact> ccLibraries,
        Iterable<Artifact> bazelBuiltLibraries, Optional<Artifact> linkmap) {
    Iterable<String> libraryNames = libraryNames(objcProvider);

    CustomCommandLine.Builder commandLine = CustomCommandLine.builder()
            .addPath(xcrunwrapper(ruleContext).getExecutable().getExecPath());
    if (objcProvider.is(USES_CPP)) {
        commandLine.add(CLANG_PLUSPLUS).add("-stdlib=libc++").add("-std=gnu++11");
    } else {
        commandLine.add(CLANG);
    }

    // Do not perform code stripping on tests because XCTest binary is linked not as an executable
    // but as a bundle without any entry point.
    boolean isTestTarget = TargetUtils.isTestRule(ruleContext.getRule());
    if (objcConfiguration.shouldStripBinary() && !isTestTarget) {
        commandLine.add("-dead_strip").add("-no_dead_strip_inits_and_terms");
    }

    Iterable<Artifact> ccLibrariesToForceLoad = Iterables.filter(ccLibraries, ALWAYS_LINKED_CC_LIBRARY);

    ImmutableSet<Artifact> forceLinkArtifacts = ImmutableSet.<Artifact>builder()
            .addAll(objcProvider.get(FORCE_LOAD_LIBRARY)).addAll(ccLibrariesToForceLoad).build();

    Artifact inputFileList = intermediateArtifacts.linkerObjList();
    Iterable<Artifact> objFiles = Iterables.concat(bazelBuiltLibraries, objcProvider.get(IMPORTED_LIBRARY),
            ccLibraries);
    // Clang loads archives specified in filelists and also specified as -force_load twice,
    // resulting in duplicate symbol errors unless they are deduped.
    objFiles = Iterables.filter(objFiles, Predicates.not(Predicates.in(forceLinkArtifacts)));

    registerObjFilelistAction(objFiles, inputFileList);

    if (objcConfiguration.shouldPrioritizeStaticLibs()) {
        commandLine.add("-filelist").add(inputFileList.getExecPathString());
    }

    AppleBitcodeMode bitcodeMode = appleConfiguration.getBitcodeMode();
    commandLine.add(bitcodeMode.getCompileAndLinkFlags());

    if (bitcodeMode == AppleBitcodeMode.EMBEDDED) {
        commandLine.add("-Xlinker").add("-bitcode_verify");
        commandLine.add("-Xlinker").add("-bitcode_hide_symbols");
        // TODO(b/32910627): Add Bitcode symbol maps outputs.
    }

    commandLine.add(commonLinkAndCompileFlagsForClang(objcProvider, objcConfiguration, appleConfiguration))
            .add("-Xlinker").add("-objc_abi_version").add("-Xlinker").add("2")
            // Set the rpath so that at runtime dylibs can be loaded from the bundle root's "Frameworks"
            // directory.
            .add("-Xlinker").add("-rpath").add("-Xlinker").add("@executable_path/Frameworks")
            .add("-fobjc-link-runtime").add(DEFAULT_LINKER_FLAGS)
            .addBeforeEach("-framework", frameworkNames(objcProvider))
            .addBeforeEach("-weak_framework", SdkFramework.names(objcProvider.get(WEAK_SDK_FRAMEWORK)))
            .addFormatEach("-l%s", libraryNames);

    if (!objcConfiguration.shouldPrioritizeStaticLibs()) {
        commandLine.add("-filelist").add(inputFileList.getExecPathString());
    }

    commandLine.addExecPath("-o", linkedBinary).addBeforeEachExecPath("-force_load", forceLinkArtifacts)
            .add(extraLinkArgs).add(objcProvider.get(ObjcProvider.LINKOPT));

    if (buildConfiguration.isCodeCoverageEnabled()) {
        if (buildConfiguration.isLLVMCoverageMapFormatEnabled()) {
            commandLine.add(LINKER_LLVM_COVERAGE_FLAGS);
        } else {
            commandLine.add(LINKER_COVERAGE_FLAGS);
        }
    }

    for (String linkopt : attributes.linkopts()) {
        commandLine.add("-Wl," + linkopt);
    }

    if (linkmap.isPresent()) {
        commandLine.add("-Xlinker -map").add("-Xlinker " + linkmap.get().getExecPath());
    }

    // Call to dsymutil for debug symbol generation must happen in the link action.
    // All debug symbol information is encoded in object files inside archive files. To generate
    // the debug symbol bundle, dsymutil will look inside the linked binary for the encoded
    // absolute paths to archive files, which are only valid in the link action.
    if (dsymBundleZip.isPresent()) {
        PathFragment dsymPath = FileSystemUtils.removeExtension(dsymBundleZip.get().getExecPath());
        commandLine.add("&&").addPath(xcrunwrapper(ruleContext).getExecutable().getExecPath()).add(DSYMUTIL)
                .add(linkedBinary.getExecPathString()).add("-o " + dsymPath)
                .add("&& zipped_bundle=${PWD}/" + dsymBundleZip.get().getShellEscapedExecPathString())
                .add("&& cd " + dsymPath).add("&& /usr/bin/zip -q -r \"${zipped_bundle}\" .");
    }

    return commandLine.build();
}

From source file:org.elasticsearch.test.hamcrest.ElasticsearchAssertions.java

public static void assertNodeContainsPlugins(NodesInfoResponse response, String nodeId,
        List<String> expectedJvmPluginNames, List<String> expectedJvmPluginDescriptions,
        List<String> expectedJvmVersions, List<String> expectedSitePluginNames,
        List<String> expectedSitePluginDescriptions, List<String> expectedSiteVersions) {

    Assert.assertThat(response.getNodesMap().get(nodeId), notNullValue());

    PluginsInfo plugins = response.getNodesMap().get(nodeId).getPlugins();
    Assert.assertThat(plugins, notNullValue());

    List<String> pluginNames = FluentIterable.from(plugins.getInfos()).filter(jvmPluginPredicate)
            .transform(nameFunction).toList();
    for (String expectedJvmPluginName : expectedJvmPluginNames) {
        Assert.assertThat(pluginNames, hasItem(expectedJvmPluginName));
    }

    List<String> pluginDescriptions = FluentIterable.from(plugins.getInfos()).filter(jvmPluginPredicate)
            .transform(descriptionFunction).toList();
    for (String expectedJvmPluginDescription : expectedJvmPluginDescriptions) {
        Assert.assertThat(pluginDescriptions, hasItem(expectedJvmPluginDescription));
    }

    List<String> jvmPluginVersions = FluentIterable.from(plugins.getInfos()).filter(jvmPluginPredicate)
            .transform(versionFunction).toList();
    for (String pluginVersion : expectedJvmVersions) {
        Assert.assertThat(jvmPluginVersions, hasItem(pluginVersion));
    }

    FluentIterable<String> jvmUrls = FluentIterable.from(plugins.getInfos())
            .filter(Predicates.and(jvmPluginPredicate, Predicates.not(sitePluginPredicate))).filter(isNull())
            .transform(urlFunction);
    Assert.assertThat(Iterables.size(jvmUrls), is(0));

    List<String> sitePluginNames = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate)
            .transform(nameFunction).toList();
    Assert.assertThat(sitePluginNames.isEmpty(), is(expectedSitePluginNames.isEmpty()));
    for (String expectedSitePluginName : expectedSitePluginNames) {
        Assert.assertThat(sitePluginNames, hasItem(expectedSitePluginName));
    }

    List<String> sitePluginDescriptions = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate)
            .transform(descriptionFunction).toList();
    Assert.assertThat(sitePluginDescriptions.isEmpty(), is(expectedSitePluginDescriptions.isEmpty()));
    for (String sitePluginDescription : expectedSitePluginDescriptions) {
        Assert.assertThat(sitePluginDescriptions, hasItem(sitePluginDescription));
    }

    List<String> sitePluginUrls = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate)
            .transform(urlFunction).toList();
    Assert.assertThat(sitePluginUrls, not(contains(nullValue())));

    List<String> sitePluginVersions = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate)
            .transform(versionFunction).toList();
    Assert.assertThat(sitePluginVersions.isEmpty(), is(expectedSiteVersions.isEmpty()));
    for (String pluginVersion : expectedSiteVersions) {
        Assert.assertThat(sitePluginVersions, hasItem(pluginVersion));
    }
}

From source file:forge.learnedai.AiBlockController.java

/** Supports blockers that do not destroy the attacker with additional blockers, trying to kill the attacker. */
private void reinforceBlockersToKill(final Combat combat) {

    List<Card> safeBlockers;
    List<Card> blockers;

    List<Card> targetAttackers = CardLists.filter(blockedButUnkilled,
            Predicates.not(rampagesOrNeedsManyToBlock));

    // TODO - should check here for a "rampage-like" trigger that replaced
    // the keyword: "Whenever CARDNAME becomes blocked, it gets +1/+1 until end of turn for each creature blocking it."

    for (final Card attacker : targetAttackers) {
        blockers = getPossibleBlockers(combat, attacker, blockersLeft, false);
        blockers.removeAll(combat.getBlockers(attacker));

        // Try to use safe blockers first
        if (blockers.size() > 1) {
            safeBlockers = getSafeBlockers(combat, attacker, blockers);
            for (final Card blocker : safeBlockers) {
                final int damageNeeded = ComputerUtilCombat.getDamageToKill(attacker)
                        + ComputerUtilCombat.predictToughnessBonusOfAttacker(attacker, blocker, combat, false);
                // Add an additional blocker if the current blockers are not
                // enough and the new one would deal additional damage
                if (damageNeeded > ComputerUtilCombat.totalDamageOfBlockers(attacker,
                        combat.getBlockers(attacker))
                        && ComputerUtilCombat.dealsDamageAsBlocker(attacker, blocker) > 0
                        && CombatUtil.canBlock(attacker, blocker, combat)) {
                    combat.addBlocker(attacker, blocker);
                }
                blockers.remove(blocker); // Don't check them again next
            }
        }

        // Try to add blockers that could be destroyed, but are worth less than the attacker
        // Don't use blockers without First Strike or Double Strike if attacker has it
        if (ComputerUtilCombat.dealsFirstStrikeDamage(attacker, false, combat)) {
            safeBlockers = CardLists.getKeyword(blockers, "First Strike");
            safeBlockers.addAll(CardLists.getKeyword(blockers, "Double Strike"));
        } else {
            safeBlockers = new ArrayList<>(blockers);
        }

        for (final Card blocker : safeBlockers) {
            final int damageNeeded = ComputerUtilCombat.getDamageToKill(attacker)
                    + ComputerUtilCombat.predictToughnessBonusOfAttacker(attacker, blocker, combat, false);
            // Add an additional blocker if the current blockers are not
            // enough and the new one would deal the remaining damage
            final int currentDamage = ComputerUtilCombat.totalDamageOfBlockers(attacker,
                    combat.getBlockers(attacker));
            final int additionalDamage = ComputerUtilCombat.dealsDamageAsBlocker(attacker, blocker);
            if (damageNeeded > currentDamage && damageNeeded <= currentDamage + additionalDamage
                    && ComputerUtilCard.evaluateCreature(blocker) + diff < ComputerUtilCard
                            .evaluateCreature(attacker)
                    && CombatUtil.canBlock(attacker, blocker, combat)) {
                combat.addBlocker(attacker, blocker);
                blockersLeft.remove(blocker);
            }
        }
    }
}

From source file:forge.ai.AiBlockController.java

/** Supports blockers that do not destroy the attacker with additional blockers, trying to kill the attacker. */
private void reinforceBlockersToKill(final Combat combat) {

    List<Card> safeBlockers;
    List<Card> blockers;
    List<Card> targetAttackers = CardLists.filter(blockedButUnkilled,
            Predicates.not(rampagesOrNeedsManyToBlock));

    // TODO - should check here for a "rampage-like" trigger that replaced
    // the keyword: "Whenever CARDNAME becomes blocked, it gets +1/+1 until end of turn for each creature blocking it."

    for (final Card attacker : targetAttackers) {
        blockers = getPossibleBlockers(combat, attacker, blockersLeft, false);
        blockers.removeAll(combat.getBlockers(attacker));

        // Try to use safe blockers first
        if (blockers.size() > 1) {
            safeBlockers = getSafeBlockers(combat, attacker, blockers);
            for (final Card blocker : safeBlockers) {
                final int damageNeeded = ComputerUtilCombat.getDamageToKill(attacker)
                        + ComputerUtilCombat.predictToughnessBonusOfAttacker(attacker, blocker, combat, false);
                // Add an additional blocker if the current blockers are not
                // enough and the new one would deal additional damage
                if (damageNeeded > ComputerUtilCombat.totalDamageOfBlockers(attacker,
                        combat.getBlockers(attacker))
                        && ComputerUtilCombat.dealsDamageAsBlocker(attacker, blocker) > 0
                        && CombatUtil.canBlock(attacker, blocker, combat)) {
                    combat.addBlocker(attacker, blocker);
                }
                blockers.remove(blocker); // Don't check them again next
            }
        }
        // don't try to kill what can't be killed
        if (attacker.hasKeyword("indestructible") || ComputerUtil.canRegenerate(ai, attacker)) {
            continue;
        }

        // Try to add blockers that could be destroyed, but are worth less than the attacker
        // Don't use blockers without First Strike or Double Strike if attacker has it
        if (ComputerUtilCombat.dealsFirstStrikeDamage(attacker, false, combat)) {
            safeBlockers = CardLists.getKeyword(blockers, "First Strike");
            safeBlockers.addAll(CardLists.getKeyword(blockers, "Double Strike"));
        } else {
            safeBlockers = new ArrayList<>(blockers);
        }

        for (final Card blocker : safeBlockers) {
            final int damageNeeded = ComputerUtilCombat.getDamageToKill(attacker)
                    + ComputerUtilCombat.predictToughnessBonusOfAttacker(attacker, blocker, combat, false);
            // Add an additional blocker if the current blockers are not
            // enough and the new one would deal the remaining damage
            final int currentDamage = ComputerUtilCombat.totalDamageOfBlockers(attacker,
                    combat.getBlockers(attacker));
            final int additionalDamage = ComputerUtilCombat.dealsDamageAsBlocker(attacker, blocker);
            if (damageNeeded > currentDamage && damageNeeded <= currentDamage + additionalDamage
                    && ComputerUtilCard.evaluateCreature(blocker) + diff < ComputerUtilCard
                            .evaluateCreature(attacker)
                    && CombatUtil.canBlock(attacker, blocker, combat)
                    && !ComputerUtilCombat.canDestroyBlockerBeforeFirstStrike(blocker, attacker, false)) {
                combat.addBlocker(attacker, blocker);
                blockersLeft.remove(blocker);
            }
        }
    }
}

From source file:com.palantir.atlasdb.transaction.impl.SnapshotTransaction.java

private static Iterator<Entry<Cell, byte[]>> mergeInLocalWrites(
        Iterator<Entry<Cell, byte[]>> postFilterIterator, Iterator<Entry<Cell, byte[]>> localWritesInRange,
        boolean isReverse) {
    Ordering<Entry<Cell, byte[]>> ordering = Ordering.natural()
            .onResultOf(MapEntries.<Cell, byte[]>getKeyFunction());
    Iterator<Entry<Cell, byte[]>> mergeIterators = IteratorUtils.mergeIterators(postFilterIterator,
            localWritesInRange, isReverse ? ordering.reverse() : ordering,
            new Function<Pair<Entry<Cell, byte[]>, Entry<Cell, byte[]>>, Entry<Cell, byte[]>>() {
                @Override
                public Map.Entry<Cell, byte[]> apply(
                        Pair<Map.Entry<Cell, byte[]>, Map.Entry<Cell, byte[]>> from) {
                    // always override their value with written values
                    return from.rhSide;
                }
            });
    return Iterators.filter(mergeIterators,
            Predicates.compose(Predicates.not(Value.IS_EMPTY), MapEntries.<Cell, byte[]>getValueFunction()));
}