Example usage for com.google.common.base Predicates notNull

List of usage examples for com.google.common.base Predicates notNull

Introduction

On this page you can find example usage for com.google.common.base Predicates notNull.

Prototype

@GwtCompatible(serializable = true)
public static <T> Predicate<T> notNull() 

Source Link

Document

Returns a predicate that evaluates to true if the object reference being tested is not null.

Usage

From source file:org.trancecode.xproc.step.AbstractCompoundStepProcessor.java

/**
 * Runs the steps of a compound step's subpipeline, submitting each step to the
 * pipeline executor so that steps without mutual dependencies may run concurrently.
 *
 * @param steps the subpipeline steps to run
 * @param environment the environment in which the subpipeline runs
 * @return the environment of the last step, augmented with any output ports
 *         produced by intermediate steps that the last step does not already declare
 */
protected Environment runSteps(final Iterable<Step> steps, final Environment environment) {
    LOG.trace("steps = {}", steps);

    final Environment initialEnvironment = environment.newChildStepEnvironment();
    final EnvironmentPort parametersPort = environment.getDefaultParametersPort();
    LOG.trace("  parametersPort = {}", parametersPort);

    // Maps each step to the steps whose outputs it consumes; each step's task
    // blocks on the futures of its dependencies before running.
    final Map<Step, Iterable<Step>> stepDependencies = Step.getSubpipelineStepDependencies(steps);
    final Map<Step, Future<Environment>> stepResults = new ConcurrentHashMap<>();
    final List<Future<Environment>> results = Lists.newArrayList();
    // NOTE(review): nothing in this method sets 'error'; presumably step tasks or
    // subclasses report failures into it elsewhere — confirm before relying on it.
    final AtomicReference<Throwable> error = new AtomicReference<>();
    for (final Step step : steps) {
        final Future<Environment> result = environment.getPipelineContext().getExecutor().submit(() -> {
            // shortcut in case an error was reported by another task
            if (error.get() != null) {
                throw new IllegalStateException(error.get());
            }

            // Build this step's input environment from its dependencies' results.
            Environment inputEnvironment = initialEnvironment;
            for (final Step dependency : stepDependencies.get(step)) {
                try {
                    // Blocks until the dependency step has finished.
                    final Environment dependencyResult = stepResults.get(dependency).get();
                    inputEnvironment = inputEnvironment.addPorts(dependencyResult.getOutputPorts());
                    inputEnvironment = inputEnvironment
                            .setDefaultReadablePort(dependencyResult.getDefaultReadablePort());
                    inputEnvironment = inputEnvironment.setDefaultParametersPort(parametersPort);
                    inputEnvironment = inputEnvironment
                            .setXPathContextPort(dependencyResult.getXPathContextPort());
                } catch (final ExecutionException e) {
                    // Re-throw the dependency's failure cause in this task.
                    throw Throwables.propagate(e.getCause());
                }
            }

            Environment.setCurrentNamespaceContext(step.getNode());
            inputEnvironment.setCurrentEnvironment();
            return step.run(inputEnvironment);
        });
        stepResults.put(step, result);
        results.add(result);
    }

    // Wait for all step tasks; on failure cancel whatever is still pending.
    final Iterable<Environment> resultEnvironments;
    try {
        resultEnvironments = TcFutures.get(results);
    } catch (final ExecutionException e) {
        TcFutures.cancel(results);
        throw Throwables.propagate(e.getCause());
    } catch (final InterruptedException e) {
        throw new IllegalStateException(e);
    }

    // The subpipeline result is the last step's environment, extended with output
    // ports from intermediate steps that it does not already expose.
    Environment resultEnvironment = Iterables.getLast(resultEnvironments, initialEnvironment);
    for (final Environment intermediateResultEnvironment : Iterables.filter(resultEnvironments,
            Predicates.notNull())) {
        for (final EnvironmentPort port : intermediateResultEnvironment.getOutputPorts()) {
            if (!resultEnvironment.getPorts().containsKey(port.getPortReference())) {
                resultEnvironment = resultEnvironment.addPorts(port);
            }
        }
    }

    return resultEnvironment;
}

From source file:de.metas.ui.web.order.sales.purchasePlanning.view.PurchaseRowsSaver.java

/**
 * Collects the distinct, non-null demand group references of all line rows
 * included in the given grouping rows.
 *
 * @param groupingRows the top-level grouping rows to scan; must not be null
 * @return an immutable set of the non-null demand group references found
 */
private ImmutableSet<DemandGroupReference> extractDemandIds(@NonNull final List<PurchaseRow> groupingRows) {
    return groupingRows.stream()
            // flatten to the line rows contained in each grouping row
            .flatMap(groupingRow -> groupingRow.getIncludedRows().stream())
            // each line row may reference several demand groups
            .flatMap(lineRow -> lineRow.getDemandGroupReferences().stream())
            // JDK null filter instead of Guava's Predicates.notNull() in a java.util.stream pipeline
            .filter(java.util.Objects::nonNull)
            .collect(ImmutableSet.toImmutableSet());
}

From source file:net.shibboleth.idp.attribute.resolver.ad.impl.TemplateAttributeDefinition.java

/**
 * Sets the source attribute IDs.
 *
 * <p>May only be called before this component is initialized. The supplied list
 * may contain null elements; only non-null IDs are retained.</p>
 *
 * @param newSourceAttributes the source attribute IDs; the list itself must not be null
 */
public void setSourceAttributes(@Nonnull @NullableElements final List<String> newSourceAttributes) {
    // Fail fast if the component has already been initialized or destroyed.
    ComponentSupport.ifInitializedThrowUnmodifiabledComponentException(this);
    ComponentSupport.ifDestroyedThrowDestroyedComponentException(this);
    Constraint.isNotNull(newSourceAttributes, "Source attribute list cannot be null");

    sourceAttributes = new ArrayList<>(newSourceAttributes.size());
    // addIf presumably copies only elements matching the predicate, i.e. drops
    // null entries permitted by @NullableElements — confirm CollectionSupport semantics.
    CollectionSupport.addIf(sourceAttributes, newSourceAttributes, Predicates.notNull());
}

From source file:org.apache.brooklyn.location.jclouds.BlobStoreContextFactoryImpl.java

@Override
public BlobStoreContext newBlobStoreContext(Location location) {
    // Resolve the provider (applying jclouds renames) and the mandatory credentials.
    String rawProvider = checkNotNull(location.getConfig(LocationConfigKeys.CLOUD_PROVIDER),
            "provider must not be null");
    String providerId = DeserializingJcloudsRenamesProvider.INSTANCE.applyJcloudsRenames(rawProvider);
    String accessIdentity = checkNotNull(location.getConfig(LocationConfigKeys.ACCESS_IDENTITY),
            "identity must not be null");
    String accessCredential = checkNotNull(location.getConfig(LocationConfigKeys.ACCESS_CREDENTIAL),
            "credential must not be null");
    String endpoint = location.getConfig(CloudLocationConfig.CLOUD_ENDPOINT);

    Properties jcloudsOverrides = new Properties();
    // * Java 7,8 bug workaround - sockets closed by GC break the internal bookkeeping
    //   of HttpUrlConnection, leading to invalid handling of the "HTTP/1.1 100 Continue"
    //   response. Coupled with a bug when using SSL sockets reads will block
    //   indefinitely even though a read timeout is explicitly set.
    // * Java 6 ignores the header anyways as it is included in its restricted headers black list.
    // * Also there's a bug in SL object store which still expects Content-Length bytes
    //   even when it responds with a 408 timeout response, leading to incorrectly
    //   interpreting the next request (triggered by above problem).
    jcloudsOverrides.setProperty(Constants.PROPERTY_STRIP_EXPECT_HEADER, "true");

    // Pick up any extra "jclouds.*" keys from the location config and pass the
    // non-null ones through as provider overrides.
    Map<String, Object> extraJcloudsConfig = Maps.filterKeys(
            ((LocationInternal) location).config().getBag().getAllConfig(),
            Predicates.containsPattern("^jclouds\\."));
    if (!extraJcloudsConfig.isEmpty()) {
        LOG.debug("Configuring custom jclouds property overrides for {}: {}", providerId,
                Sanitizer.sanitize(extraJcloudsConfig));
    }
    jcloudsOverrides.putAll(Maps.filterValues(extraJcloudsConfig, Predicates.notNull()));

    ContextBuilder builder = ContextBuilder.newBuilder(providerId).credentials(accessIdentity, accessCredential);
    builder.modules(MutableList.copyOf(getCommonModules()));
    if (!org.apache.brooklyn.util.text.Strings.isBlank(endpoint)) {
        builder.endpoint(endpoint);
    }
    builder.overrides(jcloudsOverrides);
    return builder.buildView(BlobStoreContext.class);
}

From source file:com.google.devtools.build.lib.skyframe.PrepareAnalysisPhaseFunction.java

@Override
public PrepareAnalysisPhaseValue compute(SkyKey key, Environment env)
        throws InterruptedException, PrepareAnalysisPhaseFunctionException {
    PrepareAnalysisPhaseKey options = (PrepareAnalysisPhaseKey) key.argument();

    // Reconstruct the target options from the diff, and derive host options from
    // them (patched via the host transition when distinct host configs are enabled).
    BuildOptions targetOptions = defaultBuildOptions.applyDiff(options.getOptionsDiff());
    BuildOptions hostOptions = targetOptions.get(BuildConfiguration.Options.class).useDistinctHostConfiguration
            ? HostTransition.INSTANCE.patch(targetOptions)
            : targetOptions;

    // One configuration key for the host, plus one per top-level (multi-cpu) variant.
    ImmutableSortedSet<Class<? extends BuildConfiguration.Fragment>> allFragments = options.getFragments()
            .fragmentClasses();
    BuildConfigurationValue.Key hostConfigurationKey = BuildConfigurationValue.key(allFragments,
            BuildOptions.diffForReconstruction(defaultBuildOptions, hostOptions));
    ImmutableList<BuildConfigurationValue.Key> targetConfigurationKeys = getTopLevelBuildOptions(targetOptions,
            options.getMultiCpu())
                    .stream()
                    .map(elem -> BuildConfigurationValue.key(allFragments,
                            BuildOptions.diffForReconstruction(defaultBuildOptions, elem)))
                    .collect(ImmutableList.toImmutableList());

    // We don't need the host configuration below, but we call this to get the error, if any.
    try {
        env.getValueOrThrow(hostConfigurationKey, InvalidConfigurationException.class);
    } catch (InvalidConfigurationException e) {
        throw new PrepareAnalysisPhaseFunctionException(e);
    }

    Map<SkyKey, SkyValue> configs = env.getValues(targetConfigurationKeys);

    // We only report invalid options for the target configurations, and abort if there's an error.
    // Missing Skyframe values show up as nulls in 'configs' and are skipped here.
    ErrorSensingEventHandler nosyEventHandler = new ErrorSensingEventHandler(env.getListener());
    targetConfigurationKeys.stream().map(k -> configs.get(k)).filter(Predicates.notNull())
            .map(v -> ((BuildConfigurationValue) v).getConfiguration())
            .forEach(config -> config.reportInvalidOptions(nosyEventHandler));
    if (nosyEventHandler.hasErrors()) {
        throw new PrepareAnalysisPhaseFunctionException(
                new InvalidConfigurationException("Build options are invalid"));
    }

    // We get the list of labels from the TargetPatternPhaseValue, so we are reasonably certain that
    // there will not be an error loading these again.
    ResolvedTargets<Target> resolvedTargets = TestSuiteExpansionFunction.labelsToTargets(env,
            options.getLabels(), false);
    if (resolvedTargets == null) {
        // Skyframe restart: dependencies are not yet available.
        return null;
    }
    ImmutableSet<Target> targets = resolvedTargets.getTargets();

    // We use a hash set here to remove duplicate nodes; this can happen for input files and package
    // groups.
    LinkedHashSet<TargetAndConfiguration> nodes = new LinkedHashSet<>(targets.size());
    for (Target target : targets) {
        if (target.isConfigurable()) {
            // Configurable targets get one node per top-level configuration.
            for (BuildConfigurationValue.Key configKey : targetConfigurationKeys) {
                BuildConfiguration config = ((BuildConfigurationValue) configs.get(configKey))
                        .getConfiguration();
                nodes.add(new TargetAndConfiguration(target, config));
            }
        } else {
            // Non-configurable targets (e.g. input files) carry a null configuration.
            nodes.add(new TargetAndConfiguration(target, null));
        }
    }

    // We'll get the configs from #resolveConfigurations below, which started out as a copy of the
    // same code in SkyframeExecutor, which gets configurations for deps including transitions. So,
    // for now, to satisfy its API we resolve transitions and repackage each target as a Dependency
    // (with a NONE transition if necessary).
    // Keep this in sync with AnalysisUtils#getTargetsWithConfigs.
    Multimap<BuildConfiguration, Dependency> asDeps = AnalysisUtils.targetsToDeps(nodes, ruleClassProvider);
    LinkedHashSet<TargetAndConfiguration> topLevelTargetsWithConfigs = resolveConfigurations(env, nodes,
            asDeps);
    if (env.valuesMissing()) {
        // Skyframe restart: configuration resolution needs more values.
        return null;
    }
    ImmutableList<ConfiguredTargetKey> topLevelCtKeys = topLevelTargetsWithConfigs.stream()
            .map(node -> ConfiguredTargetKey.of(node.getLabel(), node.getConfiguration()))
            .collect(ImmutableList.toImmutableList());
    return new PrepareAnalysisPhaseValue(hostConfigurationKey, targetConfigurationKeys, topLevelCtKeys);
}

From source file:com.google.devtools.build.lib.skyframe.PrepareAnalysisPhaseValue.java

/**
 * Returns the intended top-level targets and configurations for the build. Note that this
 * performs additional Skyframe calls for the involved configurations and targets, which may be
 * expensive.
 */
public Collection<TargetAndConfiguration> getTopLevelCts(ExtendedEventHandler eventHandler,
        SkyframeExecutor skyframeExecutor) {
    // Bulk-fetch all distinct, non-null configuration keys in a single call.
    Map<BuildConfigurationValue.Key, BuildConfiguration> configs = skyframeExecutor.getConfigurations(
            eventHandler,
            topLevelCtKeys.stream()
                    .map(ConfiguredTargetKey::getConfigurationKey)
                    .filter(java.util.Objects::nonNull)
                    .collect(Collectors.toSet()));

    // TODO(ulfjack): This performs one Skyframe call per top-level target. This is not a
    // regression, but we should fix it nevertheless, either by doing a bulk lookup call or by
    // migrating the consumers of these to Skyframe so they can directly request the values.
    List<TargetAndConfiguration> topLevelCts = new ArrayList<>(topLevelCtKeys.size());
    for (ConfiguredTargetKey ctKey : topLevelCtKeys) {
        final Target target;
        try {
            target = skyframeExecutor.getPackageManager().getTarget(eventHandler, ctKey.getLabel());
        } catch (NoSuchPackageException | NoSuchTargetException | InterruptedException e) {
            throw new RuntimeException("Failed to get package from TargetPatternPhaseValue", e);
        }
        BuildConfigurationValue.Key configKey = ctKey.getConfigurationKey();
        topLevelCts.add(new TargetAndConfiguration(target,
                configKey == null ? null : configs.get(configKey)));
    }
    return topLevelCts;
}

From source file:com.tinspx.util.io.charset.AbstractCharDet.java

/**
 * Returns the detector's errors as a null-rejecting list.
 *
 * <p>NOTE(review): Predicated.list presumably wraps the backing list so that
 * elements failing the predicate (here: null elements) are rejected — confirm
 * whether the returned list is a live view or a copy.</p>
 *
 * @return the recorded errors, guarded against null elements
 */
public List<BasicError> errors() {
    return Predicated.list(errors, Predicates.notNull());
}

From source file:org.pentaho.di.trans.dataservice.serialization.DataServiceMetaStoreUtil.java

/**
 * Loads the data service with the given name, trying each transformation
 * reference in turn until one loads successfully.
 *
 * @param serviceName name of the data service to load
 * @param repository repository used to resolve transformation references
 * @param metaStore meta store holding the service definition
 * @return the loaded data service meta
 * @throws MetaStoreException if the service is unknown or none of its
 *         transformation references could be loaded
 */
public DataServiceMeta getDataService(String serviceName, Repository repository, IMetaStore metaStore)
        throws MetaStoreException {
    ServiceTrans transReference = getServiceTransFactory(metaStore).loadElement(serviceName);
    if (transReference == null) {
        throw new MetaStoreException(MessageFormat.format("Data Service {0} not found", serviceName));
    }

    // Captures the first load failure; later failures are ignored (compareAndSet).
    final AtomicReference<Exception> loadException = new AtomicReference<Exception>();
    // Lazily try each reference: transform to a loader, record failures via the
    // callback, resolve suppliers, and stop at the first non-null TransMeta.
    Optional<TransMeta> transMeta = FluentIterable.from(transReference.getReferences())
            .transform(createTransMetaLoader(repository, new Function<Exception, Void>() {
                @Override
                public Void apply(Exception e) {
                    loadException.compareAndSet(null, e);
                    return null;
                }
            })).transform(Suppliers.<TransMeta>supplierFunction()).firstMatch(Predicates.notNull());

    if (transMeta.isPresent()) {
        return getDataService(serviceName, transMeta.get());
    } else {
        // Surface the first recorded failure (may be null if loads returned null without error).
        throw new MetaStoreException(MessageFormat.format("Failed to load Data Service {0}", serviceName),
                loadException.get());
    }
}

From source file:org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider.java

/**
 * Returns the bits for the given privileges.
 *
 * <p>Privileges that are null or whose name cannot be mapped to an Oak name
 * are skipped.</p>
 *
 * @param privileges the privileges
 * @param nameMapper the name mapper
 * @return the privilege bits
 */
@Nonnull
public PrivilegeBits getBits(@Nonnull Privilege[] privileges, @Nonnull final NameMapper nameMapper) {
    // Map each privilege to its Oak name; unresolvable entries become null and
    // are filtered out below.
    Function<Privilege, String> toOakName = privilege -> {
        if (privilege == null) {
            return null;
        }
        try {
            return nameMapper.getOakName(privilege.getName());
        } catch (RepositoryException e) {
            log.debug("Unable to resolve OAK name of privilege " + privilege, e);
            return null;
        }
    };
    return getBits(Iterables.filter(
            Iterables.transform(Arrays.asList(privileges), toOakName),
            Predicates.notNull()));
}

From source file:org.robotframework.ide.eclipse.main.plugin.model.RobotProject.java

/**
 * Returns all library specifications known to this project — standard libraries
 * first, then referenced libraries — with null entries removed.
 *
 * @return a mutable list of the non-null library specifications
 */
public Collection<LibrarySpecification> getLibrariesSpecifications() {
    final List<LibrarySpecification> allSpecifications = newArrayList();
    allSpecifications.addAll(getStandardLibraries().values());
    allSpecifications.addAll(getReferencedLibraries().values());

    // Copy over everything except null entries.
    final List<LibrarySpecification> nonNullSpecifications = newArrayList();
    for (final LibrarySpecification specification : allSpecifications) {
        if (specification != null) {
            nonNullSpecifications.add(specification);
        }
    }
    return nonNullSpecifications;
}