Example usage for com.google.common.base Predicates instanceOf

List of usage examples for com.google.common.base Predicates instanceOf

Introduction

On this page you can find example usages for com.google.common.base Predicates instanceOf.

Prototype

@GwtIncompatible("Class.isInstance")
public static Predicate<Object> instanceOf(Class<?> clazz) 

Source Link

Document

Returns a predicate that evaluates to true if the object being tested is an instance of the given class.
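
The usages collected below mostly pair this predicate with other Guava utilities such as Iterables.filter, Iterables.all and Iterators.tryFind. As a minimal, self-contained sketch of that pattern (class and variable names here are illustrative only, not taken from any of the source files below):

import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

import java.util.List;

public class InstanceOfExample {
    public static void main(String[] args) {
        // A mixed-type list to filter (hypothetical sample data).
        final List<Object> values = ImmutableList.<Object>of("a", 1, 2.0, "b");

        // The predicate itself: evaluates to true only for instances of the given class.
        boolean isString = Predicates.instanceOf(String.class).apply("a"); // true

        // Typical combinations, mirroring the usages collected below.
        Iterable<Object> onlyStrings = Iterables.filter(values, Predicates.instanceOf(String.class)); // yields "a" and "b"
        boolean allStrings = Iterables.all(values, Predicates.instanceOf(String.class)); // false

        System.out.println(isString + " / " + onlyStrings + " / " + allStrings);
    }
}

Note that filtering with this predicate does not narrow the element type; if you want an Iterable<String> back, use the class-based overload Iterables.filter(values, String.class) instead.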

Usage

From source file:com.b2international.snowowl.snomed.importer.rf2.util.ImportUtil.java

private SnomedImportResult doImportInternal(final SnomedImportContext context, final String requestingUserId,
        final ImportConfiguration configuration, final IProgressMonitor monitor) {
    final SubMonitor subMonitor = SubMonitor.convert(monitor, "Importing release files...", 17);
    final SnomedImportResult result = new SnomedImportResult();

    CodeSystemEntry codeSystem = CodeSystemRequests.prepareGetCodeSystem(configuration.getCodeSystemShortName())
            .build(SnomedDatastoreActivator.REPOSITORY_UUID).execute(getEventBus()).getSync();

    IBranchPath codeSystemPath = BranchPathUtils.createPath(codeSystem.getBranchPath());
    String importPath = configuration.getBranchPath();
    final IBranchPath branchPath;

    if (importPath.startsWith(IBranchPath.MAIN_BRANCH)) {
        IBranchPath candidate = BranchPathUtils.createPath(importPath);
        Iterator<IBranchPath> iterator = BranchPathUtils.bottomToTopIterator(candidate);
        boolean found = false;

        while (iterator.hasNext()) {
            candidate = iterator.next();
            if (codeSystemPath.equals(candidate)) {
                found = true;
                break;
            }
        }

        if (!found) {
            throw new ImportException("Import path %s is not valid for code system %s.", importPath,
                    configuration.getCodeSystemShortName());
        }

        branchPath = BranchPathUtils.createPath(importPath); // importPath is absolute
    } else {
        branchPath = BranchPathUtils.createPath(codeSystemPath, importPath); // importPath is relative to the code system's work branch
    }

    LogUtils.logImportActivity(IMPORT_LOGGER, requestingUserId, branchPath,
            "SNOMED CT import started from RF2 release format.");

    final RepositoryState repositoryState = getIndex().read(configuration.getBranchPath(),
            new RevisionIndexRead<RepositoryState>() {
                @Override
                public RepositoryState execute(RevisionSearcher searcher) throws IOException {
                    return loadRepositoryState(searcher);
                }
            });

    if (!isContentValid(repositoryState, result, requestingUserId, configuration, branchPath, subMonitor)) {
        LogUtils.logImportActivity(IMPORT_LOGGER, requestingUserId, branchPath,
                "SNOMED CT import failed due to invalid RF2 release file(s).");
        return result;
    }

    final Set<URL> patchedRefSetURLs = Sets.newHashSet(configuration.getRefSetUrls());
    final Set<String> patchedExcludedRefSetIDs = Sets.newHashSet(configuration.getExcludedRefSetIds());
    final List<Importer> importers = Lists.newArrayList();

    final File stagingDirectoryRoot = new File(System.getProperty("java.io.tmpdir"));

    context.setVersionCreationEnabled(configuration.isCreateVersions());
    context.setLogger(IMPORT_LOGGER);
    context.setStagingDirectory(stagingDirectoryRoot);
    context.setContentSubType(configuration.getContentSubType());
    context.setIgnoredRefSetIds(patchedExcludedRefSetIDs);
    context.setReleasePatch(configuration.isReleasePatch());
    context.setPatchReleaseVersion(configuration.getPatchReleaseVersion());
    context.setCodeSystemShortName(configuration.getCodeSystemShortName());

    try {

        if (configuration.isValidReleaseFile(configuration.getConceptFile())) {
            final URL url = configuration.toURL(configuration.getConceptFile());
            importers.add(new SnomedConceptImporter(context, url.openStream(),
                    configuration.getMappedName(url.getPath())));
        }

        for (File descriptionFile : configuration.getDescriptionFiles()) {
            if (configuration.isValidReleaseFile(descriptionFile)) {
                final URL url = configuration.toURL(descriptionFile);
                importers.add(new SnomedDescriptionImporter(context, url.openStream(),
                        configuration.getMappedName(url.getPath()), ComponentImportType.DESCRIPTION));
            }
        }

        for (File textFile : configuration.getTextDefinitionFiles()) {
            if (configuration.isValidReleaseFile(textFile)) {
                final URL url = configuration.toURL(textFile);
                importers.add(new SnomedDescriptionImporter(context, url.openStream(),
                        configuration.getMappedName(url.getPath()), ComponentImportType.TEXT_DEFINITION));
            }

        }

        if (configuration.isValidReleaseFile(configuration.getRelationshipFile())) {
            final URL url = configuration.toURL(configuration.getRelationshipFile());
            importers.add(new SnomedRelationshipImporter(context, url.openStream(),
                    configuration.getMappedName(url.getPath()), ComponentImportType.RELATIONSHIP));
        }

        if (configuration.isValidReleaseFile(configuration.getStatedRelationshipFile())) {
            final URL url = configuration.toURL(configuration.getStatedRelationshipFile());
            importers.add(new SnomedRelationshipImporter(context, url.openStream(),
                    configuration.getMappedName(url.getPath()), ComponentImportType.STATED_RELATIONSHIP));
        }

    } catch (final IOException e) {
        final String reason = null != e.getMessage() ? " Reason: '" + e.getMessage() + "'" : "";
        LogUtils.logImportActivity(IMPORT_LOGGER, requestingUserId, branchPath,
                "SNOMED CT import failed due to invalid RF2 release file URL." + reason);
        throw new ImportException("Invalid release file URL(s).", e);
    }

    for (final URL url : patchedRefSetURLs) {

        try {

            final AbstractSnomedRefSetImporter<?, ?> createRefSetImporter = SnomedRefSetImporterFactory
                    .createRefSetImporter(url, context, configuration.getMappedName(url.getPath()));

            if (createRefSetImporter == null) {
                final String message = MessageFormat
                        .format("Skipping unsupported reference set with URL ''{0}''.", url);
                LogUtils.logImportActivity(IMPORT_LOGGER, requestingUserId, branchPath, message);
                IMPORT_LOGGER.info(message);
            } else {
                importers.add(createRefSetImporter);
            }

        } catch (final IOException e) {
            final String reason = null != e.getMessage() ? " Reason: '" + e.getMessage() + "'" : "";
            LogUtils.logImportActivity(IMPORT_LOGGER, requestingUserId, branchPath,
                    "SNOMED CT import failed due to I/O error while creating reference set importer." + reason);
            throw new ImportException("I/O error occurred while creating reference set importer.", e);
        }
    }

    final boolean terminologyExistsBeforeImport = getIndex().read(BranchPathUtils.createMainPath().getPath(),
            new RevisionIndexRead<Boolean>() {
                @Override
                public Boolean execute(RevisionSearcher index) throws IOException {
                    return index.search(Query.select(SnomedConceptDocument.class)
                            .where(SnomedConceptDocument.Expressions.id(Concepts.ROOT_CONCEPT)).limit(0)
                            .build()).getTotal() > 0;
                }
            });
    final boolean onlyRefSetImportersRegistered = Iterables.all(importers,
            Predicates.instanceOf(AbstractSnomedRefSetImporter.class));

    /*
     * Commit notifications for changes made by the import should only be sent if the terminology already exists,
     * and only changes for reference sets are coming in from the import files. 
     */
    context.setCommitNotificationEnabled(terminologyExistsBeforeImport && onlyRefSetImportersRegistered);
    context.setUserId(requestingUserId);

    final ICDOConnectionManager connectionManager = ApplicationContext.getInstance()
            .getService(ICDOConnectionManager.class);
    final CDOBranch branch = connectionManager.get(SnomedPackage.eINSTANCE).getBranch(branchPath);

    if (null == branch) {
        throw new ImportException("Branch does not exist. [" + branchPath + "]");
    }

    final SnomedEditingContext editingContext = new SnomedEditingContext(branchPath);
    context.setEditingContext(editingContext);
    context.setAggregatorSupplier(
            new EffectiveTimeBaseTransactionAggregatorSupplier(editingContext.getTransaction()));

    final IOperationLockTarget lockTarget = new SingleRepositoryAndBranchLockTarget(
            editingContext.getTransaction().getSession().getRepositoryInfo().getUUID(), branchPath);
    final DatastoreLockContext lockContext = new DatastoreLockContext(requestingUserId,
            DatastoreLockContextDescriptions.IMPORT);
    final SnomedImportResult[] resultHolder = new SnomedImportResult[1];
    final IDatastoreOperationLockManager lockManager = ApplicationContext.getInstance()
            .getServiceChecked(IDatastoreOperationLockManager.class);

    final FeatureToggles features = ApplicationContext.getServiceForClass(FeatureToggles.class);
    final String importFeatureToggle = Features.getImportFeatureToggle(SnomedDatastoreActivator.REPOSITORY_UUID,
            branchPath.getPath(), configuration.getContentSubType().getLowerCaseName());
    try {
        features.enable(importFeatureToggle);
        OperationLockRunner.with(lockManager).run(new Runnable() {
            @Override
            public void run() {
                resultHolder[0] = doImportLocked(requestingUserId, configuration, result, branchPath, context,
                        subMonitor, importers, editingContext, branch, repositoryState);
            }
        }, lockContext, IOperationLockManager.NO_TIMEOUT, lockTarget);
    } catch (final OperationLockException | InterruptedException e) {
        throw new ImportException("Caught exception while locking repository for import.", e);
    } catch (final InvocationTargetException e) {
        throw new ImportException("Failed to import RF2 release.", e.getCause());
    } finally {
        features.disable(importFeatureToggle);
    }

    return resultHolder[0];
}

From source file:org.eclipse.emf.compare.merge.AbstractMerger.java

/**
 * Returns the master equivalence for a {@link FeatureMapChange}.
 *
 * @see AbstractMerger#findMasterEquivalence(Diff, boolean)
 * @param diff
 *            The {@link Diff} whose equivalence we need to check for a 'master' difference.
 * @param mergeRightToLeft
 *            Direction of the current merging.
 * @return The master difference of {@code diff} and its equivalent diffs. This method may return
 *         <code>null</code> if there is no master diff.
 */
private Diff getMasterEquivalenceForFeatureMapChange(FeatureMapChange diff, boolean mergeRightToLeft) {
    if (diff.getKind() == DifferenceKind.MOVE) {
        final Comparison comparison = diff.getMatch().getComparison();
        final FeatureMap.Entry entry = (FeatureMap.Entry) diff.getValue();

        if (entry.getValue() instanceof EObject) {
            final Match valueMatch = comparison.getMatch((EObject) entry.getValue());

            final EObject expectedValue = ComparisonUtil.getExpectedSide(valueMatch, diff.getSource(),
                    mergeRightToLeft);

            // Try to find the ReferenceChange-MasterEquivalence when the expected value will not be
            // contained in a FeatureMap
            if (!ComparisonUtil.isContainedInFeatureMap(expectedValue)) {
                return Iterators.tryFind(diff.getEquivalence().getDifferences().iterator(),
                        Predicates.instanceOf(ReferenceChange.class)).orNull();
            }
        }

    }
    return null;
}

From source file:org.obeonetwork.dsl.uml2.design.services.ActivityServices.java

/**
 * Get all the behaviors available in the semantic resources.
 *
 * @param element
 *            Semantic element
 * @return All the behaviors
 */
public List<EObject> getAllBehaviors(Element element) {
    List<EObject> behaviors = Lists.newArrayList();
    UMLServices umlServices = new UMLServices();
    List<org.eclipse.uml2.uml.Package> rootPkgs = umlServices.getAllAvailableRootPackages(element);
    for (org.eclipse.uml2.uml.Package pkg : rootPkgs) {
        Iterators.addAll(behaviors,
                Iterators.filter(pkg.eAllContents(), Predicates.instanceOf(Behavior.class)));
    }

    return behaviors;
}

From source file:org.apache.brooklyn.entity.group.DynamicClusterImpl.java

@Override
public void restart() {
    String mode = getConfig(RESTART_MODE);
    if (mode == null) {
        throw new UnsupportedOperationException(
                "Restart not supported for this cluster: " + RESTART_MODE.getName() + " is not configured.");
    }
    if ("off".equalsIgnoreCase(mode)) {
        throw new UnsupportedOperationException("Restart not supported for this cluster.");
    }

    if ("sequential".equalsIgnoreCase(mode)) {
        ServiceStateLogic.setExpectedState(this, Lifecycle.STARTING);
        DynamicTasks
                .queue(Effectors.invocationSequential(Startable.RESTART, null, Iterables.filter(getChildren(),
                        Predicates.and(Predicates.instanceOf(Startable.class), EntityPredicates.isManaged()))));
    } else if ("parallel".equalsIgnoreCase(mode)) {
        ServiceStateLogic.setExpectedState(this, Lifecycle.STARTING);
        DynamicTasks.queue(Effectors.invocationParallel(Startable.RESTART, null, Iterables.filter(getChildren(),
                Predicates.and(Predicates.instanceOf(Startable.class), EntityPredicates.isManaged()))));
    } else {
        throw new IllegalArgumentException("Unknown " + RESTART_MODE.getName() + " '" + mode + "'");
    }

    DynamicTasks.waitForLast();
    ServiceStateLogic.setExpectedState(this, Lifecycle.RUNNING);
}

From source file:org.dasein.cloud.jclouds.vcloud.director.compute.VAppTemplateSupport.java

public static <S extends SectionType> S getSection(VAppTemplate template, Class<S> sectionClass) {
    S section = (S) Iterables.find(template.getSections(), Predicates.instanceOf(sectionClass));
    return section;
}

From source file:de.sanandrew.mods.claysoldiers.entity.EntityClayMan.java

@SuppressWarnings("unchecked")
public Collection<EntityClayMan> getSoldiersInRange() {
    return Collections2.transform(
            Collections2.filter(this.p_entitiesInRange, Predicates.instanceOf(EntityClayMan.class)),
            new Function<Object, EntityClayMan>() {
                @Override
                public EntityClayMan apply(Object input) {
                    return (EntityClayMan) input;
                }
            });
}

From source file:org.sosy_lab.cpachecker.util.predicates.pathformula.PathFormulaManagerImpl.java

/**
 * Build a formula containing a predicate for all branching situations in the
 * ARG. If a satisfying assignment is created for this formula, it can be used
 * to find out which paths in the ARG are feasible.
 *
 * This method may be called with an empty set, in which case it does nothing
 * and returns the formula "true".
 *
 * @param elementsOnPath The ARG states that should be considered.
 * @return A formula containing a predicate for each branching.
 */
@Override
public BooleanFormula buildBranchingFormula(Iterable<ARGState> elementsOnPath)
        throws CPATransferException, InterruptedException {
    // build the branching formula that will help us find the real error path
    BooleanFormula branchingFormula = bfmgr.makeBoolean(true);
    for (final ARGState pathElement : elementsOnPath) {

        if (pathElement.getChildren().size() > 1) {
            if (pathElement.getChildren().size() > 2) {
                // can't create branching formula
                if (from(pathElement.getChildren()).anyMatch(AbstractStates.IS_TARGET_STATE)) {
                    // We expect this situation if one of the children is a target state created by PredicateCPA.
                    continue;
                } else {
                    logger.log(Level.WARNING, "ARG branching with more than two outgoing edges at ARG node "
                            + pathElement.getStateId() + ".");
                    return bfmgr.makeBoolean(true);
                }
            }

            FluentIterable<CFAEdge> outgoingEdges = from(pathElement.getChildren())
                    .transform(new Function<ARGState, CFAEdge>() {
                        @Override
                        public CFAEdge apply(ARGState child) {
                            return pathElement.getEdgeToChild(child);
                        }
                    });
            if (!outgoingEdges.allMatch(Predicates.instanceOf(AssumeEdge.class))) {
                if (from(pathElement.getChildren()).anyMatch(AbstractStates.IS_TARGET_STATE)) {
                    // We expect this situation if one of the children is a target state created by PredicateCPA.
                    continue;
                } else {
                    logger.log(Level.WARNING,
                            "ARG branching without AssumeEdge at ARG node " + pathElement.getStateId() + ".");
                    return bfmgr.makeBoolean(true);
                }
            }

            AssumeEdge edge = null;
            for (CFAEdge currentEdge : outgoingEdges) {
                if (((AssumeEdge) currentEdge).getTruthAssumption()) {
                    edge = (AssumeEdge) currentEdge;
                    break;
                }
            }
            assert edge != null;
            BooleanFormula pred = bfmgr.makeVariable(BRANCHING_PREDICATE_NAME + pathElement.getStateId(), 0);

            // create formula by edge, be sure to use the correct SSA indices!
            // TODO the class PathFormulaManagerImpl should not depend on PredicateAbstractState,
            // it is used without PredicateCPA as well.
            PathFormula pf;
            PredicateAbstractState pe = AbstractStates.extractStateByType(pathElement,
                    PredicateAbstractState.class);
            if (pe == null) {
                logger.log(Level.WARNING, "Cannot find precise error path information without PredicateCPA");
                return bfmgr.makeBoolean(true);
            } else {
                pf = pe.getPathFormula();
            }
            pf = this.makeEmptyPathFormula(pf); // reset everything except SSAMap
            pf = this.makeAnd(pf, edge); // conjunct with edge

            BooleanFormula equiv = bfmgr.equivalence(pred, pf.getFormula());
            branchingFormula = bfmgr.and(branchingFormula, equiv);
        }
    }
    return branchingFormula;
}

From source file:org.obeonetwork.dsl.uml2.design.services.ActivityServices.java

/**
 * Get all the signals available in the semantic resources.
 *
 * @param element
 *            Semantic element
 * @return All the signals
 */
public List<EObject> getAllSignals(Element element) {
    List<EObject> signals = Lists.newArrayList();
    UMLServices umlServices = new UMLServices();
    List<org.eclipse.uml2.uml.Package> rootPkgs = umlServices.getAllAvailableRootPackages(element);
    for (org.eclipse.uml2.uml.Package pkg : rootPkgs) {
        Iterators.addAll(signals, Iterators.filter(pkg.eAllContents(), Predicates.instanceOf(Signal.class)));
    }

    return signals;
}

From source file:org.apache.impala.analysis.SelectStmt.java

/**
 * Analyze aggregation-relevant components of the select block (Group By clause,
 * select list, Order By clause), substitute AVG with SUM/COUNT, create the
 * AggregationInfo, including the agg output tuple, and transform all post-agg exprs
 * given AggregationInfo's smap.
 */
private void analyzeAggregation(Analyzer analyzer) throws AnalysisException {
    // Analyze the HAVING clause first so we can check if it contains aggregates.
    // We need to analyze/register it even if we are not computing aggregates.
    if (havingClause_ != null) {
        if (havingClause_.contains(Predicates.instanceOf(Subquery.class))) {
            throw new AnalysisException("Subqueries are not supported in the HAVING clause.");
        }
        // substitute aliases in place (ordinals not allowed in having clause)
        havingPred_ = havingClause_.substitute(aliasSmap_, analyzer, false);
        havingPred_.checkReturnsBool("HAVING clause", true);
        // can't contain analytic exprs
        Expr analyticExpr = havingPred_.findFirstOf(AnalyticExpr.class);
        if (analyticExpr != null) {
            throw new AnalysisException(
                    "HAVING clause must not contain analytic expressions: " + analyticExpr.toSql());
        }
    }

    if (groupingExprs_ == null && !selectList_.isDistinct()
            && !TreeNode.contains(resultExprs_, Expr.isAggregatePredicate())
            && (havingPred_ == null || !havingPred_.contains(Expr.isAggregatePredicate())) && (sortInfo_ == null
                    || !TreeNode.contains(sortInfo_.getOrderingExprs(), Expr.isAggregatePredicate()))) {
        // We're not computing aggregates but we still need to register the HAVING
        // clause which could, e.g., contain a constant expression evaluating to false.
        if (havingPred_ != null)
            analyzer.registerConjuncts(havingPred_, true);
        return;
    }

    // If we're computing an aggregate, we must have a FROM clause.
    if (fromClause_.isEmpty()) {
        throw new AnalysisException("aggregation without a FROM clause is not allowed");
    }

    if (selectList_.isDistinct()
            && (groupingExprs_ != null || TreeNode.contains(resultExprs_, Expr.isAggregatePredicate())
                    || (havingPred_ != null && havingPred_.contains(Expr.isAggregatePredicate())))) {
        throw new AnalysisException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY");
    }

    // Disallow '*' with explicit GROUP BY or aggregation function (we can't group by
    // '*', and if you need to name all star-expanded cols in the group by clause you
    // might as well do it in the select list).
    if (groupingExprs_ != null || TreeNode.contains(resultExprs_, Expr.isAggregatePredicate())) {
        for (SelectListItem item : selectList_.getItems()) {
            if (item.isStar()) {
                throw new AnalysisException("cannot combine '*' in select list with grouping or aggregation");
            }
        }
    }

    // disallow subqueries in the GROUP BY clause
    if (groupingExprs_ != null) {
        for (Expr expr : groupingExprs_) {
            if (expr.contains(Predicates.instanceOf(Subquery.class))) {
                throw new AnalysisException("Subqueries are not supported in the GROUP BY clause.");
            }
        }
    }

    // analyze grouping exprs
    ArrayList<Expr> groupingExprsCopy = Lists.newArrayList();
    if (groupingExprs_ != null) {
        // make a deep copy here, we don't want to modify the original
        // exprs during analysis (in case we need to print them later)
        groupingExprsCopy = Expr.cloneList(groupingExprs_);

        substituteOrdinalsAliases(groupingExprsCopy, "GROUP BY", analyzer);

        for (int i = 0; i < groupingExprsCopy.size(); ++i) {
            groupingExprsCopy.get(i).analyze(analyzer);
            if (groupingExprsCopy.get(i).contains(Expr.isAggregatePredicate())) {
                // reference the original expr in the error msg
                throw new AnalysisException("GROUP BY expression must not contain aggregate functions: "
                        + groupingExprs_.get(i).toSql());
            }
            if (groupingExprsCopy.get(i).contains(AnalyticExpr.class)) {
                // reference the original expr in the error msg
                throw new AnalysisException("GROUP BY expression must not contain analytic expressions: "
                        + groupingExprsCopy.get(i).toSql());
            }
        }
    }

    // Collect the aggregate expressions from the SELECT, HAVING and ORDER BY clauses
    // of this statement.
    ArrayList<FunctionCallExpr> aggExprs = Lists.newArrayList();
    TreeNode.collect(resultExprs_, Expr.isAggregatePredicate(), aggExprs);
    if (havingPred_ != null) {
        havingPred_.collect(Expr.isAggregatePredicate(), aggExprs);
    }
    if (sortInfo_ != null) {
        // TODO: Avoid evaluating aggs in ignored order-bys
        TreeNode.collect(sortInfo_.getOrderingExprs(), Expr.isAggregatePredicate(), aggExprs);
    }

    // Optionally rewrite all count(distinct <expr>) into equivalent NDV() calls.
    ExprSubstitutionMap ndvSmap = null;
    if (analyzer.getQueryCtx().client_request.query_options.appx_count_distinct) {
        ndvSmap = new ExprSubstitutionMap();
        for (FunctionCallExpr aggExpr : aggExprs) {
            if (!aggExpr.isDistinct() || !aggExpr.getFnName().getFunction().equals("count")
                    || aggExpr.getParams().size() != 1) {
                continue;
            }
            FunctionCallExpr ndvFnCall = new FunctionCallExpr("ndv", aggExpr.getParams().exprs());
            ndvFnCall.analyzeNoThrow(analyzer);
            Preconditions.checkState(ndvFnCall.getType().equals(aggExpr.getType()));
            ndvSmap.put(aggExpr, ndvFnCall);
        }
        // Replace all count(distinct <expr>) with NDV(<expr>).
        List<Expr> substAggExprs = Expr.substituteList(aggExprs, ndvSmap, analyzer, false);
        aggExprs.clear();
        for (Expr aggExpr : substAggExprs) {
            Preconditions.checkState(aggExpr instanceof FunctionCallExpr);
            aggExprs.add((FunctionCallExpr) aggExpr);
        }
    }

    // When DISTINCT aggregates are present, non-distinct (i.e. ALL) aggregates are
    // evaluated in two phases (see AggregateInfo for more details). In particular,
    // COUNT(c) in "SELECT COUNT(c), AGG(DISTINCT d) from R" is transformed to
    // "SELECT SUM(cnt) FROM (SELECT COUNT(c) as cnt from R group by d ) S".
    // Since a group-by expression is added to the inner query it returns no rows if
    // R is empty, in which case the SUM of COUNTs will return NULL.
    // However the original COUNT(c) should have returned 0 instead of NULL in this case.
    // Therefore, COUNT([ALL]) is transformed into zeroifnull(COUNT([ALL]) if
    // i) There is no GROUP-BY clause, and
    // ii) Other DISTINCT aggregates are present.
    ExprSubstitutionMap countAllMap = createCountAllMap(aggExprs, analyzer);
    countAllMap = ExprSubstitutionMap.compose(ndvSmap, countAllMap, analyzer);
    List<Expr> substitutedAggs = Expr.substituteList(aggExprs, countAllMap, analyzer, false);
    aggExprs.clear();
    TreeNode.collect(substitutedAggs, Expr.isAggregatePredicate(), aggExprs);
    createAggInfo(groupingExprsCopy, aggExprs, analyzer);

    // combine avg smap with the one that produces the final agg output
    AggregateInfo finalAggInfo = aggInfo_.getSecondPhaseDistinctAggInfo() != null
            ? aggInfo_.getSecondPhaseDistinctAggInfo()
            : aggInfo_;

    ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose(countAllMap, finalAggInfo.getOutputSmap(),
            analyzer);

    // change select list, having and ordering exprs to point to agg output. We need
    // to reanalyze the exprs at this point.
    if (LOG.isTraceEnabled()) {
        LOG.trace("combined smap: " + combinedSmap.debugString());
        LOG.trace("desctbl: " + analyzer.getDescTbl().debugString());
        LOG.trace("resultexprs: " + Expr.debugString(resultExprs_));
    }
    resultExprs_ = Expr.substituteList(resultExprs_, combinedSmap, analyzer, false);
    if (LOG.isTraceEnabled()) {
        LOG.trace("post-agg selectListExprs: " + Expr.debugString(resultExprs_));
    }
    if (havingPred_ != null) {
        // Make sure the predicate in the HAVING clause does not contain a
        // subquery.
        Preconditions.checkState(!havingPred_.contains(Predicates.instanceOf(Subquery.class)));
        havingPred_ = havingPred_.substitute(combinedSmap, analyzer, false);
        analyzer.registerConjuncts(havingPred_, true);
        if (LOG.isTraceEnabled()) {
            LOG.trace("post-agg havingPred: " + havingPred_.debugString());
        }
    }
    if (sortInfo_ != null) {
        sortInfo_.substituteOrderingExprs(combinedSmap, analyzer);
        if (LOG.isTraceEnabled()) {
            LOG.trace("post-agg orderingExprs: " + Expr.debugString(sortInfo_.getOrderingExprs()));
        }
    }

    // check that all post-agg exprs point to agg output
    for (int i = 0; i < selectList_.getItems().size(); ++i) {
        if (!resultExprs_.get(i).isBound(finalAggInfo.getOutputTupleId())) {
            SelectListItem selectListItem = selectList_.getItems().get(i);
            Preconditions.checkState(!selectListItem.isStar());
            throw new AnalysisException("select list expression not produced by aggregation output "
                    + "(missing from GROUP BY clause?): " + selectListItem.getExpr().toSql());
        }
    }
    if (orderByElements_ != null) {
        for (int i = 0; i < orderByElements_.size(); ++i) {
            if (!sortInfo_.getOrderingExprs().get(i).isBound(finalAggInfo.getOutputTupleId())) {
                throw new AnalysisException("ORDER BY expression not produced by aggregation output "
                        + "(missing from GROUP BY clause?): " + orderByElements_.get(i).getExpr().toSql());
            }
        }
    }
    if (havingPred_ != null) {
        if (!havingPred_.isBound(finalAggInfo.getOutputTupleId())) {
            throw new AnalysisException("HAVING clause not produced by aggregation output "
                    + "(missing from GROUP BY clause?): " + havingClause_.toSql());
        }
    }
}

From source file:de.sanandrew.mods.claysoldiers.entity.EntityClayMan.java

@SuppressWarnings("unchecked")
public Collection<EntityItem> getItemsInRange() {
    return Collections2.transform(
            Collections2.filter(this.p_entitiesInRange, Predicates.instanceOf(EntityItem.class)),
            new Function<Object, EntityItem>() {
                @Override
                public EntityItem apply(Object input) {
                    return (EntityItem) input;
                }
            });
}