Example usage for com.google.common.base Predicates instanceOf

List of usage examples for com.google.common.base Predicates instanceOf

Introduction

This page lists example usages of com.google.common.base Predicates.instanceOf.

Prototype

@GwtIncompatible("Class.isInstance")
public static Predicate<Object> instanceOf(Class<?> clazz) 

Document

Returns a predicate that evaluates to true if the object being tested is an instance of the given class.
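
For orientation, here is a minimal, self-contained sketch (not taken from the projects below) showing the basic pattern of filtering a heterogeneous collection with this predicate:

import java.util.Arrays;
import java.util.List;

import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

public class InstanceOfExample {
    public static void main(String[] args) {
        List<Object> mixed = Arrays.<Object>asList("a", 1, 2.5, "b");
        // Keep only the String elements. The predicate delegates to
        // Class.isInstance, so null elements never match.
        Iterable<Object> strings = Iterables.filter(mixed, Predicates.instanceOf(String.class));
        System.out.println(ImmutableList.copyOf(strings)); // [a, b]
    }
}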

Usage

From source file:org.eclipse.sirius.diagram.sequence.business.internal.refresh.SequenceCanonicalSynchronizerAdapter.java

private Set<ISequenceEvent> getEventEndsAfterUpperRange(SequenceDiagram sequenceDiagram, Lifeline lifeline,
        int upperRange, Set<ISequenceEvent> sequenceEventToIgnores) {
    Set<ISequenceEvent> eventEndsAfterUpperRange = new TreeSet<ISequenceEvent>(new RangeComparator());
    Set<ISequenceEvent> allSequenceEventsInUpperRange = new SequenceDiagramQuery(sequenceDiagram)
            .getAllSequenceEventsUpperThan(upperRange);
    allSequenceEventsInUpperRange.removeAll(sequenceEventToIgnores);
    eventEndsAfterUpperRange.addAll(
            Sets.filter(allSequenceEventsInUpperRange, Predicates.not(Predicates.instanceOf(Lifeline.class))));
    return eventEndsAfterUpperRange;
}

From source file:org.obeonetwork.dsl.uml2.core.api.services.ActivityDiagramServices.java

/**
 * Get all the signals available in the semantic resources.
 *
 * @param element
 *            Semantic element
 * @return All the signals
 */
public List<EObject> getAllSignals(Element element) {
    final List<EObject> signals = Lists.newArrayList();
    final List<org.eclipse.uml2.uml.Package> rootPkgs = getAllAvailableRootPackages(element);
    for (final org.eclipse.uml2.uml.Package pkg : rootPkgs) {
        Iterators.addAll(signals, Iterators.filter(pkg.eAllContents(), Predicates.instanceOf(Signal.class)));
    }

    return signals;
}
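
A side note on the snippet above (my observation, not part of the Obeo source): when the collected elements can be typed to the filtered class rather than EObject, Guava's Class-based overload Iterators.filter(Iterator<?>, Class<T>) returns an Iterator<T> directly and removes the need for Predicates.instanceOf. A minimal sketch with plain types:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;

public class TypedIteratorFilterExample {
    public static void main(String[] args) {
        Iterator<Object> contents = Arrays.<Object>asList("a", 1, "b", 2L).iterator();
        // The Class-based overload narrows the element type, so the collected
        // elements need no cast.
        List<String> strings = Lists.newArrayList();
        Iterators.addAll(strings, Iterators.filter(contents, String.class));
        System.out.println(strings); // [a, b]
    }
}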

From source file:com.abiquo.server.core.common.EnvironmentGenerator.java

/**
 * Get the entity of the given class in the current environment.
 *
 * @param clazz The class of the entity to get.
 * @return The entity of the given class in the current environment.
 */
@SuppressWarnings("unchecked")
public <T> T get(final Class<T> clazz) {
    return (T) Iterables.find(entities, Predicates.instanceOf(clazz));
}
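
One caveat worth noting (my addition, not Abiquo's documentation): the two-argument Iterables.find throws NoSuchElementException when no element matches the predicate. If the entity may be absent, Guava also offers a defaulting overload and Iterables.tryFind; a minimal standalone sketch:

import java.util.Arrays;
import java.util.List;

import com.google.common.base.Optional;
import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;

public class FindByTypeExample {
    public static void main(String[] args) {
        List<Object> entities = Arrays.<Object>asList("config", 42);
        // tryFind wraps the result in an Optional instead of throwing.
        Optional<Object> missing = Iterables.tryFind(entities, Predicates.instanceOf(Double.class));
        System.out.println(missing.isPresent()); // false
        // find without a default value throws NoSuchElementException if nothing matches.
        Object found = Iterables.find(entities, Predicates.instanceOf(Integer.class));
        System.out.println(found); // 42
    }
}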

From source file:com.cloudera.impala.analysis.SelectStmt.java

/**
 * Analyze aggregation-relevant components of the select block (Group By clause,
 * select list, Order By clause), substitute AVG with SUM/COUNT, create the
 * AggregationInfo, including the agg output tuple, and transform all post-agg exprs
 * given AggregationInfo's smap.
 */
private void analyzeAggregation(Analyzer analyzer) throws AnalysisException {
    if (groupingExprs_ == null && !selectList_.isDistinct()
            && !TreeNode.contains(resultExprs_, Expr.isAggregatePredicate()) && (sortInfo_ == null
                    || !TreeNode.contains(sortInfo_.getOrderingExprs(), Expr.isAggregatePredicate()))) {
        // we're not computing aggregates
        return;
    }

    // If we're computing an aggregate, we must have a FROM clause.
    if (tableRefs_.size() == 0) {
        throw new AnalysisException("aggregation without a FROM clause is not allowed");
    }

    if ((groupingExprs_ != null || TreeNode.contains(resultExprs_, Expr.isAggregatePredicate()))
            && selectList_.isDistinct()) {
        throw new AnalysisException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY");
    }

    // disallow '*' and explicit GROUP BY (we can't group by '*', and if you need to
    // name all star-expanded cols in the group by clause you might as well do it
    // in the select list)
    if (groupingExprs_ != null) {
        for (SelectListItem item : selectList_.getItems()) {
            if (item.isStar()) {
                throw new AnalysisException("cannot combine '*' in select list with GROUP BY: " + item.toSql());
            }
        }
    }

    // disallow subqueries in the GROUP BY clause
    if (groupingExprs_ != null) {
        for (Expr expr : groupingExprs_) {
            if (expr.contains(Predicates.instanceOf(Subquery.class))) {
                throw new AnalysisException("Subqueries are not supported in the GROUP BY clause.");
            }
        }
    }

    // analyze grouping exprs
    ArrayList<Expr> groupingExprsCopy = Lists.newArrayList();
    if (groupingExprs_ != null) {
        // make a deep copy here, we don't want to modify the original
        // exprs during analysis (in case we need to print them later)
        groupingExprsCopy = Expr.cloneList(groupingExprs_);
        substituteOrdinals(groupingExprsCopy, "GROUP BY", analyzer);
        Expr ambiguousAlias = getFirstAmbiguousAlias(groupingExprsCopy);
        if (ambiguousAlias != null) {
            throw new AnalysisException(
                    "Column '" + ambiguousAlias.toSql() + "' in GROUP BY clause is ambiguous");
        }
        groupingExprsCopy = Expr.trySubstituteList(groupingExprsCopy, aliasSmap_, analyzer, false);
        for (int i = 0; i < groupingExprsCopy.size(); ++i) {
            groupingExprsCopy.get(i).analyze(analyzer);
            if (groupingExprsCopy.get(i).contains(Expr.isAggregatePredicate())) {
                // reference the original expr in the error msg
                throw new AnalysisException("GROUP BY expression must not contain aggregate functions: "
                        + groupingExprs_.get(i).toSql());
            }
            if (groupingExprsCopy.get(i).contains(AnalyticExpr.class)) {
                // reference the original expr in the error msg
                throw new AnalysisException("GROUP BY expression must not contain analytic expressions: "
                        + groupingExprsCopy.get(i).toSql());
            }
        }
    }

    // analyze having clause
    if (havingClause_ != null) {
        if (havingClause_.contains(Predicates.instanceOf(Subquery.class))) {
            throw new AnalysisException("Subqueries are not supported in the HAVING clause.");
        }
        // substitute aliases in place (ordinals not allowed in having clause)
        havingPred_ = havingClause_.substitute(aliasSmap_, analyzer, false);
        havingPred_.checkReturnsBool("HAVING clause", true);
        // can't contain analytic exprs
        Expr analyticExpr = havingPred_.findFirstOf(AnalyticExpr.class);
        if (analyticExpr != null) {
            throw new AnalysisException(
                    "HAVING clause must not contain analytic expressions: " + analyticExpr.toSql());
        }
    }

    // Collect the aggregate expressions from the SELECT, HAVING and ORDER BY clauses
    // of this statement.
    ArrayList<FunctionCallExpr> aggExprs = Lists.newArrayList();
    TreeNode.collect(resultExprs_, Expr.isAggregatePredicate(), aggExprs);
    if (havingPred_ != null) {
        havingPred_.collect(Expr.isAggregatePredicate(), aggExprs);
    }
    if (sortInfo_ != null) {
        // TODO: Avoid evaluating aggs in ignored order-bys
        TreeNode.collect(sortInfo_.getOrderingExprs(), Expr.isAggregatePredicate(), aggExprs);
    }

    // Optionally rewrite all count(distinct <expr>) into equivalent NDV() calls.
    ExprSubstitutionMap ndvSmap = null;
    if (analyzer.getQueryCtx().getRequest().query_options.appx_count_distinct) {
        ndvSmap = new ExprSubstitutionMap();
        for (FunctionCallExpr aggExpr : aggExprs) {
            if (!aggExpr.isDistinct() || !aggExpr.getFnName().getFunction().equals("count")
                    || aggExpr.getParams().size() != 1) {
                continue;
            }
            FunctionCallExpr ndvFnCall = new FunctionCallExpr("ndv", aggExpr.getParams().exprs());
            ndvFnCall.analyzeNoThrow(analyzer);
            Preconditions.checkState(ndvFnCall.getType().equals(aggExpr.getType()));
            ndvSmap.put(aggExpr, ndvFnCall);
        }
        // Replace all count(distinct <expr>) with NDV(<expr>).
        List<Expr> substAggExprs = Expr.substituteList(aggExprs, ndvSmap, analyzer, false);
        aggExprs.clear();
        for (Expr aggExpr : substAggExprs) {
            Preconditions.checkState(aggExpr instanceof FunctionCallExpr);
            aggExprs.add((FunctionCallExpr) aggExpr);
        }
    }

    // When DISTINCT aggregates are present, non-distinct (i.e. ALL) aggregates are
    // evaluated in two phases (see AggregateInfo for more details). In particular,
    // COUNT(c) in "SELECT COUNT(c), AGG(DISTINCT d) from R" is transformed to
    // "SELECT SUM(cnt) FROM (SELECT COUNT(c) as cnt from R group by d ) S".
    // Since a group-by expression is added to the inner query it returns no rows if
    // R is empty, in which case the SUM of COUNTs will return NULL.
    // However the original COUNT(c) should have returned 0 instead of NULL in this case.
    // Therefore, COUNT([ALL]) is transformed into zeroifnull(COUNT([ALL])) if
    // i) There is no GROUP-BY clause, and
    // ii) Other DISTINCT aggregates are present.
    ExprSubstitutionMap countAllMap = createCountAllMap(aggExprs, analyzer);
    countAllMap = ExprSubstitutionMap.compose(ndvSmap, countAllMap, analyzer);
    List<Expr> substitutedAggs = Expr.substituteList(aggExprs, countAllMap, analyzer, false);
    aggExprs.clear();
    TreeNode.collect(substitutedAggs, Expr.isAggregatePredicate(), aggExprs);
    createAggInfo(groupingExprsCopy, aggExprs, analyzer);

    // combine avg smap with the one that produces the final agg output
    AggregateInfo finalAggInfo = aggInfo_.getSecondPhaseDistinctAggInfo() != null
            ? aggInfo_.getSecondPhaseDistinctAggInfo()
            : aggInfo_;

    ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose(countAllMap, finalAggInfo.getOutputSmap(),
            analyzer);
    LOG.trace("combined smap: " + combinedSmap.debugString());

    // change select list, having and ordering exprs to point to agg output. We need
    // to reanalyze the exprs at this point.
    LOG.trace("desctbl: " + analyzer.getDescTbl().debugString());
    LOG.trace("resultexprs: " + Expr.debugString(resultExprs_));
    resultExprs_ = Expr.substituteList(resultExprs_, combinedSmap, analyzer, false);
    LOG.trace("post-agg selectListExprs: " + Expr.debugString(resultExprs_));
    if (havingPred_ != null) {
        // Make sure the predicate in the HAVING clause does not contain a
        // subquery.
        Preconditions.checkState(!havingPred_.contains(Predicates.instanceOf(Subquery.class)));
        havingPred_ = havingPred_.substitute(combinedSmap, analyzer, false);
        analyzer.registerConjuncts(havingPred_, true);
        LOG.debug("post-agg havingPred: " + havingPred_.debugString());
    }
    if (sortInfo_ != null) {
        sortInfo_.substituteOrderingExprs(combinedSmap, analyzer);
        LOG.debug("post-agg orderingExprs: " + Expr.debugString(sortInfo_.getOrderingExprs()));
    }

    // check that all post-agg exprs point to agg output
    for (int i = 0; i < selectList_.getItems().size(); ++i) {
        if (!resultExprs_.get(i).isBound(finalAggInfo.getOutputTupleId())) {
            throw new AnalysisException("select list expression not produced by aggregation output "
                    + "(missing from GROUP BY clause?): " + selectList_.getItems().get(i).getExpr().toSql());
        }
    }
    if (orderByElements_ != null) {
        for (int i = 0; i < orderByElements_.size(); ++i) {
            if (!sortInfo_.getOrderingExprs().get(i).isBound(finalAggInfo.getOutputTupleId())) {
                throw new AnalysisException("ORDER BY expression not produced by aggregation output "
                        + "(missing from GROUP BY clause?): " + orderByElements_.get(i).getExpr().toSql());
            }
        }
    }
    if (havingPred_ != null) {
        if (!havingPred_.isBound(finalAggInfo.getOutputTupleId())) {
            throw new AnalysisException("HAVING clause not produced by aggregation output "
                    + "(missing from GROUP BY clause?): " + havingClause_.toSql());
        }
    }
}

From source file:org.eclipse.sirius.diagram.sequence.business.internal.refresh.SequenceCanonicalSynchronizerAdapter.java

private Set<ISequenceEvent> getEventEndsOnUpperRange(SequenceDiagram sequenceDiagram, Lifeline lifeline,
        int upperRange, Set<ISequenceEvent> sequenceEventToIgnores) {
    Set<ISequenceEvent> eventEndsOnUpperRange = new TreeSet<ISequenceEvent>(new RangeComparator());
    Set<ISequenceEvent> allSequenceEventsOnRange = new SequenceDiagramQuery(sequenceDiagram)
            .getAllSequenceEventsOn(upperRange);
    allSequenceEventsOnRange.removeAll(sequenceEventToIgnores);
    eventEndsOnUpperRange.addAll(
            Sets.filter(allSequenceEventsOnRange, Predicates.not(Predicates.instanceOf(Lifeline.class))));
    return eventEndsOnUpperRange;
}

From source file:org.eclipse.sirius.diagram.ui.internal.refresh.diagram.DDiagramCanonicalSynchronizer.java

private void manageCollapse(Set<View> createdNodeViews) {
    if (createdNodeViews.isEmpty() || !(gmfDiagram.getElement() instanceof DDiagram)) {
        return;
    }
    DDiagram dDiagram = (DDiagram) gmfDiagram.getElement();
    ICollapseUpdater cu = CollapseUpdater.getICollapseUpdater(dDiagram);
    for (Node node : Iterables.filter(createdNodeViews, Node.class)) {
        EObject element = node.getElement();
        if (element instanceof DDiagramElement && cu instanceof CollapseUpdater) {
            DDiagramElement dde = (DDiagramElement) element;
            if (new DDiagramElementQuery(dde).isIndirectlyCollapsed()) {
                CollapseFilter filter = (CollapseFilter) Iterables.getFirst(Iterables
                        .filter(dde.getGraphicalFilters(), Predicates.instanceOf(CollapseFilter.class)), null);

                if (filter != null && filter.getWidth() == 0 && filter.getHeight() == 0) {
                    ((CollapseUpdater) cu).storeInFilterAndCollapseBounds(dde, Options.newSome(node), false);
                }
            }
        }
    }

}
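
The cast in the snippet above could also be avoided with FluentIterable, whose Class-based filter narrows the element type; whether that suits the Sirius code base is a separate question, so the following is only an illustrative sketch with plain types:

import java.util.Arrays;
import java.util.List;

import com.google.common.collect.FluentIterable;

public class FirstOfTypeExample {
    public static void main(String[] args) {
        List<Object> graphicalFilters = Arrays.<Object>asList("opacity", 7, "collapse");
        // filter(Class) replaces Predicates.instanceOf plus the cast,
        // and first().orNull() replaces Iterables.getFirst(..., null).
        Integer firstInt = FluentIterable.from(graphicalFilters).filter(Integer.class).first().orNull();
        System.out.println(firstInt); // 7
    }
}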

From source file:org.apache.impala.analysis.Expr.java

/**
 * Returns a clone of child with all decimal-typed NumericLiterals in it explicitly
 * cast to targetType.
 */
private Expr convertDecimalLiteralsToFloat(Analyzer analyzer, Expr child, Type targetType)
        throws AnalysisException {
    if (!targetType.isFloatingPointType() && !targetType.isIntegerType())
        return child;
    if (targetType.isIntegerType())
        targetType = Type.DOUBLE;
    List<NumericLiteral> literals = Lists.newArrayList();
    child.collectAll(Predicates.instanceOf(NumericLiteral.class), literals);
    ExprSubstitutionMap smap = new ExprSubstitutionMap();
    for (NumericLiteral l : literals) {
        if (!l.getType().isDecimal())
            continue;
        NumericLiteral castLiteral = (NumericLiteral) l.clone();
        castLiteral.explicitlyCastToFloat(targetType);
        smap.put(l, castLiteral);
    }
    return child.substitute(smap, analyzer, false);
}

From source file:org.sosy_lab.cpachecker.core.algorithm.PredicatedAnalysisAlgorithm.java

private Precision buildInitialPrecision(Collection<Precision> precisions, Precision initialPrecision)
        throws InterruptedException, RefinementFailedException {
    if (precisions.size() == 0) {
        return initialPrecision;
    }

    Multimap<Pair<CFANode, Integer>, AbstractionPredicate> locationInstancPreds = HashMultimap.create();
    Multimap<CFANode, AbstractionPredicate> localPreds = HashMultimap.create();
    Multimap<String, AbstractionPredicate> functionPreds = HashMultimap.create();
    Collection<AbstractionPredicate> globalPreds = new HashSet<>();

    Collection<PredicatePrecision> seenPrecisions = new HashSet<>();

    // add initial precision
    PredicatePrecision predPrec = Precisions.extractPrecisionByType(initialPrecision, PredicatePrecision.class);
    locationInstancPreds.putAll(predPrec.getLocationInstancePredicates());
    localPreds.putAll(predPrec.getLocalPredicates());
    functionPreds.putAll(predPrec.getFunctionPredicates());
    globalPreds.addAll(predPrec.getGlobalPredicates());

    seenPrecisions.add(predPrec);

    // add further precision information obtained during refinement
    for (Precision nextPrec : precisions) {
        predPrec = Precisions.extractPrecisionByType(nextPrec, PredicatePrecision.class);

        shutdownNotifier.shutdownIfNecessary();

        if (!seenPrecisions.contains(predPrec)) {
            seenPrecisions.add(predPrec);
            locationInstancPreds.putAll(predPrec.getLocationInstancePredicates());
            localPreds.putAll(predPrec.getLocalPredicates());
            functionPreds.putAll(predPrec.getFunctionPredicates());
            globalPreds.addAll(predPrec.getGlobalPredicates());
        }
    }

    // construct new predicate precision
    PredicatePrecision newPredPrec = new PredicatePrecision(locationInstancPreds, localPreds, functionPreds,
            globalPreds);

    try {
        // assure that refinement fails if same path is encountered twice and precision not refined on that path
        if (repeatedFailure && noNewPredicates(oldPrecision, newPredPrec)) {
            throw new RefinementFailedException(Reason.RepeatedCounterexample, pathToFailure);
        }
    } catch (SolverException e) {
        throw new RefinementFailedException(Reason.InterpolationFailed, pathToFailure, e);
    }

    return Precisions.replaceByType(initialPrecision, newPredPrec,
            Predicates.instanceOf(PredicatePrecision.class));
}

From source file:org.sosy_lab.cpachecker.cpa.predicate.PredicateAbstractionRefinementStrategy.java

@Override
public void finishRefinementOfPath(ARGState pUnreachableState, List<ARGState> pAffectedStates,
        ARGReachedSet pReached, boolean pRepeatedCounterexample) throws CPAException {

    { // Add predicate "false" to unreachable location
        CFANode loc = extractLocation(pUnreachableState);
        int locInstance = getPredicateState(pUnreachableState).getAbstractionLocationsOnPath().get(loc);
        newPredicates.put(Pair.of(loc, locInstance), predAbsMgr.createPredicateFor(bfmgr.makeBoolean(false)));
        pAffectedStates.add(pUnreachableState);
    }

    // We have two different strategies for the refinement root: set it to
    // the first interpolation point or set it to highest location in the ARG
    // where the same CFANode appears.
    // Both work, so this is a heuristics question to get the best performance.
    // My benchmark showed, that at least for the benchmarks-lbe examples it is
    // best to use strategy one iff newPredicatesFound.

    // get previous precision
    UnmodifiableReachedSet reached = pReached.asReachedSet();
    PredicatePrecision targetStatePrecision = extractPredicatePrecision(
            reached.getPrecision(reached.getLastState()));

    ARGState refinementRoot = getRefinementRoot(pAffectedStates, targetStatePrecision, pRepeatedCounterexample);

    logger.log(Level.FINEST, "Removing everything below", refinementRoot, "from ARG.");

    // check whether we should restart
    refinementCount++;
    if (restartAfterRefinements > 0 && refinementCount >= restartAfterRefinements) {
        ARGState root = (ARGState) reached.getFirstState();
        // we have to use the child as the refinementRoot
        assert root.getChildren().size() == 1 : "ARG root should have exactly one child";
        refinementRoot = Iterables.getLast(root.getChildren());

        logger.log(Level.FINEST, "Restarting analysis after", refinementCount,
                "refinements by clearing the ARG.");
        refinementCount = 0;
    }

    // now create new precision
    precisionUpdate.start();
    PredicatePrecision basePrecision;
    if (keepAllPredicates) {
        basePrecision = findAllPredicatesFromSubgraph(refinementRoot, reached);
    } else {
        basePrecision = targetStatePrecision;
    }

    logger.log(Level.ALL, "Old predicate map is", basePrecision);
    logger.log(Level.ALL, "New predicates are", newPredicates);

    PredicatePrecision newPrecision;
    switch (predicateSharing) {
    case GLOBAL:
        newPrecision = basePrecision.addGlobalPredicates(newPredicates.values());
        break;
    case FUNCTION:
        newPrecision = basePrecision.addFunctionPredicates(mergePredicatesPerFunction(newPredicates));
        break;
    case LOCATION:
        newPrecision = basePrecision.addLocalPredicates(mergePredicatesPerLocation(newPredicates));
        break;
    case LOCATION_INSTANCE:
        newPrecision = basePrecision.addLocationInstancePredicates(newPredicates);
        break;
    default:
        throw new AssertionError();
    }

    logger.log(Level.ALL, "Predicate map now is", newPrecision);

    assert basePrecision.calculateDifferenceTo(newPrecision) == 0 : "We forgot predicates during refinement!";
    assert targetStatePrecision
            .calculateDifferenceTo(newPrecision) == 0 : "We forgot predicates during refinement!";

    if (dumpPredicates && dumpPredicatesFile != null) {
        Path precFile = dumpPredicatesFile.getPath(precisionUpdate.getUpdateCount());
        try (Writer w = Files.openOutputFile(precFile)) {
            precisionWriter.writePredicateMap(ImmutableSetMultimap.copyOf(newPredicates),
                    ImmutableSetMultimap.<CFANode, AbstractionPredicate>of(),
                    ImmutableSetMultimap.<String, AbstractionPredicate>of(),
                    ImmutableSet.<AbstractionPredicate>of(), newPredicates.values(), w);
        } catch (IOException e) {
            logger.logUserException(Level.WARNING, e, "Could not dump precision to file");
        }
    }

    precisionUpdate.stop();

    argUpdate.start();

    List<Precision> precisions = new ArrayList<>(2);
    List<Predicate<? super Precision>> precisionTypes = new ArrayList<>(2);

    precisions.add(newPrecision);
    precisionTypes.add(Predicates.instanceOf(PredicatePrecision.class));

    if (isValuePrecisionAvailable(pReached, refinementRoot)) {
        precisions.add(mergeAllValuePrecisionsFromSubgraph(refinementRoot, reached));
        precisionTypes.add(VariableTrackingPrecision.isMatchingCPAClass(ValueAnalysisCPA.class));
    }

    pReached.removeSubtree(refinementRoot, precisions, precisionTypes);

    assert (refinementCount > 0) || reached.size() == 1;

    if (sharePredicates) {
        pReached.updatePrecisionGlobally(newPrecision, Predicates.instanceOf(PredicatePrecision.class));
    }

    argUpdate.stop();

    newPredicates = null;
}

From source file:org.sosy_lab.cpachecker.core.algorithm.precondition.PreconditionRefinerAlgorithm.java

private void refinePrecisionForNextIteration(ReachedSet pInitialStates, ReachedSet pTo,
        PredicatePrecision pPredPrecision) {

    ARGReachedSet argReachedSetTo = new ARGReachedSet(pTo, argcpa);

    Iterator<AbstractState> rootStatesIterator = pInitialStates.iterator();

    while (rootStatesIterator.hasNext()) {
        AbstractState rootState = rootStatesIterator.next();
        ARGState as = AbstractStates.extractStateByType(rootState, ARGState.class);

        Collection<ARGState> childsToRemove = Lists.newArrayList(as.getChildren());
        for (ARGState childWithSubTreeToRemove : childsToRemove) {
            argReachedSetTo.removeSubtree(childWithSubTreeToRemove, pPredPrecision,
                    Predicates.instanceOf(PredicatePrecision.class));
        }
    }

    // pTo.updatePrecisionGlobally(pPredPrecision, Predicates.instanceOf(PredicatePrecision.class));
}