Example usage for com.google.common.collect ImmutableList size

List of usage examples for com.google.common.collect ImmutableList size

Introduction

This page collects example usages of com.google.common.collect.ImmutableList.size() from open-source projects.

Prototype

int size();

Document

Returns the number of elements in this list.
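
Before the project usages below, here is a minimal, self-contained sketch of the call. The class and variable names are illustrative only and are not taken from any of the source files listed under Usage.

import com.google.common.collect.ImmutableList;

public class ImmutableListSizeExample {
    public static void main(String[] args) {
        // Build an immutable list of three elements.
        ImmutableList<String> names = ImmutableList.of("alpha", "beta", "gamma");

        // size() returns the number of elements; this prints 3.
        System.out.println(names.size());

        // An empty list reports a size of 0; isEmpty() is the usual check for that case.
        ImmutableList<String> empty = ImmutableList.of();
        System.out.println(empty.size());    // prints 0
        System.out.println(empty.isEmpty()); // prints true
    }
}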

Usage

From source file:org.gradle.model.internal.manage.schema.extract.ManagedImplTypeSchemaExtractionStrategySupport.java

private <R> void ensureNoOverloadedMethods(ModelSchemaExtractionContext<R> extractionContext,
        final ImmutableListMultimap<String, Method> methodsByName) {
    ImmutableSet<String> methodNames = methodsByName.keySet();
    for (String methodName : methodNames) {
        ImmutableList<Method> methods = methodsByName.get(methodName);
        if (methods.size() > 1) {
            List<Method> deduped = CollectionUtils.dedup(methods, METHOD_EQUIVALENCE);
            if (deduped.size() > 1) {
                throw invalidMethods(extractionContext, "overloaded methods are not supported", deduped);
            }
        }
    }
}

From source file:buildcraft.builders.tile.TileBuilder_Neptune.java

@Override
public void onPlacedBy(EntityLivingBase placer, ItemStack stack) {
    super.onPlacedBy(placer, stack);
    EnumFacing thisFacing = getWorld().getBlockState(getPos()).getValue(BlockBCBase_Neptune.PROP_FACING);
    TileEntity inFront = getWorld().getTileEntity(getPos().offset(thisFacing.getOpposite()));
    if (inFront instanceof IPathProvider) {
        IPathProvider provider = (IPathProvider) inFront;
        ImmutableList<BlockPos> copiedPath = ImmutableList.copyOf(provider.getPath());
        if (copiedPath.size() < 2) {
            setPath(null);
        } else {
            setPath(copiedPath);
            provider.removeFromWorld();
        }
        sendNetworkUpdate(NET_RENDER_DATA);
    }
}

From source file:solar.blaz.rondel.compiler.manager.AbstractInjectorManager.java

protected TypeElement[] parseViewComponent(ImmutableList<TypeMirror> components) {

    if (components == null || components.size() == 0) {
        return null;
    } else {

        List<TypeElement> moduleElements = new ArrayList<>();
        for (int i = 0; i < components.size(); i++) {
            TypeMirror componentClass = components.get(i);

            TypeElement component = elementUtils.getTypeElement(componentClass.toString());

            if (component.getKind() == ElementKind.INTERFACE) {
                moduleElements.add(component);
            } else {
                messager.error("Component has to be interface.", component);
            }

        }

        if (moduleElements.isEmpty()) {
            return null;
        } else {
            return moduleElements.toArray(new TypeElement[moduleElements.size()]);
        }

    }

}

From source file:org.locationtech.geogig.porcelain.SquashOp.java

/**
 * Executes the squash operation.
 * 
 * @return the new head after modifying the history squashing commits
 * @see org.locationtech.geogig.repository.AbstractGeoGigOp#call()
 */
@Override
protected ObjectId _call() {

    Preconditions.checkNotNull(since);
    Preconditions.checkNotNull(until);

    GraphDatabase graphDb = graphDatabase();
    Repository repository = repository();
    Platform platform = platform();

    final Optional<Ref> currHead = command(RefParse.class).setName(Ref.HEAD).call();
    Preconditions.checkState(currHead.isPresent(), "Repository has no HEAD, can't squash.");
    Preconditions.checkState(currHead.get() instanceof SymRef, "Can't squash from detached HEAD");
    final SymRef headRef = (SymRef) currHead.get();
    final String currentBranch = headRef.getTarget();

    Preconditions.checkState(stagingArea().isClean() && workingTree().isClean(),
            "You must have a clean working tree and index to perform a squash.");

    Optional<ObjectId> ancestor = command(FindCommonAncestor.class).setLeft(since).setRight(until).call();
    Preconditions.checkArgument(ancestor.isPresent(),
            "'since' and 'until' command do not have a common ancestor");
    Preconditions.checkArgument(ancestor.get().equals(since.getId()), "Commits provided in wrong order");

    Preconditions.checkArgument(!since.getParentIds().isEmpty(), "'since' commit has no parents");

    // we get a list of commits to apply on top of the squashed commits
    List<RevCommit> commits = getCommitsAfterUntil();

    ImmutableSet<Ref> refs = command(ForEachRef.class).setPrefixFilter(Ref.HEADS_PREFIX).call();

    // we create a list of all parents of those squashed commits, in case they are
    // merge commits. The resulting commit will have all these parents
    //
    // While iterating the set of commits to squash, we check that there are no branch starting
    // points among them. Any commit with more than one child causes an exception to be thrown,
    // since the squash operation does not support squashing those commits

    Iterator<RevCommit> toSquash = command(LogOp.class).setSince(since.getParentIds().get(0))
            .setUntil(until.getId()).setFirstParentOnly(true).call();
    List<ObjectId> firstParents = Lists.newArrayList();
    List<ObjectId> secondaryParents = Lists.newArrayList();
    final List<ObjectId> squashedIds = Lists.newArrayList();
    RevCommit commitToSquash = until;
    while (toSquash.hasNext()) {
        commitToSquash = toSquash.next();
        squashedIds.add(commitToSquash.getId());
        Preconditions.checkArgument(graphDb.getChildren(commitToSquash.getId()).size() < 2,
                "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it and the
            // starting commit has just one child
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commitToSquash.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commitToSquash.getParentIds().size() > 1,
                    "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        }
        ImmutableList<ObjectId> parentIds = commitToSquash.getParentIds();
        for (int i = 1; i < parentIds.size(); i++) {
            secondaryParents.add(parentIds.get(i));
        }
        firstParents.add(parentIds.get(0));
    }
    Preconditions.checkArgument(since.equals(commitToSquash),
            "Cannot reach 'since' from 'until' commit through first parentage");

    // We do the same check in the children commits
    for (RevCommit commit : commits) {
        Preconditions.checkArgument(graphDb.getChildren(commit.getId()).size() < 2,
                "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commit.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commit.getParentIds().size() > 1,
                    "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        }
    }

    ObjectId newHead;
    // rewind the head
    newHead = since.getParentIds().get(0);
    command(ResetOp.class).setCommit(Suppliers.ofInstance(newHead)).setMode(ResetMode.HARD).call();

    // add the current HEAD as first parent of the resulting commit
    // parents.add(0, newHead);

    // Create new commit
    List<ObjectId> parents = Lists.newArrayList();
    parents.addAll(firstParents);
    parents.addAll(secondaryParents);
    ObjectId endTree = until.getTreeId();
    CommitBuilder builder = new CommitBuilder(until);
    Collection<ObjectId> filteredParents = Collections2.filter(parents, new Predicate<ObjectId>() {
        @Override
        public boolean apply(@Nullable ObjectId id) {
            return !squashedIds.contains(id);
        }

    });

    builder.setParentIds(Lists.newArrayList(filteredParents));
    builder.setTreeId(endTree);
    if (message == null) {
        message = since.getMessage();
    }
    long timestamp = platform.currentTimeMillis();
    builder.setMessage(message);
    builder.setCommitter(resolveCommitter());
    builder.setCommitterEmail(resolveCommitterEmail());
    builder.setCommitterTimestamp(timestamp);
    builder.setCommitterTimeZoneOffset(platform.timeZoneOffset(timestamp));
    builder.setAuthorTimestamp(until.getAuthor().getTimestamp());

    RevCommit newCommit = builder.build();
    repository.objectDatabase().put(newCommit);

    newHead = newCommit.getId();
    ObjectId newTreeId = newCommit.getTreeId();

    command(UpdateRef.class).setName(currentBranch).setNewValue(newHead).call();
    command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(currentBranch).call();

    workingTree().updateWorkHead(newTreeId);
    stagingArea().updateStageHead(newTreeId);

    // now put the other commits after the squashed one
    newHead = addCommits(commits, currentBranch, newHead);

    return newHead;

}

From source file:com.opengamma.strata.pricer.calibration.CurveCalibrator.java

private DoubleMatrix derivatives(ImmutableList<Trade> trades, ImmutableRatesProvider provider,
        ImmutableList<CurveParameterSize> orderAll, int totalParamsAll) {

    return DoubleMatrix.ofArrayObjects(trades.size(), totalParamsAll,
            i -> measures.derivative(trades.get(i), provider, orderAll));
}

From source file:org.killbill.billing.entitlement.dao.MockBlockingStateDao.java

@Override
public BlockingState getBlockingStateForService(final UUID blockableId,
        final BlockingStateType blockingStateType, final String serviceName,
        final InternalTenantContext context) {
    final List<BlockingState> states = blockingStates.get(blockableId);
    if (states == null) {
        return null;
    }
    final ImmutableList<BlockingState> filtered = ImmutableList
            .<BlockingState>copyOf(Collections2.filter(states, new Predicate<BlockingState>() {
                @Override
                public boolean apply(@Nullable final BlockingState input) {
                    return input.getService().equals(serviceName);
                }
            }));
    return filtered.size() == 0 ? null : filtered.get(filtered.size() - 1);
}

From source file:org.locationtech.geogig.api.porcelain.SquashOp.java

/**
 * Executes the squash operation.
 * 
 * @return the new head after modifying the history squashing commits
 * @see org.locationtech.geogig.api.AbstractGeoGigOp#call()
 */
@Override
protected ObjectId _call() {

    Preconditions.checkNotNull(since);
    Preconditions.checkNotNull(until);

    GraphDatabase graphDb = graphDatabase();
    Repository repository = repository();
    Platform platform = platform();

    final Optional<Ref> currHead = command(RefParse.class).setName(Ref.HEAD).call();
    Preconditions.checkState(currHead.isPresent(), "Repository has no HEAD, can't squash.");
    Preconditions.checkState(currHead.get() instanceof SymRef, "Can't squash from detached HEAD");
    final SymRef headRef = (SymRef) currHead.get();
    final String currentBranch = headRef.getTarget();

    Preconditions.checkState(index().isClean() && workingTree().isClean(),
            "You must have a clean working tree and index to perform a squash.");

    Optional<ObjectId> ancestor = command(FindCommonAncestor.class).setLeft(since).setRight(until).call();
    Preconditions.checkArgument(ancestor.isPresent(),
            "'since' and 'until' command do not have a common ancestor");
    Preconditions.checkArgument(ancestor.get().equals(since.getId()), "Commits provided in wrong order");

    Preconditions.checkArgument(!since.getParentIds().isEmpty(), "'since' commit has no parents");

    // we get a list of commits to apply on top of the squashed commits
    List<RevCommit> commits = getCommitsAfterUntil();

    ImmutableSet<Ref> refs = command(ForEachRef.class).setPrefixFilter(Ref.HEADS_PREFIX).call();

    // we create a list of all parents of those squashed commits, in case they are
    // merge commits. The resulting commit will have all these parents
    //
    // While iterating the set of commits to squash, we check that there are no branch starting
    // points among them. Any commit with more than one child causes an exception to be thrown,
    // since the squash operation does not support squashing those commits

    Iterator<RevCommit> toSquash = command(LogOp.class).setSince(since.getParentIds().get(0))
            .setUntil(until.getId()).setFirstParentOnly(true).call();
    List<ObjectId> firstParents = Lists.newArrayList();
    List<ObjectId> secondaryParents = Lists.newArrayList();
    final List<ObjectId> squashedIds = Lists.newArrayList();
    RevCommit commitToSquash = until;
    while (toSquash.hasNext()) {
        commitToSquash = toSquash.next();
        squashedIds.add(commitToSquash.getId());
        Preconditions.checkArgument(graphDb.getChildren(commitToSquash.getId()).size() < 2,
                "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it and the
            // starting commit has just one child
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commitToSquash.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commitToSquash.getParentIds().size() > 1,
                    "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        }
        ImmutableList<ObjectId> parentIds = commitToSquash.getParentIds();
        for (int i = 1; i < parentIds.size(); i++) {
            secondaryParents.add(parentIds.get(i));
        }
        firstParents.add(parentIds.get(0));
    }
    Preconditions.checkArgument(since.equals(commitToSquash),
            "Cannot reach 'since' from 'until' commit through first parentage");

    // We do the same check in the children commits
    for (RevCommit commit : commits) {
        Preconditions.checkArgument(graphDb.getChildren(commit.getId()).size() < 2,
                "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commit.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commit.getParentIds().size() > 1,
                    "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        }
    }

    ObjectId newHead;
    // rewind the head
    newHead = since.getParentIds().get(0);
    command(ResetOp.class).setCommit(Suppliers.ofInstance(newHead)).setMode(ResetMode.HARD).call();

    // add the current HEAD as first parent of the resulting commit
    // parents.add(0, newHead);

    // Create new commit
    List<ObjectId> parents = Lists.newArrayList();
    parents.addAll(firstParents);
    parents.addAll(secondaryParents);
    ObjectId endTree = until.getTreeId();
    CommitBuilder builder = new CommitBuilder(until);
    Collection<ObjectId> filteredParents = Collections2.filter(parents, new Predicate<ObjectId>() {
        @Override
        public boolean apply(@Nullable ObjectId id) {
            return !squashedIds.contains(id);
        }

    });

    builder.setParentIds(Lists.newArrayList(filteredParents));
    builder.setTreeId(endTree);
    if (message == null) {
        message = since.getMessage();
    }
    long timestamp = platform.currentTimeMillis();
    builder.setMessage(message);
    builder.setCommitter(resolveCommitter());
    builder.setCommitterEmail(resolveCommitterEmail());
    builder.setCommitterTimestamp(timestamp);
    builder.setCommitterTimeZoneOffset(platform.timeZoneOffset(timestamp));
    builder.setAuthorTimestamp(until.getAuthor().getTimestamp());

    RevCommit newCommit = builder.build();
    repository.objectDatabase().put(newCommit);

    newHead = newCommit.getId();
    ObjectId newTreeId = newCommit.getTreeId();

    command(UpdateRef.class).setName(currentBranch).setNewValue(newHead).call();
    command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(currentBranch).call();

    workingTree().updateWorkHead(newTreeId);
    index().updateStageHead(newTreeId);

    // now put the other commits after the squashed one
    newHead = addCommits(commits, currentBranch, newHead);

    return newHead;

}

From source file:dagger2.internal.codegen.ComponentValidator.java

/**
 * Validates the given component subject. Also validates any referenced subcomponents that aren't
 * already included in the {@code validatedSubcomponents} set.
 */
public ComponentValidationReport validate(final TypeElement subject,
        Set<? extends Element> validatedSubcomponents, Set<? extends Element> validatedSubcomponentBuilders) {
    ValidationReport.Builder<TypeElement> builder = ValidationReport.Builder.about(subject);

    if (!subject.getKind().equals(INTERFACE)
            && !(subject.getKind().equals(CLASS) && subject.getModifiers().contains(ABSTRACT))) {
        builder.addItem(String.format("@%s may only be applied to an interface or abstract class",
                componentType.annotationType().getSimpleName()), subject);
    }

    ImmutableList<DeclaredType> builders = enclosedBuilders(subject, componentType.builderAnnotationType());
    if (builders.size() > 1) {
        builder.addItem(String.format(ErrorMessages.builderMsgsFor(componentType).moreThanOne(), builders),
                subject);
    }

    DeclaredType subjectType = MoreTypes.asDeclared(subject.asType());

    // TODO(gak): This should use Util.findLocalAndInheritedMethods, otherwise
    // it can return a logical method multiple times (including overrides, etc.)
    List<? extends Element> members = elements.getAllMembers(subject);
    Multimap<Element, ExecutableElement> referencedSubcomponents = LinkedHashMultimap.create();
    for (ExecutableElement method : ElementFilter.methodsIn(members)) {
        if (method.getModifiers().contains(ABSTRACT)) {
            ExecutableType resolvedMethod = MoreTypes.asExecutable(types.asMemberOf(subjectType, method));
            List<? extends TypeMirror> parameterTypes = resolvedMethod.getParameterTypes();
            List<? extends VariableElement> parameters = method.getParameters();
            TypeMirror returnType = resolvedMethod.getReturnType();

            // abstract methods are ones we have to implement, so they each need to be validated
            // first, check the return type.  if it's a subcomponent, validate that method as such.
            Optional<AnnotationMirror> subcomponentAnnotation = checkForAnnotation(returnType,
                    Subcomponent.class);
            Optional<AnnotationMirror> subcomponentBuilderAnnotation = checkForAnnotation(returnType,
                    Subcomponent.Builder.class);
            if (subcomponentAnnotation.isPresent()) {
                referencedSubcomponents.put(MoreTypes.asElement(returnType), method);
                validateSubcomponentMethod(builder, method, parameters, parameterTypes, returnType,
                        subcomponentAnnotation);
            } else if (subcomponentBuilderAnnotation.isPresent()) {
                referencedSubcomponents.put(MoreTypes.asElement(returnType).getEnclosingElement(), method);
                validateSubcomponentBuilderMethod(builder, method, parameters, returnType,
                        validatedSubcomponentBuilders);
            } else {
                // if it's not a subcomponent...
                switch (parameters.size()) {
                case 0:
                    // no parameters means that it is a provision method
                    // basically, there are no restrictions here.  \o/
                    break;
                case 1:
                    // one parameter means that it's a members injection method
                    TypeMirror onlyParameter = Iterables.getOnlyElement(parameterTypes);
                    if (!(returnType.getKind().equals(VOID) || types.isSameType(returnType, onlyParameter))) {
                        builder.addItem("Members injection methods may only return the injected type or void.",
                                method);
                    }
                    break;
                default:
                    // this isn't any method that we know how to implement...
                    builder.addItem(
                            "This method isn't a valid provision method, members injection method or "
                                    + "subcomponent factory method. Dagger cannot implement this method",
                            method);
                    break;
                }
            }
        }
    }

    for (Map.Entry<Element, Collection<ExecutableElement>> entry : referencedSubcomponents.asMap().entrySet()) {
        if (entry.getValue().size() > 1) {
            builder.addItem(String.format(
                    ErrorMessages.SubcomponentBuilderMessages.INSTANCE.moreThanOneRefToSubcomponent(),
                    entry.getKey(), entry.getValue()), subject);
        }
    }

    AnnotationMirror componentMirror = getAnnotationMirror(subject, componentType.annotationType()).get();
    ImmutableList<TypeMirror> moduleTypes = getComponentModules(componentMirror);
    moduleValidator.validateReferencedModules(subject, builder, moduleTypes);

    // Make sure we validate any subcomponents we're referencing, unless we know we validated
    // them already in this pass.
    // TODO(sameb): If subcomponents refer to each other and both aren't in
    //              'validatedSubcomponents' (e.g, both aren't compiled in this pass),
    //              then this can loop forever.
    ImmutableSet.Builder<Element> allSubcomponents = ImmutableSet.<Element>builder()
            .addAll(referencedSubcomponents.keySet());
    for (Element subcomponent : Sets.difference(referencedSubcomponents.keySet(), validatedSubcomponents)) {
        ComponentValidationReport subreport = subcomponentValidator.validate(MoreElements.asType(subcomponent),
                validatedSubcomponents, validatedSubcomponentBuilders);
        builder.addItems(subreport.report().items());
        allSubcomponents.addAll(subreport.referencedSubcomponents());
    }

    return new AutoValue_ComponentValidator_ComponentValidationReport(allSubcomponents.build(),
            builder.build());
}

From source file:org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveAlgorithmsUtil.java

public double computeMapJoinIOCost(ImmutableList<Pair<Double, Double>> relationInfos, ImmutableBitSet streaming,
        int parallelism) {
    // Hash-join
    double ioCost = 0.0;
    for (int i = 0; i < relationInfos.size(); i++) {
        double cardinality = relationInfos.get(i).left;
        double averageTupleSize = relationInfos.get(i).right;
        if (!streaming.get(i)) {
            ioCost += cardinality * averageTupleSize * netCost * parallelism;
        }
    }
    return ioCost;
}