Example usage for com.google.common.collect ImmutableList get

List of usage examples for com.google.common.collect ImmutableList get

Introduction

On this page you can find example usages of com.google.common.collect.ImmutableList#get(int), collected from open source projects.

Prototype

E get(int index);

Document

Returns the element at the specified position in this list.
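For orientation, here is a minimal, self-contained sketch of the call itself; the class name and the values are illustrative and do not come from the projects below.

import com.google.common.collect.ImmutableList;

public class ImmutableListGetExample {
    public static void main(String[] args) {
        ImmutableList<String> names = ImmutableList.of("alpha", "beta", "gamma");
        // get(int) returns the element at the given zero-based position
        String second = names.get(1); // "beta"
        System.out.println(second);
        // Indexes outside [0, size()) throw IndexOutOfBoundsException,
        // e.g. names.get(3) would throw here.
    }
}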

Usage

From source file:org.apache.drill.common.expression.ExpressionStringBuilder.java

@Override
public Void visitFunctionCall(FunctionCall call, StringBuilder sb) throws RuntimeException {
    ImmutableList<LogicalExpression> args = call.args;
    sb.append(call.getName());
    sb.append("(");
    for (int i = 0; i < args.size(); i++) {
        if (i != 0) {
            sb.append(", ");
        }
        args.get(i).accept(this, sb);
    }
    sb.append(") ");
    return null;
}

From source file:org.geogit.api.plumbing.diff.FeatureDiff.java

/**
 * @param path the full path to the feature, including its name
 * @param newRevFeature the new version of the feature
 * @param oldRevFeature the old version of the feature
 * @param newRevFeatureType the new version of the feature type
 * @param oldRevFeatureType the old version of the feature type
 * @param all - true if all attributes should be added regardless of change
 */
public FeatureDiff(String path, RevFeature newRevFeature, RevFeature oldRevFeature,
        RevFeatureType newRevFeatureType, RevFeatureType oldRevFeatureType, boolean all) {

    this.path = path;
    this.newFeatureType = newRevFeatureType;
    this.oldFeatureType = oldRevFeatureType;
    diffs = new HashMap<PropertyDescriptor, AttributeDiff>();

    ImmutableList<PropertyDescriptor> oldAttributes = oldRevFeatureType.sortedDescriptors();
    ImmutableList<PropertyDescriptor> newAttributes = newRevFeatureType.sortedDescriptors();
    ImmutableList<Optional<Object>> oldValues = oldRevFeature.getValues();
    ImmutableList<Optional<Object>> newValues = newRevFeature.getValues();
    BitSet updatedAttributes = new BitSet(newValues.size());
    for (int i = 0; i < oldAttributes.size(); i++) {
        Optional<Object> oldValue = oldValues.get(i);
        int idx = newAttributes.indexOf(oldAttributes.get(i));
        if (idx != -1) {
            Optional<Object> newValue = newValues.get(idx);
            if (!oldValue.equals(newValue) || all) {
                if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                    diffs.put(oldAttributes.get(i),
                            new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()),
                                    Optional.fromNullable((Geometry) newValue.orNull())));
                } else {
                    diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, newValue));
                }
            }
            updatedAttributes.set(idx);
        } else {
            if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                diffs.put(oldAttributes.get(i),
                        new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()),
                                Optional.fromNullable((Geometry) null)));
            } else {
                diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, null));
            }
        }
    }
    updatedAttributes.flip(0, newValues.size());
    for (int i = updatedAttributes.nextSetBit(0); i >= 0; i = updatedAttributes.nextSetBit(i + 1)) {
        if (Geometry.class.isAssignableFrom(newAttributes.get(i).getType().getBinding())) {
            diffs.put(newAttributes.get(i), new GeometryAttributeDiff(Optional.fromNullable((Geometry) null),
                    Optional.fromNullable((Geometry) newValues.get(i).orNull())));
        } else {
            diffs.put(newAttributes.get(i), new GenericAttributeDiffImpl(null, newValues.get(i)));
        }
    }

}

From source file:org.eclipse.xtext.builder.impl.QueuedBuildData.java

private IQueuedBuildDataContribution getContribution(
        ImmutableList<? extends IQueuedBuildDataContribution> contributions) {
    switch (contributions.size()) {
    case 0:
        return new NullContribution();
    case 1:
        return contributions.get(0);
    default:
        return new CompositeContribution(contributions);
    }
}

From source file:org.apache.drill.common.expression.ExpressionStringBuilder.java

@Override
public Void visitFunctionHolderExpression(FunctionHolderExpression holder, StringBuilder sb)
        throws RuntimeException {
    ImmutableList<LogicalExpression> args = holder.args;
    sb.append(holder.getName());
    sb.append("(");
    for (int i = 0; i < args.size(); i++) {
        if (i != 0) {
            sb.append(", ");
        }
        args.get(i).accept(this, sb);
    }
    sb.append(") ");
    return null;
}

From source file:org.locationtech.geogig.api.plumbing.diff.FeatureDiff.java

/**
 * @param path the full path to the feature, including its name
 * @param newRevFeature the new version of the feature
 * @param oldRevFeature the old version of the feature
 * @param newRevFeatureType the new version of the feature type
 * @param oldRevFeatureType the old version of the feature type
 * @param all - true if all attributes should be added regardless of change
 */
public FeatureDiff(String path, RevFeature newRevFeature, RevFeature oldRevFeature,
        RevFeatureType newRevFeatureType, RevFeatureType oldRevFeatureType, boolean all) {

    this.path = path;
    this.newFeatureType = newRevFeatureType;
    this.oldFeatureType = oldRevFeatureType;
    diffs = new HashMap<PropertyDescriptor, AttributeDiff>();

    ImmutableList<PropertyDescriptor> oldAttributes = oldRevFeatureType.sortedDescriptors();
    ImmutableList<PropertyDescriptor> newAttributes = newRevFeatureType.sortedDescriptors();
    ImmutableList<Optional<Object>> oldValues = oldRevFeature.getValues();
    ImmutableList<Optional<Object>> newValues = newRevFeature.getValues();
    BitSet updatedAttributes = new BitSet(newValues.size());
    for (int i = 0; i < oldAttributes.size(); i++) {
        Optional<Object> oldValue = oldValues.get(i);
        int idx = newAttributes.indexOf(oldAttributes.get(i));
        if (idx != -1) {
            Optional<Object> newValue = newValues.get(idx);
            if (!oldValue.equals(newValue) || all) {
                if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                    diffs.put(oldAttributes.get(i),
                            new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()),
                                    Optional.fromNullable((Geometry) newValue.orNull())));
                } else {
                    diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, newValue));
                }
            }
            updatedAttributes.set(idx);
        } else {
            if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                diffs.put(oldAttributes.get(i),
                        new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()),
                                Optional.fromNullable((Geometry) null)));
            } else {
                diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, null));
            }
        }
    }
    updatedAttributes.flip(0, newValues.size());
    for (int i = updatedAttributes.nextSetBit(0); i >= 0; i = updatedAttributes.nextSetBit(i + 1)) {
        if (Geometry.class.isAssignableFrom(newAttributes.get(i).getType().getBinding())) {
            diffs.put(newAttributes.get(i), new GeometryAttributeDiff(Optional.fromNullable((Geometry) null),
                    Optional.fromNullable((Geometry) newValues.get(i).orNull())));
        } else {
            diffs.put(newAttributes.get(i), new GenericAttributeDiffImpl(null, newValues.get(i)));
        }
    }

}

From source file:org.glowroot.storage.repo.helper.RollupLevelService.java

public long getDataPointIntervalMillis(long captureTimeFrom, long captureTimeTo) throws Exception {
    long millis = captureTimeTo - captureTimeFrom;
    long timeAgoMillis = clock.currentTimeMillis() - captureTimeFrom;
    ImmutableList<Integer> rollupExpirationHours = configRepository.getStorageConfig().rollupExpirationHours();
    List<RollupConfig> rollupConfigs = configRepository.getRollupConfigs();
    for (int i = 0; i < rollupConfigs.size() - 1; i++) {
        RollupConfig currRollupConfig = rollupConfigs.get(i);
        RollupConfig nextRollupConfig = rollupConfigs.get(i + 1);
        if (millis < nextRollupConfig.viewThresholdMillis()
                && HOURS.toMillis(rollupExpirationHours.get(i)) > timeAgoMillis) {
            return currRollupConfig.intervalMillis();
        }
    }
    return rollupConfigs.get(rollupConfigs.size() - 1).intervalMillis();
}

From source file:org.locationtech.geogig.porcelain.SquashOp.java

/**
 * Executes the squash operation.
 * 
 * @return the new head after modifying the history squashing commits
 * @see org.locationtech.geogig.repository.AbstractGeoGigOp#call()
 */
@Override
protected ObjectId _call() {

    Preconditions.checkNotNull(since);
    Preconditions.checkNotNull(until);

    GraphDatabase graphDb = graphDatabase();
    Repository repository = repository();
    Platform platform = platform();

    final Optional<Ref> currHead = command(RefParse.class).setName(Ref.HEAD).call();
    Preconditions.checkState(currHead.isPresent(), "Repository has no HEAD, can't squash.");
    Preconditions.checkState(currHead.get() instanceof SymRef, "Can't squash from detached HEAD");
    final SymRef headRef = (SymRef) currHead.get();
    final String currentBranch = headRef.getTarget();

    Preconditions.checkState(stagingArea().isClean() && workingTree().isClean(),
            "You must have a clean working tree and index to perform a squash.");

    Optional<ObjectId> ancestor = command(FindCommonAncestor.class).setLeft(since).setRight(until).call();
    Preconditions.checkArgument(ancestor.isPresent(),
            "'since' and 'until' command do not have a common ancestor");
    Preconditions.checkArgument(ancestor.get().equals(since.getId()), "Commits provided in wrong order");

    Preconditions.checkArgument(!since.getParentIds().isEmpty(), "'since' commit has no parents");

    // we get a list of commits to apply on top of the squashed commits
    List<RevCommit> commits = getCommitsAfterUntil();

    ImmutableSet<Ref> refs = command(ForEachRef.class).setPrefixFilter(Ref.HEADS_PREFIX).call();

    // we create a list of all parents of those squashed commits, in case they are
    // merge commits. The resulting commit will have all these parents
    //
    // While iterating the set of commits to squash, we check that there are no branch starting
    // points among them. Any commit with more than one child causes an exception to be thrown,
    // since the squash operation does not support squashing those commits

    Iterator<RevCommit> toSquash = command(LogOp.class).setSince(since.getParentIds().get(0))
            .setUntil(until.getId()).setFirstParentOnly(true).call();
    List<ObjectId> firstParents = Lists.newArrayList();
    List<ObjectId> secondaryParents = Lists.newArrayList();
    final List<ObjectId> squashedIds = Lists.newArrayList();
    RevCommit commitToSquash = until;
    while (toSquash.hasNext()) {
        commitToSquash = toSquash.next();
        squashedIds.add(commitToSquash.getId());
        Preconditions.checkArgument(graphDb.getChildren(commitToSquash.getId()).size() < 2,
                "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it and the
            // starting commit has just one child
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commitToSquash.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commitToSquash.getParentIds().size() > 1,
                    "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        }
        ImmutableList<ObjectId> parentIds = commitToSquash.getParentIds();
        for (int i = 1; i < parentIds.size(); i++) {
            secondaryParents.add(parentIds.get(i));
        }
        firstParents.add(parentIds.get(0));
    }
    Preconditions.checkArgument(since.equals(commitToSquash),
            "Cannot reach 'since' from 'until' commit through first parentage");

    // We do the same check in the children commits
    for (RevCommit commit : commits) {
        Preconditions.checkArgument(graphDb.getChildren(commit.getId()).size() < 2,
                "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commit.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commit.getParentIds().size() > 1,
                    "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        }
    }

    ObjectId newHead;
    // rewind the head
    newHead = since.getParentIds().get(0);
    command(ResetOp.class).setCommit(Suppliers.ofInstance(newHead)).setMode(ResetMode.HARD).call();

    // add the current HEAD as first parent of the resulting commit
    // parents.add(0, newHead);

    // Create new commit
    List<ObjectId> parents = Lists.newArrayList();
    parents.addAll(firstParents);
    parents.addAll(secondaryParents);
    ObjectId endTree = until.getTreeId();
    CommitBuilder builder = new CommitBuilder(until);
    Collection<ObjectId> filteredParents = Collections2.filter(parents, new Predicate<ObjectId>() {
        @Override
        public boolean apply(@Nullable ObjectId id) {
            return !squashedIds.contains(id);
        }

    });

    builder.setParentIds(Lists.newArrayList(filteredParents));
    builder.setTreeId(endTree);
    if (message == null) {
        message = since.getMessage();
    }
    long timestamp = platform.currentTimeMillis();
    builder.setMessage(message);
    builder.setCommitter(resolveCommitter());
    builder.setCommitterEmail(resolveCommitterEmail());
    builder.setCommitterTimestamp(timestamp);
    builder.setCommitterTimeZoneOffset(platform.timeZoneOffset(timestamp));
    builder.setAuthorTimestamp(until.getAuthor().getTimestamp());

    RevCommit newCommit = builder.build();
    repository.objectDatabase().put(newCommit);

    newHead = newCommit.getId();
    ObjectId newTreeId = newCommit.getTreeId();

    command(UpdateRef.class).setName(currentBranch).setNewValue(newHead).call();
    command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(currentBranch).call();

    workingTree().updateWorkHead(newTreeId);
    stagingArea().updateStageHead(newTreeId);

    // now put the other commits after the squashed one
    newHead = addCommits(commits, currentBranch, newHead);

    return newHead;

}

From source file:org.eclipse.xtext.builder.impl.ToBeBuiltComputer.java

private IToBeBuiltComputerContribution getContribution(
        ImmutableList<? extends IToBeBuiltComputerContribution> contributedInstances) {
    switch (contributedInstances.size()) {
    case 0:
        return new NullContribution();
    case 1:
        return contributedInstances.get(0);
    default:
        return new CompositeContribution(contributedInstances);
    }
}

From source file:org.apache.gearpump.sql.rule.GearAggregationRule.java

private void updateWindowTrigger(RelOptRuleCall call, Aggregate aggregate, Project project) {
    ImmutableBitSet groupByFields = aggregate.getGroupSet();
    List<RexNode> projectMapping = project.getProjects();

    WindowFunction windowFn = new GlobalWindowFunction();
    Trigger triggerFn;
    int windowFieldIdx = -1;
    Duration allowedLatence = Duration.ZERO;

    for (int groupField : groupByFields.asList()) {
        RexNode projNode = projectMapping.get(groupField);
        if (projNode instanceof RexCall) {
            SqlOperator op = ((RexCall) projNode).op;
            ImmutableList<RexNode> parameters = ((RexCall) projNode).operands;
            String functionName = op.getName();
            switch (functionName) {
            case "TUMBLE":
                windowFieldIdx = groupField;
                windowFn = (WindowFunction) FixedWindows
                        .apply(Duration.ofMillis(getWindowParameterAsMillis(parameters.get(1))));
                if (parameters.size() == 3) {
                    GregorianCalendar delayTime = (GregorianCalendar) ((RexLiteral) parameters.get(2))
                            .getValue();
                    triggerFn = createTriggerWithDelay(delayTime);
                    allowedLatence = (Duration.ofMillis(delayTime.getTimeInMillis()));
                }
                break;
            case "HOP":
                windowFieldIdx = groupField;
                windowFn = (WindowFunction) SlidingWindows.apply(
                        Duration.ofMillis(getWindowParameterAsMillis(parameters.get(1))),
                        Duration.ofMillis(getWindowParameterAsMillis(parameters.get(2))));

                if (parameters.size() == 4) {
                    GregorianCalendar delayTime = (GregorianCalendar) ((RexLiteral) parameters.get(3))
                            .getValue();
                    triggerFn = createTriggerWithDelay(delayTime);
                    allowedLatence = (Duration.ofMillis(delayTime.getTimeInMillis()));
                }
                break;
            case "SESSION":
                windowFieldIdx = groupField;
                windowFn = (WindowFunction) SessionWindows
                        .apply(Duration.ofMillis(getWindowParameterAsMillis(parameters.get(1))));
                if (parameters.size() == 3) {
                    GregorianCalendar delayTime = (GregorianCalendar) ((RexLiteral) parameters.get(2))
                            .getValue();
                    triggerFn = createTriggerWithDelay(delayTime);
                    allowedLatence = (Duration.ofMillis(delayTime.getTimeInMillis()));
                }
                break;
            default:
                break;
            }
        }
    }

    try {
        GearAggregationRel gearRel = new GearAggregationRel(aggregate.getCluster(),
                aggregate.getTraitSet().replace(GearLogicalConvention.INSTANCE),
                convert(aggregate.getInput(),
                        aggregate.getInput().getTraitSet().replace(GearLogicalConvention.INSTANCE)),
                aggregate.indicator, aggregate.getGroupSet(), aggregate.getGroupSets(),
                aggregate.getAggCallList());
        gearRel.buildGearPipeline(GearConfiguration.app, null);
        GearConfiguration.app.submit().waitUntilFinish();
    } catch (Exception e) {
        LOG.error(e.getMessage());
    }

}

From source file:org.locationtech.geogig.api.porcelain.SquashOp.java

/**
 * Executes the squash operation.
 * 
 * @return the new head after modifying the history squashing commits
 * @see org.locationtech.geogig.api.AbstractGeoGigOp#call()
 */
@Override
protected ObjectId _call() {

    Preconditions.checkNotNull(since);
    Preconditions.checkNotNull(until);

    GraphDatabase graphDb = graphDatabase();
    Repository repository = repository();
    Platform platform = platform();

    final Optional<Ref> currHead = command(RefParse.class).setName(Ref.HEAD).call();
    Preconditions.checkState(currHead.isPresent(), "Repository has no HEAD, can't squash.");
    Preconditions.checkState(currHead.get() instanceof SymRef, "Can't squash from detached HEAD");
    final SymRef headRef = (SymRef) currHead.get();
    final String currentBranch = headRef.getTarget();

    Preconditions.checkState(index().isClean() && workingTree().isClean(),
            "You must have a clean working tree and index to perform a squash.");

    Optional<ObjectId> ancestor = command(FindCommonAncestor.class).setLeft(since).setRight(until).call();
    Preconditions.checkArgument(ancestor.isPresent(),
            "'since' and 'until' command do not have a common ancestor");
    Preconditions.checkArgument(ancestor.get().equals(since.getId()), "Commits provided in wrong order");

    Preconditions.checkArgument(!since.getParentIds().isEmpty(), "'since' commit has no parents");

    // we get a list of commits to apply on top of the squashed commits
    List<RevCommit> commits = getCommitsAfterUntil();

    ImmutableSet<Ref> refs = command(ForEachRef.class).setPrefixFilter(Ref.HEADS_PREFIX).call();

    // we create a list of all parents of those squashed commits, in case they are
    // merge commits. The resulting commit will have all these parents
    //
    // While iterating the set of commits to squash, we check that there are no branch starting
    // points among them. Any commit with more than one child causes an exception to be thrown,
    // since the squash operation does not support squashing those commits

    Iterator<RevCommit> toSquash = command(LogOp.class).setSince(since.getParentIds().get(0))
            .setUntil(until.getId()).setFirstParentOnly(true).call();
    List<ObjectId> firstParents = Lists.newArrayList();
    List<ObjectId> secondaryParents = Lists.newArrayList();
    final List<ObjectId> squashedIds = Lists.newArrayList();
    RevCommit commitToSquash = until;
    while (toSquash.hasNext()) {
        commitToSquash = toSquash.next();
        squashedIds.add(commitToSquash.getId());
        Preconditions.checkArgument(graphDb.getChildren(commitToSquash.getId()).size() < 2,
                "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it and the
            // starting commit has just one child
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commitToSquash.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commitToSquash.getParentIds().size() > 1,
                    "The commits to squash include a branch starting point. Squashing that type of commit is not supported.");
        }
        ImmutableList<ObjectId> parentIds = commitToSquash.getParentIds();
        for (int i = 1; i < parentIds.size(); i++) {
            secondaryParents.add(parentIds.get(i));
        }
        firstParents.add(parentIds.get(0));
    }
    Preconditions.checkArgument(since.equals(commitToSquash),
            "Cannot reach 'since' from 'until' commit through first parentage");

    // We do the same check in the children commits
    for (RevCommit commit : commits) {
        Preconditions.checkArgument(graphDb.getChildren(commit.getId()).size() < 2,
                "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        for (Ref ref : refs) {
            // In case a branch has been created but no commit has been made on it
            Preconditions.checkArgument(
                    !ref.getObjectId().equals(commit.getId())
                            || ref.getObjectId().equals(currHead.get().getObjectId())
                            || commit.getParentIds().size() > 1,
                    "The commits after the ones to squash include a branch starting point. This scenario is not supported.");
        }
    }

    ObjectId newHead;
    // rewind the head
    newHead = since.getParentIds().get(0);
    command(ResetOp.class).setCommit(Suppliers.ofInstance(newHead)).setMode(ResetMode.HARD).call();

    // add the current HEAD as first parent of the resulting commit
    // parents.add(0, newHead);

    // Create new commit
    List<ObjectId> parents = Lists.newArrayList();
    parents.addAll(firstParents);
    parents.addAll(secondaryParents);
    ObjectId endTree = until.getTreeId();
    CommitBuilder builder = new CommitBuilder(until);
    Collection<ObjectId> filteredParents = Collections2.filter(parents, new Predicate<ObjectId>() {
        @Override
        public boolean apply(@Nullable ObjectId id) {
            return !squashedIds.contains(id);
        }

    });

    builder.setParentIds(Lists.newArrayList(filteredParents));
    builder.setTreeId(endTree);
    if (message == null) {
        message = since.getMessage();
    }
    long timestamp = platform.currentTimeMillis();
    builder.setMessage(message);
    builder.setCommitter(resolveCommitter());
    builder.setCommitterEmail(resolveCommitterEmail());
    builder.setCommitterTimestamp(timestamp);
    builder.setCommitterTimeZoneOffset(platform.timeZoneOffset(timestamp));
    builder.setAuthorTimestamp(until.getAuthor().getTimestamp());

    RevCommit newCommit = builder.build();
    repository.objectDatabase().put(newCommit);

    newHead = newCommit.getId();
    ObjectId newTreeId = newCommit.getTreeId();

    command(UpdateRef.class).setName(currentBranch).setNewValue(newHead).call();
    command(UpdateSymRef.class).setName(Ref.HEAD).setNewValue(currentBranch).call();

    workingTree().updateWorkHead(newTreeId);
    index().updateStageHead(newTreeId);

    // now put the other commits after the squashed one
    newHead = addCommits(commits, currentBranch, newHead);

    return newHead;

}