Example usage for com.google.common.collect Iterators peekingIterator

List of usage examples for com.google.common.collect Iterators peekingIterator

Introduction

On this page you can find example usage of com.google.common.collect Iterators peekingIterator.

Prototype

public static <T> PeekingIterator<T> peekingIterator(Iterator<? extends T> iterator)

Document

Returns a PeekingIterator backed by the given iterator.
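
A minimal sketch of the typical pattern, assuming a plain list of strings as the source (the class and variable names here are illustrative only): wrap any Iterator with Iterators.peekingIterator and call peek() to inspect the next element without consuming it.

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;

import java.util.Arrays;
import java.util.Iterator;

public class PeekingIteratorSketch {
    public static void main(String[] args) {
        Iterator<String> source = Arrays.asList("alice", "bob", "carol").iterator();

        // Wrap the plain iterator so the next element can be inspected without consuming it.
        PeekingIterator<String> it = Iterators.peekingIterator(source);

        while (it.hasNext()) {
            String upcoming = it.peek();  // looks ahead, does not advance
            String consumed = it.next();  // advances; same element as upcoming
            System.out.println(upcoming + " -> " + consumed);
        }
    }
}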

Usage

From source file:org.renjin.eval.Calls.java

/**
 * Argument matching is done by a three-pass process:
 * <ol>
 * <li><strong>Exact matching on tags.</strong> For each named supplied argument the list of formal arguments
 *  is searched for an item whose name matches exactly. It is an error to have the same formal
 * argument match several actuals or vice versa.</li>
 *
 * <li><strong>Partial matching on tags.</strong> Each remaining named supplied argument is compared to the
 * remaining formal arguments using partial matching. If the name of the supplied argument
 * matches exactly with the first part of a formal argument then the two arguments are considered
 * to be matched. It is an error to have multiple partial matches.
 *  Notice that if f <- function(fumble, fooey) fbody, then f(f = 1, fo = 2) is illegal,
 * even though the 2nd actual argument only matches fooey. f(f = 1, fooey = 2) is legal
 * though since the second argument matches exactly and is removed from consideration for
 * partial matching. If the formal arguments contain ... then partial matching is only applied to
 * arguments that precede it.</li>
 *
 * <li><strong>Positional matching.</strong> Any unmatched formal arguments are bound to unnamed supplied arguments,
 * in order. If there is a ... argument, it will take up the remaining arguments, tagged or not.
 * If any arguments remain unmatched an error is declared.</li>
 * </ol>
 *
 * @param formals the formal arguments declared by the function
 * @param actuals the actual arguments supplied to the call
 * @param populateMissing whether unmatched formal arguments should be bound to Symbol.MISSING_ARG
 */
public static PairList matchArguments(PairList formals, PairList actuals, boolean populateMissing) {

    PairList.Builder result = new PairList.Builder();

    List<PairList.Node> unmatchedActuals = Lists.newArrayList();
    for (PairList.Node argNode : actuals.nodes()) {
        unmatchedActuals.add(argNode);
    }

    List<PairList.Node> unmatchedFormals = Lists.newArrayList(formals.nodes());

    // do exact matching
    for (ListIterator<PairList.Node> formalIt = unmatchedFormals.listIterator(); formalIt.hasNext();) {
        PairList.Node formal = formalIt.next();
        if (formal.hasTag()) {
            Symbol name = formal.getTag();
            Collection<PairList.Node> matches = Collections2.filter(unmatchedActuals,
                    PairList.Predicates.matches(name));

            if (matches.size() == 1) {
                PairList.Node match = first(matches);
                result.add(name, match.getValue());
                formalIt.remove();
                unmatchedActuals.remove(match);

            } else if (matches.size() > 1) {
                throw new EvalException(
                        String.format("Multiple named values provided for argument '%s'", name.getPrintName()));
            }
        }
    }

    // do partial matching
    Collection<PairList.Node> remainingNamedFormals = filter(unmatchedFormals, PairList.Predicates.hasTag());
    for (Iterator<PairList.Node> actualIt = unmatchedActuals.iterator(); actualIt.hasNext();) {
        PairList.Node actual = actualIt.next();
        if (actual.hasTag()) {
            Collection<PairList.Node> matches = Collections2.filter(remainingNamedFormals,
                    PairList.Predicates.startsWith(actual.getTag()));

            if (matches.size() == 1) {
                PairList.Node match = first(matches);
                result.add(match.getTag(), actual.getValue());
                actualIt.remove();
                unmatchedFormals.remove(match);

            } else if (matches.size() > 1) {
                throw new EvalException(
                        String.format("Provided argument '%s' matches multiple named formal arguments: %s",
                                actual.getTag().getPrintName(), argumentTagList(matches)));
            }
        }
    }

    // match any unnamed args positionally

    Iterator<PairList.Node> formalIt = unmatchedFormals.iterator();
    PeekingIterator<PairList.Node> actualIt = Iterators.peekingIterator(unmatchedActuals.iterator());
    while (formalIt.hasNext()) {
        PairList.Node formal = formalIt.next();
        if (Symbols.ELLIPSES.equals(formal.getTag())) {
            PromisePairList.Builder promises = new PromisePairList.Builder();
            while (actualIt.hasNext()) {
                PairList.Node actual = actualIt.next();
                promises.add(actual.getRawTag(), actual.getValue());
            }
            result.add(formal.getTag(), promises.build());

        } else if (hasNextUnTagged(actualIt)) {
            result.add(formal.getTag(), nextUnTagged(actualIt).getValue());

        } else if (populateMissing) {
            result.add(formal.getTag(), Symbol.MISSING_ARG);
        }
    }
    if (actualIt.hasNext()) {
        throw new EvalException(String.format("Unmatched positional arguments"));
    }

    return result.build();
}
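
The hasNextUnTagged and nextUnTagged helpers are not part of this excerpt; a minimal sketch of what they might look like (an assumption, not Renjin's actual code) shows how the PeekingIterator lets the positional-matching loop look ahead for the next unnamed argument without consuming anything:

// Hypothetical helpers, sketched for illustration only -- not copied from Renjin.
private static boolean hasNextUnTagged(PeekingIterator<PairList.Node> it) {
    // peek() lets us test whether the next supplied argument is unnamed
    // without removing it from the iterator.
    return it.hasNext() && !it.peek().hasTag();
}

private static PairList.Node nextUnTagged(PeekingIterator<PairList.Node> it) {
    return it.next();
}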

From source file:org.geogit.api.plumbing.diff.DiffCounter.java

private DiffObjectCount countChildrenDiffs(Iterator<Node> leftTree, Iterator<Node> rightTree) {

    final Ordering<Node> storageOrder = new NodeStorageOrder();

    DiffObjectCount count = new DiffObjectCount();

    PeekingIterator<Node> left = Iterators.peekingIterator(leftTree);
    PeekingIterator<Node> right = Iterators.peekingIterator(rightTree);

    while (left.hasNext() && right.hasNext()) {
        Node peekLeft = left.peek();
        Node peekRight = right.peek();

        if (0 == storageOrder.compare(peekLeft, peekRight)) {
            // same path, consume both
            peekLeft = left.next();
            peekRight = right.next();
            if (!peekLeft.getObjectId().equals(peekRight.getObjectId())) {
                // find the diffs between these two specific refs
                if (RevObject.TYPE.FEATURE.equals(peekLeft.getType())) {
                    checkState(RevObject.TYPE.FEATURE.equals(peekRight.getType()));
                    count.addFeatures(1);
                } else {
                    checkState(RevObject.TYPE.TREE.equals(peekLeft.getType()));
                    checkState(RevObject.TYPE.TREE.equals(peekRight.getType()));
                    ObjectId leftTreeId = peekLeft.getObjectId();
                    ObjectId rightTreeId = peekRight.getObjectId();
                    count.add(countDiffs(leftTreeId, rightTreeId));
                }
            }
        } else if (peekLeft == storageOrder.min(peekLeft, peekRight)) {
            peekLeft = left.next();// consume only the left value
            count.add(aggregateSize(ImmutableList.of(peekLeft)));
        } else {
            peekRight = right.next();// consume only the right value
            count.add(aggregateSize(ImmutableList.of(peekRight)));
        }
    }

    if (left.hasNext()) {
        count.add(countRemaining(left));
    } else if (right.hasNext()) {
        count.add(countRemaining(right));
    }
    Preconditions.checkState(!left.hasNext());
    Preconditions.checkState(!right.hasNext());
    return count;
}
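
The loop above is an instance of a common PeekingIterator pattern: walk two iterators that share a sort order and use peek() to decide which side to advance. A stripped-down sketch of that merge-compare loop (the types and output here are illustrative, not taken from GeoGit):

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;

import java.util.Iterator;

public class SortedDiffSketch {

    /** Reports elements unique to each side; both inputs must be sorted ascending. */
    static void diffSorted(Iterator<Integer> leftSource, Iterator<Integer> rightSource) {
        PeekingIterator<Integer> left = Iterators.peekingIterator(leftSource);
        PeekingIterator<Integer> right = Iterators.peekingIterator(rightSource);

        while (left.hasNext() && right.hasNext()) {
            int cmp = left.peek().compareTo(right.peek());
            if (cmp == 0) {
                // same key on both sides: consume both
                left.next();
                right.next();
            } else if (cmp < 0) {
                // key only present on the left side: consume only the left value
                System.out.println("left only: " + left.next());
            } else {
                // key only present on the right side: consume only the right value
                System.out.println("right only: " + right.next());
            }
        }
        // drain whichever side still has elements
        left.forEachRemaining(v -> System.out.println("left only: " + v));
        right.forEachRemaining(v -> System.out.println("right only: " + v));
    }
}

The HashTreesImpl.syncSegment example further down follows the same shape.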

From source file:org.pircbotx.Channel.java

/**
 * Sets the mode of the channel. If there is a getMode() waiting on this,
 * fire it.
 *
 * @param mode the raw mode string
 * @param modeParsed the mode letters followed by any mode parameters, split into a list
 */
protected void setMode(String mode, ImmutableList<String> modeParsed) {
    synchronized (modeChangeLock) {
        this.mode = mode;

        //Parse out mode
        PeekingIterator<String> params = Iterators.peekingIterator(modeParsed.iterator());

        //Process modes letter by letter, grabbing parameters as needed
        boolean adding = true;
        String modeLetters = params.next();
        for (int i = 0; i < modeLetters.length(); i++) {
            char curModeChar = modeLetters.charAt(i);
            if (curModeChar == '+')
                adding = true;
            else if (curModeChar == '-')
                adding = false;
            else {
                ChannelModeHandler modeHandler = bot.getConfiguration().getChannelModeHandlers()
                        .get(curModeChar);
                if (modeHandler != null)
                    modeHandler.handleMode(bot, this, null, null, params, adding, false);
            }
        }

        if (modeChangeLatch != null)
            modeChangeLatch.countDown();
    }
}

From source file:org.eclipse.milo.opcua.binaryschema.AbstractCodec.java

@Override
public StructureT decode(SerializationContext context, OpcUaBinaryStreamDecoder decoder)
        throws UaSerializationException {

    LinkedHashMap<String, MemberT> members = new LinkedHashMap<>();

    PeekingIterator<FieldType> fieldIterator = Iterators.peekingIterator(structuredType.getField().iterator());

    while (fieldIterator.hasNext()) {
        FieldType field = fieldIterator.next();
        String fieldName = field.getName();
        String typeName = field.getTypeName().getLocalPart();
        String typeNamespace = field.getTypeName().getNamespaceURI();

        if (!fieldIsPresent(field, members)) {
            continue;
        }

        boolean typeNamespaceIsUa = Namespaces.OPC_UA.equals(typeNamespace)
                || Namespaces.OPC_UA_BSD.equals(typeNamespace);

        if (fieldIsScalar(field)) {
            if (typeNamespaceIsUa && READERS.containsKey(typeName)) {
                Object value = READERS.get(typeName).apply(decoder);

                members.put(fieldName, opcUaToMemberTypeScalar(fieldName, value, typeName));
            } else {
                Object value = context.decode(typeNamespace, typeName, decoder);

                members.put(fieldName, opcUaToMemberTypeScalar(fieldName, value, typeName));
            }
        } else {
            if (field.isIsLengthInBytes()) {
                throw new UaSerializationException(StatusCodes.Bad_DecodingError,
                        "IsLengthInBytes=true not supported");
            }

            int length = fieldLength(field, members);

            if ("Bit".equals(typeName) && typeNamespaceIsUa) {
                BigInteger bitAccumulation = BigInteger.valueOf(0L);

                for (int i = 0; i < length; i++) {
                    BigInteger bitValue = BigInteger.valueOf(decoder.readBit());

                    bitAccumulation = bitAccumulation.or(bitValue.shiftLeft(i));
                }

                members.put(fieldName, opcUaToMemberTypeArray(fieldName, bitAccumulation.intValue(), typeName));
            } else {
                Object[] values = new Object[length];

                if (typeNamespaceIsUa && READERS.containsKey(typeName)) {
                    for (int i = 0; i < length; i++) {
                        Object value = READERS.get(typeName).apply(decoder);

                        values[i] = value;
                    }
                } else {
                    for (int i = 0; i < length; i++) {
                        Object value = context.decode(typeNamespace, typeName, decoder);

                        values[i] = value;
                    }
                }

                members.put(fieldName, opcUaToMemberTypeArray(fieldName, values, typeName));
            }
        }
    }

    return createStructure(structuredType.getName(), members);
}

From source file:com.palantir.atlasdb.keyvalue.impl.InMemoryKeyValueService.java

private <T> ClosableIterator<RowResult<T>> getRangeInternal(String tableName, final RangeRequest range,
        final ResultProducer<T> resultProducer) {
    ConcurrentNavigableMap<Key, byte[]> tableMap = getTableMap(tableName).entries;
    if (range.isReverse()) {
        tableMap = tableMap.descendingMap();
    }
    if (range.getStartInclusive().length != 0) {
        if (range.isReverse()) {
            Cell startCell = Cells.createLargestCellForRow(range.getStartInclusive());
            tableMap = tableMap.tailMap(new Key(startCell, Long.MIN_VALUE));
        } else {
            Cell startCell = Cells.createSmallestCellForRow(range.getStartInclusive());
            tableMap = tableMap.tailMap(new Key(startCell, Long.MIN_VALUE));
        }
    }
    if (range.getEndExclusive().length != 0) {
        if (range.isReverse()) {
            Cell endCell = Cells.createLargestCellForRow(range.getEndExclusive());
            tableMap = tableMap.headMap(new Key(endCell, Long.MAX_VALUE));
        } else {
            Cell endCell = Cells.createSmallestCellForRow(range.getEndExclusive());
            tableMap = tableMap.headMap(new Key(endCell, Long.MAX_VALUE));
        }
    }
    final PeekingIterator<Entry<Key, byte[]>> it = Iterators.peekingIterator(tableMap.entrySet().iterator());
    return ClosableIterators.wrap(new AbstractIterator<RowResult<T>>() {
        @Override
        protected RowResult<T> computeNext() {
            while (true) {
                if (!it.hasNext()) {
                    return endOfData();
                }
                ImmutableSortedMap.Builder<byte[], T> result = ImmutableSortedMap
                        .orderedBy(UnsignedBytes.lexicographicalComparator());
                Key key = it.peek().getKey();
                byte[] row = key.row;
                Iterator<Entry<Key, byte[]>> cellIter = takeCell(it, key);
                collectValueForTimestamp(key.col, cellIter, result, range, resultProducer);

                while (it.hasNext()) {
                    if (!it.peek().getKey().matchesRow(row)) {
                        break;
                    }
                    key = it.peek().getKey();
                    cellIter = takeCell(it, key);
                    collectValueForTimestamp(key.col, cellIter, result, range, resultProducer);
                }
                SortedMap<byte[], T> columns = result.build();
                if (!columns.isEmpty()) {
                    return RowResult.create(row, columns);
                }
            }
        }

    });
}

From source file:org.hashtrees.HashTreesImpl.java

private int syncSegment(long treeId, int segId, HashTrees remoteTree, boolean doUpdate) throws IOException {
    PeekingIterator<SegmentData> localDataItr = Iterators.peekingIterator(getSegment(treeId, segId).iterator());
    PeekingIterator<SegmentData> remoteDataItr = Iterators
            .peekingIterator(remoteTree.getSegment(treeId, segId).iterator());

    List<KeyValue> kvsForAddition = new ArrayList<KeyValue>();
    List<ByteBuffer> keysForRemoval = new ArrayList<ByteBuffer>();

    SegmentData local, remote;
    while (localDataItr.hasNext() || remoteDataItr.hasNext()) {
        local = localDataItr.hasNext() ? localDataItr.peek() : null;
        remote = remoteDataItr.hasNext() ? remoteDataItr.peek() : null;

        int compRes = compareSegmentKeys(local, remote);
        if (compRes == 0) {
            if (!Arrays.equals(local.getDigest(), remote.getDigest())) {
                ByteBuffer key = ByteBuffer.wrap(local.getKey());
                byte[] value = store.get(local.getKey());
                if (value != null)
                    kvsForAddition.add(new KeyValue(key, ByteBuffer.wrap(value)));
            }
            localDataItr.next();
            remoteDataItr.next();
        } else if (compRes < 0) {
            ByteBuffer key = ByteBuffer.wrap(local.getKey());
            byte[] value = store.get(local.getKey());
            if (value != null)
                kvsForAddition.add(new KeyValue(key, ByteBuffer.wrap(value)));
            localDataItr.next();
        } else {
            keysForRemoval.add(ByteBuffer.wrap(remote.getKey()));
            remoteDataItr.next();
        }
    }

    if (doUpdate) {
        if (kvsForAddition.size() > 0)
            remoteTree.sPut(kvsForAddition);
        if (keysForRemoval.size() > 0)
            remoteTree.sRemove(keysForRemoval);
    }

    return kvsForAddition.size() + keysForRemoval.size();
}

From source file:org.diqube.data.types.lng.array.RunLengthLongArray.java

@Override
public List<Long> getMultiple(List<Integer> sortedIndices) throws ArrayIndexOutOfBoundsException {
    // first: Find the internal indices that we need to resolve
    List<Integer> internalIndicesToResolveSorted = new ArrayList<>();

    try {
        int internalSize;
        if (compressedCounts != null)
            internalSize = compressedCounts.length;
        else
            internalSize = delegateCompressedCounts.size();

        PeekingIterator<Integer> sortedIndicesIt = Iterators.peekingIterator(sortedIndices.iterator());
        if (sortedIndicesIt.peek() < 0 || sortedIndicesIt.peek() >= size)
            throw new ArrayIndexOutOfBoundsException("Array index out of bounds: Requested index "
                    + sortedIndicesIt.peek() + " but have only " + size + " elements.");

        int decompressedCount = 0;
        for (int pos = 0; pos < internalSize && sortedIndicesIt.hasNext(); pos++) {
            long lengthValue;
            if (compressedValues != null)
                lengthValue = compressedCounts[pos];
            else
                lengthValue = delegateCompressedCounts.get(pos);

            decompressedCount += lengthValue;

            while (sortedIndicesIt.hasNext() && sortedIndicesIt.peek() < decompressedCount) {
                internalIndicesToResolveSorted.add(pos);
                sortedIndicesIt.next();
                if (sortedIndicesIt.hasNext() && (sortedIndicesIt.peek() < 0 || sortedIndicesIt.peek() >= size))
                    throw new ArrayIndexOutOfBoundsException("Array index out of bounds: Requested index "
                            + sortedIndicesIt.peek() + " but have only " + size + " elements.");
            }
        }
    } catch (Throwable t) {
        throw t;
    }

    // second: resolve those internal indices
    List<Long> res = new ArrayList<>();
    if (compressedValues != null) {
        for (int idx : internalIndicesToResolveSorted)
            res.add(compressedValues[idx]);
    } else {
        // unique-ify indices to resolve
        List<Integer> delegateIdx = new ArrayList<>(new TreeSet<>(internalIndicesToResolveSorted));
        List<Long> delegateRes = delegateCompressedValue.getMultiple(delegateIdx);

        PeekingIterator<Integer> delegateIdxIt = Iterators.peekingIterator(delegateIdx.iterator());
        PeekingIterator<Long> delegateResIt = Iterators.peekingIterator(delegateRes.iterator());

        for (int idx : internalIndicesToResolveSorted) {
            while (delegateIdxIt.peek() != idx) {
                delegateIdxIt.next();
                delegateResIt.next();
            }

            res.add(delegateResIt.peek());
        }
    }
    return res;
}

From source file:org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.java

/**
 * Group files with similar min timestamp into buckets. Files with recent min timestamps are grouped together into
 * buckets designated to short timespans while files with older timestamps are grouped into buckets representing
 * longer timespans.
 * @param files pairs consisting of a file and its min timestamp
 * @param timeUnit
 * @param base
 * @param now
 * @return a list of buckets of files. The list is ordered such that the files with newest timestamps come first.
 *         Each bucket is also a list of files ordered from newest to oldest.
 */
@VisibleForTesting
static <T> List<List<T>> getBuckets(Collection<Pair<T, Long>> files, long timeUnit, int base, long now,
        long maxWindowSize) {
    // Sort files by age. Newest first.
    final List<Pair<T, Long>> sortedFiles = Lists.newArrayList(files);
    Collections.sort(sortedFiles, Collections.reverseOrder(new Comparator<Pair<T, Long>>() {
        public int compare(Pair<T, Long> p1, Pair<T, Long> p2) {
            return p1.right.compareTo(p2.right);
        }
    }));

    List<List<T>> buckets = Lists.newArrayList();
    Target target = getInitialTarget(now, timeUnit, maxWindowSize);
    PeekingIterator<Pair<T, Long>> it = Iterators.peekingIterator(sortedFiles.iterator());

    outerLoop: while (it.hasNext()) {
        while (!target.onTarget(it.peek().right)) {
            // If the file is too new for the target, skip it.
            if (target.compareToTimestamp(it.peek().right) < 0) {
                it.next();

                if (!it.hasNext())
                    break outerLoop;
            } else // If the file is too old for the target, switch targets.
                target = target.nextTarget(base);
        }
        List<T> bucket = Lists.newArrayList();
        while (target.onTarget(it.peek().right)) {
            bucket.add(it.next().left);

            if (!it.hasNext())
                break;
        }
        buckets.add(bucket);
    }

    return buckets;
}

From source file:com.indeed.lsmtree.core.ItUtil.java

public static <E> Iterator<E> merge(Collection<Iterator<E>> iterators, final Comparator<E> comparator) {
    Comparator<PeekingIterator<E>> heapComparator = new Comparator<PeekingIterator<E>>() {
        @Override
        public int compare(final PeekingIterator<E> o1, final PeekingIterator<E> o2) {
            return comparator.compare(o1.peek(), o2.peek());
        }
    };
    final ObjectHeapPriorityQueue<PeekingIterator<E>> heap = new ObjectHeapPriorityQueue<PeekingIterator<E>>(
            heapComparator);
    for (Iterator<E> iterator : iterators) {
        if (iterator.hasNext()) {
            if (iterator instanceof PeekingIterator) {
                heap.enqueue((PeekingIterator<E>) iterator);
            } else {
                heap.enqueue(Iterators.peekingIterator(iterator));
            }
        }
    }
    return new AbstractIterator<E>() {
        @Override
        protected E computeNext() {
            if (heap.isEmpty()) {
                endOfData();
                return null;
            }
            PeekingIterator<E> iterator = heap.first();
            E ret = iterator.next();
            if (iterator.hasNext()) {
                heap.changed();
            } else {
                heap.dequeue();
            }
            return ret;
        }
    };
}

From source file:org.apache.cassandra.db.Memtable.java

/**
 * obtain an iterator of columns in this memtable in the specified order starting from a given column.
 */
public static IColumnIterator getSliceIterator(final DecoratedKey key, final ColumnFamily cf,
        SliceQueryFilter filter, AbstractType typeComparator) {
    assert cf != null;
    final boolean isSuper = cf.isSuper();
    final Collection<IColumn> filteredColumns = filter.reversed ? cf.getReverseSortedColumns()
            : cf.getSortedColumns();

    // ok to not have subcolumnComparator since we won't be adding columns to this object
    IColumn startColumn = isSuper ? new SuperColumn(filter.start, (AbstractType) null)
            : new Column(filter.start);
    Comparator<IColumn> comparator = filter.getColumnComparator(typeComparator);

    final PeekingIterator<IColumn> filteredIter = Iterators.peekingIterator(filteredColumns.iterator());
    if (!filter.reversed || filter.start.remaining() != 0) {
        while (filteredIter.hasNext() && comparator.compare(filteredIter.peek(), startColumn) < 0) {
            filteredIter.next();
        }
    }

    return new AbstractColumnIterator() {
        public ColumnFamily getColumnFamily() {
            return cf;
        }

        public DecoratedKey getKey() {
            return key;
        }

        public boolean hasNext() {
            return filteredIter.hasNext();
        }

        public IColumn next() {
            return filteredIter.next();
        }
    };
}