Example usage for com.google.common.collect PeekingIterator next

Introduction

This page collects example usages of com.google.common.collect.PeekingIterator#next() drawn from the open-source projects listed below.

Prototype

@Override
E next();

Document

The objects returned by consecutive calls to #peek() then #next() are guaranteed to be equal to each other.
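As a minimal sketch of that contract (not taken from any of the projects below; the class name is made up for illustration), the following snippet walks a list with a PeekingIterator and checks that peek() and next() return equal elements:

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;

import java.util.Arrays;
import java.util.List;

public class PeekThenNextDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("alpha", "beta", "gamma");
        PeekingIterator<String> it = Iterators.peekingIterator(words.iterator());
        while (it.hasNext()) {
            String peeked = it.peek();    // look ahead without consuming
            String consumed = it.next();  // consume the same element
            // peek() followed by next() returns equal objects
            assert peeked.equals(consumed);
            System.out.println(consumed);
        }
    }
}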

Usage

From source file:com.palantir.atlasdb.keyvalue.partition.util.RowResultUtil.java

public static RowResult<Set<Value>> allResults(PeekingIterator<RowResult<Set<Value>>> it) {
    Preconditions.checkArgument(it.hasNext());

    final byte[] row = it.peek().getRowName();
    SortedMap<byte[], Set<Value>> result = Maps.newTreeMap(UnsignedBytes.lexicographicalComparator());
    while (it.hasNext() && Arrays.equals(row, it.peek().getRowName())) {
        RowResult<Set<Value>> kvsResult = it.next();
        for (Map.Entry<Cell, Set<Value>> e : kvsResult.getCells()) {
            final byte[] col = e.getKey().getColumnName();
            if (!result.containsKey(col)) {
                result.put(col, Sets.<Value>newHashSet());
            }
            result.get(col).addAll(e.getValue());
        }
    }

    // Assert that no two values for the same cell share a timestamp
    for (Set<Value> cell : result.values()) {
        for (Value val : cell) {
            for (Value otherVal : cell) {
                if (val != otherVal) {
                    assert val.getTimestamp() != otherVal.getTimestamp();
                }
            }
        }
    }

    return RowResult.create(row, result);
}
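
The peek-before-consume idiom above, draining every entry that shares the peeked row key, can be reduced to a self-contained sketch. The entry type and the valuesForNextKey helper below are hypothetical stand-ins, not part of the AtlasDB code:

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;

import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class GroupByPeekedKey {
    // Collects the values of all consecutive entries that share the key of the next entry.
    static List<String> valuesForNextKey(PeekingIterator<Map.Entry<String, String>> it) {
        String key = it.peek().getKey();
        List<String> values = new ArrayList<>();
        while (it.hasNext() && key.equals(it.peek().getKey())) {
            values.add(it.next().getValue());
        }
        return values;
    }

    public static void main(String[] args) {
        List<Map.Entry<String, String>> entries = Arrays.asList(
                new SimpleEntry<>("row1", "a"),
                new SimpleEntry<>("row1", "b"),
                new SimpleEntry<>("row2", "c"));
        PeekingIterator<Map.Entry<String, String>> it = Iterators.peekingIterator(entries.iterator());
        System.out.println(valuesForNextKey(it)); // [a, b]
        System.out.println(valuesForNextKey(it)); // [c]
    }
}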

From source file:io.scigraph.annotation.EntityProcessorImpl.java

/**
 * Convert a list of annotations into annotation groups.
 *
 * @param annotationList
 *          Annotations
 * @param longestOnly
 *          Whether shorter entities should be removed from annotation groups
 * @return annotation groups
 */
static List<EntityAnnotationGroup> getAnnotationGroups(List<EntityAnnotation> annotationList,
        boolean longestOnly) {
    List<EntityAnnotationGroup> groups = new ArrayList<>();
    Collections.sort(annotationList, Collections.reverseOrder());
    PeekingIterator<EntityAnnotation> iter = Iterators.peekingIterator(annotationList.iterator());
    while (iter.hasNext()) {
        EntityAnnotationGroup group = new EntityAnnotationGroup();
        group.add(iter.next());
        Set<Entity> entitiesInGroup = new HashSet<>();
        while (iter.hasNext() && group.intersects(iter.peek())) {
            if (!entitiesInGroup.contains(iter.peek().getToken())) {
                entitiesInGroup.add(iter.peek().getToken());
                group.add(iter.next());
            } else {
                iter.next();
            }
        }

        if (longestOnly) {
            // Remove any entries that aren't as long as the first one
            Iterator<EntityAnnotation> groupIter = group.iterator();
            int longest = group.peek().length();
            while (groupIter.hasNext()) {
                EntityAnnotation annot = groupIter.next();
                if (annot.length() < longest) {
                    groupIter.remove();
                }
            }
        }

        groups.add(group);
    }

    return groups;
}

From source file:io.prestosql.plugin.raptor.legacy.systemtables.TableMetadataSystemTable.java

private static List<Page> buildPages(MetadataDao dao, ConnectorTableMetadata tableMetadata,
        TupleDomain<Integer> tupleDomain) {
    Map<Integer, NullableValue> domainValues = extractFixedValues(tupleDomain).orElse(ImmutableMap.of());
    String schemaName = getStringValue(domainValues.get(getColumnIndex(tableMetadata, SCHEMA_NAME)));
    String tableName = getStringValue(domainValues.get(getColumnIndex(tableMetadata, TABLE_NAME)));

    PageListBuilder pageBuilder = new PageListBuilder(
            tableMetadata.getColumns().stream().map(ColumnMetadata::getType).collect(toList()));

    List<TableMetadataRow> tableRows = dao.getTableMetadataRows(schemaName, tableName);
    PeekingIterator<ColumnMetadataRow> columnRowIterator = peekingIterator(
            dao.getColumnMetadataRows(schemaName, tableName).iterator());

    for (TableMetadataRow tableRow : tableRows) {
        while (columnRowIterator.hasNext() && columnRowIterator.peek().getTableId() < tableRow.getTableId()) {
            columnRowIterator.next();
        }

        String temporalColumnName = null;
        SortedMap<Integer, String> sortColumnNames = new TreeMap<>();
        SortedMap<Integer, String> bucketColumnNames = new TreeMap<>();
        OptionalLong temporalColumnId = tableRow.getTemporalColumnId();
        while (columnRowIterator.hasNext() && columnRowIterator.peek().getTableId() == tableRow.getTableId()) {
            ColumnMetadataRow columnRow = columnRowIterator.next();
            if (temporalColumnId.isPresent() && columnRow.getColumnId() == temporalColumnId.getAsLong()) {
                temporalColumnName = columnRow.getColumnName();
            }
            OptionalInt sortOrdinalPosition = columnRow.getSortOrdinalPosition();
            if (sortOrdinalPosition.isPresent()) {
                sortColumnNames.put(sortOrdinalPosition.getAsInt(), columnRow.getColumnName());
            }
            OptionalInt bucketOrdinalPosition = columnRow.getBucketOrdinalPosition();
            if (bucketOrdinalPosition.isPresent()) {
                bucketColumnNames.put(bucketOrdinalPosition.getAsInt(), columnRow.getColumnName());
            }
        }

        pageBuilder.beginRow();

        // schema_name, table_name
        VARCHAR.writeSlice(pageBuilder.nextBlockBuilder(), utf8Slice(tableRow.getSchemaName()));
        VARCHAR.writeSlice(pageBuilder.nextBlockBuilder(), utf8Slice(tableRow.getTableName()));

        // temporal_column
        if (temporalColumnId.isPresent()) {
            if (temporalColumnName == null) {
                throw new PrestoException(RAPTOR_CORRUPT_METADATA,
                        format("Table ID %s has corrupt metadata (invalid temporal column ID)",
                                tableRow.getTableId()));
            }
            VARCHAR.writeSlice(pageBuilder.nextBlockBuilder(), utf8Slice(temporalColumnName));
        } else {
            pageBuilder.nextBlockBuilder().appendNull();
        }

        // ordering_columns
        writeArray(pageBuilder.nextBlockBuilder(), sortColumnNames.values());

        // distribution_name
        Optional<String> distributionName = tableRow.getDistributionName();
        if (distributionName.isPresent()) {
            VARCHAR.writeSlice(pageBuilder.nextBlockBuilder(), utf8Slice(distributionName.get()));
        } else {
            pageBuilder.nextBlockBuilder().appendNull();
        }

        // bucket_count
        OptionalInt bucketCount = tableRow.getBucketCount();
        if (bucketCount.isPresent()) {
            BIGINT.writeLong(pageBuilder.nextBlockBuilder(), bucketCount.getAsInt());
        } else {
            pageBuilder.nextBlockBuilder().appendNull();
        }

        // bucketing_columns
        writeArray(pageBuilder.nextBlockBuilder(), bucketColumnNames.values());

        // organized
        BOOLEAN.writeBoolean(pageBuilder.nextBlockBuilder(), tableRow.isOrganized());
    }

    return pageBuilder.build();
}

From source file:com.github.rinde.rinsim.geom.Graphs.java

/**
 * Create a path of connections on the specified {@link Graph} using the
 * specified {@link Point}s. If the points <code>A, B, C</code> are specified,
 * the two connections: <code>A -&gt; B</code> and <code>B -&gt; C</code> will
 * be added to the graph.
 * @param graph The graph to which the connections will be added.
 * @param path Points that will be treated as a path.
 * @param <E> The type of connection data.
 */
public static <E extends ConnectionData> void addPath(Graph<E> graph, Iterable<Point> path) {
    final PeekingIterator<Point> it = Iterators.peekingIterator(path.iterator());
    while (it.hasNext()) {
        final Point n = it.next();
        if (it.hasNext()) {
            graph.addConnection(n, it.peek());
        }
    }
}
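
The same look-ahead idiom, pairing each element with its successor, works with any element type. A minimal sketch under that assumption (plain strings instead of RinSim Points, and printing instead of adding graph connections):

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;

import java.util.Arrays;
import java.util.List;

public class PairwisePathDemo {
    public static void main(String[] args) {
        List<String> path = Arrays.asList("A", "B", "C");
        PeekingIterator<String> it = Iterators.peekingIterator(path.iterator());
        while (it.hasNext()) {
            String from = it.next();
            if (it.hasNext()) {
                // Each node is paired with the one that follows it: A -> B, B -> C
                System.out.println(from + " -> " + it.peek());
            }
        }
    }
}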

From source file:com.palantir.common.collect.IteratorUtils.java

/**
 * The iterators provided to this function must be sorted and strictly increasing.
 */
public static <T> Iterator<T> mergeIterators(Iterator<? extends T> one, Iterator<? extends T> two,
        final Comparator<? super T> ordering, final Function<? super Pair<T, T>, ? extends T> mergeFunction) {
    Preconditions.checkNotNull(mergeFunction);
    Preconditions.checkNotNull(ordering);
    final PeekingIterator<T> a = Iterators.peekingIterator(one);
    final PeekingIterator<T> b = Iterators.peekingIterator(two);
    return new AbstractIterator<T>() {
        @Override
        protected T computeNext() {
            if (!a.hasNext() && !b.hasNext()) {
                return endOfData();
            }
            if (!a.hasNext()) {
                T ret = b.next();
                if (b.hasNext()) {
                    assert ordering.compare(ret, b.peek()) < 0;
                }
                return ret;
            }
            if (!b.hasNext()) {
                T ret = a.next();
                if (a.hasNext()) {
                    assert ordering.compare(ret, a.peek()) < 0;
                }
                return ret;
            }
            T peekA = a.peek();
            T peekB = b.peek();
            int comp = ordering.compare(peekA, peekB);
            if (comp == 0) {
                return mergeFunction.apply(Pair.create(a.next(), b.next()));
            } else if (comp < 0) {
                T ret = a.next();
                if (a.hasNext()) {
                    assert ordering.compare(ret, a.peek()) < 0;
                }
                return ret;
            } else {
                T ret = b.next();
                if (b.hasNext()) {
                    assert ordering.compare(ret, b.peek()) < 0;
                }
                return ret;
            }
        }
    };
}
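
A self-contained sketch of the same merge-by-peek technique, using only Guava and the JDK; summing equal elements is an illustrative choice standing in for the mergeFunction above:

import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;

import java.util.Arrays;
import java.util.Iterator;

public class MergeSortedDemo {
    // Merges two sorted, strictly increasing iterators; equal elements are summed (illustrative choice).
    static Iterator<Integer> merge(Iterator<Integer> one, Iterator<Integer> two) {
        PeekingIterator<Integer> a = Iterators.peekingIterator(one);
        PeekingIterator<Integer> b = Iterators.peekingIterator(two);
        return new AbstractIterator<Integer>() {
            @Override
            protected Integer computeNext() {
                if (!a.hasNext() && !b.hasNext()) {
                    return endOfData();
                }
                if (!a.hasNext()) {
                    return b.next();
                }
                if (!b.hasNext()) {
                    return a.next();
                }
                int cmp = a.peek().compareTo(b.peek());
                if (cmp == 0) {
                    return a.next() + b.next(); // merge equal keys
                }
                return cmp < 0 ? a.next() : b.next();
            }
        };
    }

    public static void main(String[] args) {
        Iterator<Integer> merged = merge(
                Arrays.asList(1, 3, 5).iterator(),
                Arrays.asList(3, 4).iterator());
        merged.forEachRemaining(System.out::println); // 1, 6, 4, 5
    }
}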

From source file:org.opendaylight.openflowplugin.applications.frsync.impl.strategy.SyncPlanPushStrategyFlatBatchImpl.java

static Map<Range<Integer>, Batch> mapBatchesToRanges(final List<Batch> inputBatchBag,
        final int failureIndexLimit) {
    final Map<Range<Integer>, Batch> batchMap = new LinkedHashMap<>();
    final PeekingIterator<Batch> batchPeekingIterator = Iterators.peekingIterator(inputBatchBag.iterator());
    while (batchPeekingIterator.hasNext()) {
        final Batch batch = batchPeekingIterator.next();
        final int nextBatchOrder = batchPeekingIterator.hasNext() ? batchPeekingIterator.peek().getBatchOrder()
                : failureIndexLimit;
        batchMap.put(Range.closed(batch.getBatchOrder(), nextBatchOrder - 1), batch);
    }
    return batchMap;
}
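
The look-ahead trick above, where the next element's order (or a limit) closes the current range, can be shown with plain integers and Guava's Range. The start values and limit below are made up for illustration:

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import com.google.common.collect.Range;

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

public class RangesFromOrderedMarkers {
    public static void main(String[] args) {
        // Each value marks the start of a range; the next value (or the limit) marks its end.
        int limit = 100;
        PeekingIterator<Integer> starts = Iterators.peekingIterator(Arrays.asList(0, 10, 40).iterator());
        Map<Range<Integer>, Integer> ranges = new LinkedHashMap<>();
        while (starts.hasNext()) {
            int start = starts.next();
            int end = starts.hasNext() ? starts.peek() : limit;
            ranges.put(Range.closed(start, end - 1), start);
        }
        System.out.println(ranges); // three ranges: 0..9, 10..39, 40..99
    }
}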

From source file:io.prestosql.sql.planner.optimizations.LocalProperties.java

/**
 * Attempt to match the desired properties to a sequence of known properties.
 * <p>
 * Returns a list of the same length as the original. Entries are:
 * - Optional.empty(): the property was satisfied completely
 * - non-empty: the (simplified) property that was not satisfied
 */
public static <T> List<Optional<LocalProperty<T>>> match(List<LocalProperty<T>> actuals,
        List<LocalProperty<T>> desired) {
    // After normalizing actuals, each symbol should only appear once
    PeekingIterator<LocalProperty<T>> actualIterator = peekingIterator(normalizeAndPrune(actuals).iterator());

    Set<T> constants = new HashSet<>();
    boolean consumeMoreActuals = true;
    List<Optional<LocalProperty<T>>> result = new ArrayList<>(desired.size());
    for (LocalProperty<T> desiredProperty : desired) {
        while (consumeMoreActuals && actualIterator.hasNext()
                && desiredProperty.isSimplifiedBy(actualIterator.peek())) {
            constants.addAll(actualIterator.next().getColumns());
        }
        Optional<LocalProperty<T>> simplifiedDesired = desiredProperty.withConstants(constants);
        consumeMoreActuals &= !simplifiedDesired.isPresent(); // Only continue processing actuals if all previous desired properties were fully satisfied
        result.add(simplifiedDesired);
    }
    return result;
}

From source file:org.apache.cassandra.db.Memtable.java

/**
 * Obtain an iterator over the columns in this memtable, in the specified order, starting from a given column.
 */
public static IColumnIterator getSliceIterator(final DecoratedKey key, final ColumnFamily cf,
        SliceQueryFilter filter, AbstractType typeComparator) {
    assert cf != null;
    final boolean isSuper = cf.isSuper();
    final Collection<IColumn> filteredColumns = filter.reversed ? cf.getReverseSortedColumns()
            : cf.getSortedColumns();

    // ok to not have subcolumnComparator since we won't be adding columns to this object
    IColumn startColumn = isSuper ? new SuperColumn(filter.start, (AbstractType) null)
            : new Column(filter.start);
    Comparator<IColumn> comparator = filter.getColumnComparator(typeComparator);

    final PeekingIterator<IColumn> filteredIter = Iterators.peekingIterator(filteredColumns.iterator());
    if (!filter.reversed || filter.start.remaining() != 0) {
        while (filteredIter.hasNext() && comparator.compare(filteredIter.peek(), startColumn) < 0) {
            filteredIter.next();
        }
    }

    return new AbstractColumnIterator() {
        public ColumnFamily getColumnFamily() {
            return cf;
        }

        public DecoratedKey getKey() {
            return key;
        }

        public boolean hasNext() {
            return filteredIter.hasNext();
        }

        public IColumn next() {
            return filteredIter.next();
        }
    };
}

From source file:de.iteratec.iteraplan.presentation.memory.EntityTreeNode.java

public static void skipSubtree(EntityTreeNode currentNode, Iterator<EntityTreeNode> iterator) {
    Preconditions.checkState(iterator instanceof PeekingIterator<?>,
            "Skipping subtree requires an Iterator of type com.google.common.collect.PeekingIterator<EntityTreeNode> !");
    PeekingIterator<EntityTreeNode> peekingIterator = (PeekingIterator<EntityTreeNode>) iterator;
    int currentNodeLevel = currentNode.getTreeLevel();
    while (peekingIterator.hasNext() && peekingIterator.peek() != null
            && peekingIterator.peek().getTreeLevel() > currentNodeLevel) {
        peekingIterator.next();
    }
}
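
Stripped of the EntityTreeNode specifics, the skip-while pattern looks like this; the integer "tree levels" and the skipDeeperThan helper are hypothetical:

import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;

import java.util.Arrays;

public class SkipWhileDemo {
    // Advances the iterator past every element deeper than the given level,
    // leaving it positioned on the next sibling or ancestor.
    static void skipDeeperThan(PeekingIterator<Integer> it, int level) {
        while (it.hasNext() && it.peek() > level) {
            it.next();
        }
    }

    public static void main(String[] args) {
        // Remaining pre-order levels after consuming a node at depth 1:
        // its subtree (2, 2), then a sibling at depth 1.
        PeekingIterator<Integer> it =
                Iterators.peekingIterator(Arrays.asList(2, 2, 1).iterator());
        skipDeeperThan(it, 1);          // skips the two depth-2 descendants
        System.out.println(it.peek());  // 1 -> the sibling at the same depth
    }
}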

From source file:com.google.googlejavaformat.java.javadoc.JavadocLexer.java

private static void deindentPreCodeBlock(ImmutableList.Builder<Token> output, PeekingIterator<Token> tokens) {
    Deque<Token> saved = new ArrayDeque<>();
    output.add(new Token(LITERAL, tokens.next().getValue().trim()));
    while (tokens.hasNext() && tokens.peek().getType() != PRE_CLOSE_TAG) {
        Token token = tokens.next();
        saved.addLast(token);
    }
    while (!saved.isEmpty() && saved.peekFirst().getType() == FORCED_NEWLINE) {
        saved.removeFirst();
    }
    while (!saved.isEmpty() && saved.peekLast().getType() == FORCED_NEWLINE) {
        saved.removeLast();
    }
    if (saved.isEmpty()) {
        return;
    }

    // move the trailing `}` to its own line
    Token last = saved.peekLast();
    boolean trailingBrace = false;
    if (last.getType() == LITERAL && last.getValue().endsWith("}")) {
        saved.removeLast();
        if (last.length() > 1) {
            saved.addLast(new Token(LITERAL, last.getValue().substring(0, last.getValue().length() - 1)));
            saved.addLast(new Token(FORCED_NEWLINE, null));
        }
        trailingBrace = true;
    }

    int trim = -1;
    for (Token token : saved) {
        if (token.getType() == LITERAL) {
            int idx = CharMatcher.isNot(' ').indexIn(token.getValue());
            if (idx != -1 && (trim == -1 || idx < trim)) {
                trim = idx;
            }
        }
    }

    output.add(new Token(FORCED_NEWLINE, "\n"));
    for (Token token : saved) {
        if (token.getType() == LITERAL) {
            output.add(new Token(LITERAL,
                    trim > 0 && token.length() > trim ? token.getValue().substring(trim) : token.getValue()));
        } else {
            output.add(token);
        }
    }

    if (trailingBrace) {
        output.add(new Token(LITERAL, "}"));
    } else {
        output.add(new Token(FORCED_NEWLINE, "\n"));
    }
}