Example usage for com.google.common.collect Iterators limit

Introduction

This page collects example usages of com.google.common.collect.Iterators.limit.

Prototype

public static <T> Iterator<T> limit(final Iterator<T> iterator, final int limitSize) 

Document

Creates an iterator returning the first limitSize elements of the given iterator.
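
For example, a minimal self-contained sketch (the class name IteratorsLimitDemo and the sample values are illustrative, not taken from the usages below):

import com.google.common.collect.Iterators;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class IteratorsLimitDemo {
    public static void main(String[] args) {
        List<String> values = Arrays.asList("a", "b", "c", "d", "e");
        // Wrap the source iterator so that at most the first three elements are returned.
        Iterator<String> limited = Iterators.limit(values.iterator(), 3);
        while (limited.hasNext()) {
            System.out.println(limited.next()); // prints a, b, c
        }
    }
}

Note that the returned iterator is a view: advancing it also advances the underlying iterator, which is what the chunked-intersection usage below relies on.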

Usage

From source file:it.uniroma3.mat.extendedset.intset.ImmutableSecompaxSet.java

public static ImmutableSecompaxSet intersection(Iterator<ImmutableSecompaxSet> sets) {
    // Intersect the first CHUNK_SIZE sets; each later pass consumes the next chunk of 'sets'.
    ImmutableSecompaxSet partialResults = doIntersection(Iterators.limit(sets, CHUNK_SIZE));
    while (sets.hasNext()) {
        // Fold the previous partial result into the next chunk of up to CHUNK_SIZE sets.
        final UnmodifiableIterator<ImmutableSecompaxSet> partialIter = Iterators
                .singletonIterator(partialResults);
        partialResults = doIntersection(
                Iterators.<ImmutableSecompaxSet>concat(Iterators.limit(sets, CHUNK_SIZE), partialIter));
    }
    return partialResults;
}

From source file:com.yandex.yoctodb.v1.immutable.V1Database.java

@Override
public void execute(@NotNull final Query query, @NotNull final DocumentProcessor processor) {
    final BitSet docs = query.filteredUnlimited(this, bitSetPool);
    if (docs == null) {
        return;
    }

    final Iterator<? extends ScoredDocument<?>> unlimited = query.sortedUnlimited(docs, this, bitSetPool);

    if (query.getSkip() != 0) {
        Iterators.advance(unlimited, query.getSkip());
    }

    final Iterator<? extends ScoredDocument<?>> limited;
    if (query.getLimit() == Integer.MAX_VALUE) {
        limited = unlimited;
    } else {
        limited = Iterators.limit(unlimited, query.getLimit());
    }

    while (limited.hasNext()) {
        if (!processor.process(limited.next().getDocument(), this)) {
            return;
        }
    }
}

From source file:gobblin.runtime.mapreduce.GobblinWorkUnitsInputFormat.java

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {

    Path[] inputPaths = FileInputFormat.getInputPaths(context);
    if (inputPaths == null || inputPaths.length == 0) {
        throw new IOException("No input found!");
    }

    List<String> allPaths = Lists.newArrayList();

    for (Path path : inputPaths) {
        // path is a single work unit / multi work unit
        FileSystem fs = path.getFileSystem(context.getConfiguration());
        FileStatus[] inputs = fs.listStatus(path);

        if (inputs == null) {
            throw new IOException(String.format("Path %s does not exist.", path));
        }
        log.info(String.format("Found %d input files at %s: %s", inputs.length, path, Arrays.toString(inputs)));
        for (FileStatus input : inputs) {
            allPaths.add(input.getPath().toString());
        }
    }

    int maxMappers = getMaxMapper(context.getConfiguration());
    int numTasksPerMapper = allPaths.size() % maxMappers == 0 ? allPaths.size() / maxMappers
            : allPaths.size() / maxMappers + 1;

    List<InputSplit> splits = Lists.newArrayList();
    Iterator<String> pathsIt = allPaths.iterator();
    while (pathsIt.hasNext()) {
        Iterator<String> limitedIterator = Iterators.limit(pathsIt, numTasksPerMapper);
        splits.add(new GobblinSplit(Lists.newArrayList(limitedIterator)));
    }

    return splits;
}

From source file:org.apache.drill.exec.store.sys.store.ZookeeperPersistentStore.java

@Override
public Iterator<Map.Entry<String, V>> getRange(final int skip, final int take) {
    final Iterator<Map.Entry<String, byte[]>> entries = client.entries();
    Iterators.advance(entries, skip);
    return Iterators.transform(Iterators.limit(entries, take),
            new Function<Map.Entry<String, byte[]>, Map.Entry<String, V>>() {
                @Nullable
                @Override
                public Map.Entry<String, V> apply(@Nullable Map.Entry<String, byte[]> input) {
                    try {
                        final V value = config.getSerializer().deserialize(input.getValue());
                        return new ImmutableEntry<>(input.getKey(), value);
                    } catch (final IOException e) {
                        throw new DrillRuntimeException(
                                String.format("unable to deserialize value at key %s", input.getKey()), e);
                    }
                }
            });
}

From source file:org.apache.cassandra.index.sasi.disk.StaticTokenTreeBuilder.java

@Override
public void write(DataOutputPlus out) throws IOException {
    // if the root is not a leaf then none of the leaves have been written (all are PartialLeaf)
    // so write out the last layer of the tree by converting PartialLeaf to StaticLeaf and
    // iterating the data once more
    super.write(out);
    if (root.isLeaf())
        return;

    RangeIterator<Long, Token> tokens = combinedTerm.getTokenIterator();
    ByteBuffer blockBuffer = ByteBuffer.allocate(BLOCK_BYTES);
    Iterator<Node> leafIterator = leftmostLeaf.levelIterator();
    while (leafIterator.hasNext()) {
        Leaf leaf = (Leaf) leafIterator.next();
        Leaf writeableLeaf = new StaticLeaf(Iterators.limit(tokens, leaf.tokenCount()), leaf);
        writeableLeaf.serialize(-1, blockBuffer);
        flushBuffer(blockBuffer, out, true);
    }

}

From source file:org.dswarm.graph.gdm.read.PropertyGraphGDMModelReader.java

@Override
public Optional<ModelBuilder> read(final OutputStream outputStream) throws DMPGraphException {

    readResources = 0;

    tx.ensureRunningTx();

    ResourceIterator<Node> recordNodesIter = null;

    try {

        final Label recordClassLabel = DynamicLabel.label(recordClassUri);

        PropertyGraphGDMModelReader.LOG.debug(
                "try to read resources for class '{}' in data model '{}' with version '{}'", recordClassLabel,
                prefixedDataModelUri, version);

        recordNodesIter = database.findNodes(recordClassLabel, GraphStatics.DATA_MODEL_PROPERTY,
                prefixedDataModelUri);

        if (recordNodesIter == null) {

            tx.succeedTx();

            PropertyGraphGDMModelReader.LOG.debug(
                    "there are no root nodes for '{}' in data model '{}'  with version '{}'; finished read {} TX successfully",
                    recordClassLabel, prefixedDataModelUri, version, type);

            return Optional.empty();
        }

        if (!recordNodesIter.hasNext()) {

            recordNodesIter.close();
            tx.succeedTx();

            PropertyGraphGDMModelReader.LOG.debug(
                    "there are no root nodes for '{}' in data model '{}'  with version '{}'; finished read {} TX successfully",
                    recordClassLabel, prefixedDataModelUri, version, type);

            return Optional.empty();
        }

        modelBuilder = new ModelBuilder(outputStream);
        size = 0;

        final Iterator<Node> nodeIterator;

        if (optionalAtMost.isPresent()) {

            nodeIterator = Iterators.limit(recordNodesIter, optionalAtMost.get());
        } else {

            nodeIterator = recordNodesIter;
        }

        while (nodeIterator.hasNext()) {

            final Node recordNode = nodeIterator.next();
            final String resourceUri = (String) recordNode.getProperty(GraphStatics.URI_PROPERTY, null);

            if (resourceUri == null) {

                LOG.debug("there is no resource URI at record node '{}'", recordNode.getId());

                continue;
            }

            final String fullResourceURI = namespaceIndex.createFullURI(resourceUri);

            currentResource = new Resource(fullResourceURI);
            startNodeHandler.handleNode(recordNode);

            if (!currentResourceStatements.isEmpty()) {

                final Set<Statement> statements = new LinkedHashSet<>();

                for (List<Statement> statementList : currentResourceStatements.values()) {

                    statements.addAll(statementList);
                }

                currentResource.setStatements(statements);
            }

            final int resourceStatementSize = currentResource.size();

            if (resourceStatementSize > 0) {

                size += resourceStatementSize;
                modelBuilder.addResource(currentResource);
                readResources++;
            } else {

                LOG.debug(
                        "couldn't find any statement for resource '{}' ('{}') in data model '{}' with version '{}'",
                        currentResource.getUri(), resourceUri, prefixedDataModelUri, version);
            }

            currentResourceStatements.clear();
        }

        recordNodesIter.close();
        tx.succeedTx();

        PropertyGraphGDMModelReader.LOG.debug("finished read {} TX successfully", type);
    } catch (final Exception e) {

        PropertyGraphGDMModelReader.LOG.error("couldn't finished read {} TX successfully", type, e);

        if (recordNodesIter != null) {

            recordNodesIter.close();
        }

        tx.failTx();
    }

    return Optional.of(modelBuilder);
}

From source file:com.yandex.yoctodb.v1.immutable.V1CompositeDatabase.java

@Override
public void execute(@NotNull final Query query, @NotNull final DocumentProcessor processor) {
    final Iterator<ScoredDocument<?>> iterator;

    // Doing merging iff there is sorting
    if (query.hasSorting()) {
        final List<Iterator<? extends ScoredDocument<?>>> results = new ArrayList<>(databases.size());
        for (IndexedDatabase db : databases) {
            final BitSet docs = query.filteredUnlimited(db, bitSetPool);

            if (docs == null) {
                continue;
            }

            assert !docs.isEmpty();

            results.add(query.sortedUnlimited(docs, db, bitSetPool));
        }

        if (results.isEmpty()) {
            return;
        }

        iterator = Iterators.mergeSorted(results, SCORED_DOCUMENT_COMPARATOR);
    } else {
        iterator = Iterators.concat(new FilterResultIterator(query, databases.iterator(), bitSetPool));
    }

    // Skipping values
    if (query.getSkip() != 0) {
        Iterators.advance(iterator, query.getSkip());
    }

    // Limited
    final Iterator<ScoredDocument<?>> limitedIterator;
    if (query.getLimit() == Integer.MAX_VALUE) {
        limitedIterator = iterator;
    } else {
        limitedIterator = Iterators.limit(iterator, query.getLimit());
    }

    while (limitedIterator.hasNext()) {
        final ScoredDocument<?> document = limitedIterator.next();
        if (!processor.process(document.getDocument(), document.getDatabase())) {
            return;
        }
    }
}

From source file:org.fcrepo.kernel.rdf.impl.HierarchyRdfContext.java

private Iterator<Triple> childrenContext() throws RepositoryException {

    final Iterator<javax.jcr.Node> niceChildren = Iterators.filter(new NodeIterator(node().getNodes()),
            not(nastyChildren));

    final Iterator<javax.jcr.Node> salientChildren;

    if (options.hasOffset()) {
        int offset = options.getOffset();
        Iterators.advance(niceChildren, offset);
    }

    if (options.hasLimit()) {
        salientChildren = Iterators.limit(niceChildren, options.getLimit());
    } else {
        salientChildren = niceChildren;
    }

    return Iterators.concat(Iterators.transform(salientChildren, child2triples()));
}

From source file:org.fcrepo.kernel.impl.rdf.impl.HierarchyRdfContext.java

private Iterator<Triple> childrenContext() throws RepositoryException {

    final Iterator<javax.jcr.Node> niceChildren = Iterators.filter(new NodeIterator(node().getNodes()),
            not(nastyChildren));

    final Iterator<javax.jcr.Node> salientChildren;

    if (options.hasOffset()) {
        final int offset = options.getOffset();
        Iterators.advance(niceChildren, offset);
    }

    if (options.hasLimit()) {
        salientChildren = Iterators.limit(niceChildren, options.getLimit());
    } else {
        salientChildren = niceChildren;
    }

    return Iterators.concat(Iterators.transform(salientChildren, child2triples()));
}

From source file:org.apache.drill.exec.store.hbase.config.HBasePersistentStore.java

@Override
public Iterator<Entry<String, V>> getRange(int skip, int take) {
    final Iterator<Entry<String, V>> iter = new Iter(take);
    Iterators.advance(iter, skip);
    return Iterators.limit(iter, take);
}