Example usage for com.google.common.collect Iterators concat

Introduction

On this page you can find example usages of com.google.common.collect.Iterators.concat, drawn from the source files listed below.

Prototype

public static <T> Iterator<T> concat(final Iterator<? extends Iterator<? extends T>> inputs) 

Document

Combines multiple iterators into a single iterator.
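
The returned iterator traverses the elements of each input iterator in turn; the input iterators are not polled until necessary. Before the project examples below, here is a minimal, self-contained sketch of the documented overload (the class ConcatExample and the variable names are invented for illustration):

import com.google.common.collect.Iterators;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class ConcatExample {
    public static void main(String[] args) {
        List<String> first = Arrays.asList("a", "b");
        List<String> second = Arrays.asList("c", "d");

        // Build the Iterator<Iterator<String>> that the documented overload expects.
        List<Iterator<String>> inputs = Arrays.asList(first.iterator(), second.iterator());

        // concat flattens the inputs lazily; no elements are copied up front.
        Iterator<String> combined = Iterators.concat(inputs.iterator());

        while (combined.hasNext()) {
            System.out.println(combined.next()); // prints a, b, c, d
        }
    }
}

Guava also provides two-, three-, and four-argument overloads as well as a varargs overload, Iterators.concat(Iterator<? extends T>... inputs); several of the examples below use those forms.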

Usage

From source file: org.fcrepo.kernel.rdf.impl.NodeTypeRdfContext.java

/**
 * Convert a NodeType into an RDF stream by capturing the supertypes, node
 * definitions, and property definitions of the type as RDFS triples.
 *
 * @param nodeType
 * @throws RepositoryException
 */
public NodeTypeRdfContext(final NodeType nodeType) throws RepositoryException {
    super();

    final Node nodeTypeResource = getResource(nodeType).asNode();
    final String nodeTypeName = nodeType.getName();

    LOGGER.trace("Adding triples for nodeType: {} with URI: {}", nodeTypeName, nodeTypeResource.getURI());

    concat(map(nodeType.getDeclaredSupertypes(),

            new Function<NodeType, Triple>() {

                @Override
                public Triple apply(final NodeType input) {
                    final Node supertypeNode;
                    try {
                        supertypeNode = getResource(input).asNode();
                        LOGGER.trace("Adding triple for nodeType: {} with subclass: {}", nodeTypeName,
                                supertypeNode.getURI());
                        return create(nodeTypeResource, subClassOf.asNode(), supertypeNode);

                    } catch (final RepositoryException e) {
                        throw propagate(e);
                    }
                }
            }));

    concat(Iterators
            .concat(Iterators.transform(
                    Iterators.filter(forArray(nodeType.getDeclaredChildNodeDefinitions()),
                            not(isWildcardResidualDefinition)),
                    new NodeDefinitionToTriples(nodeTypeResource))));

    concat(Iterators
            .concat(Iterators.transform(
                    Iterators.filter(forArray(nodeType.getDeclaredPropertyDefinitions()),
                            not(isWildcardResidualDefinition)),
                    new PropertyDefinitionToTriples(nodeTypeResource))));

    concat(create(nodeTypeResource, type.asNode(), Class.asNode()),
            create(nodeTypeResource, label.asNode(), createLiteral(nodeTypeName)));
}
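
Note that the unqualified concat(...) calls in this constructor resolve to a stream-building method inherited from the class's RDF-stream superclass, not to Guava's static method. Guava's Iterators.concat appears only in the explicitly qualified nested calls, where it flattens the Iterator<Iterator<Triple>> produced by Iterators.transform (each definition maps to an iterator of triples) into a single Iterator<Triple>.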

From source file: com.sk89q.worldedit.regions.RegionIntersection.java

@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public Iterator<BlockVector> iterator() {
    Iterator<BlockVector>[] iterators = (Iterator<BlockVector>[]) new Iterator[regions.size()];
    for (int i = 0; i < regions.size(); i++) {
        iterators[i] = regions.get(i).iterator();
    }
    return Iterators.concat(iterators);
}
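
This example uses the varargs overload, Iterators.concat(Iterator<? extends T>... inputs), rather than the iterator-of-iterators prototype shown above; the unchecked cast is needed only because Java cannot create a generic array directly.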

From source file: org.fcrepo.kernel.rdf.impl.HierarchyRdfContext.java

private Iterator<Triple> childrenContext() throws RepositoryException {

    final Iterator<javax.jcr.Node> niceChildren = Iterators.filter(new NodeIterator(node().getNodes()),
            not(nastyChildren));

    final Iterator<javax.jcr.Node> salientChildren;

    if (options.hasOffset()) {
        int offset = options.getOffset();
        Iterators.advance(niceChildren, offset);
    }

    if (options.hasLimit()) {
        salientChildren = Iterators.limit(niceChildren, options.getLimit());
    } else {
        salientChildren = niceChildren;
    }

    return Iterators.concat(Iterators.transform(salientChildren, child2triples()));
}
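
Here Iterators.advance and Iterators.limit implement offset/limit paging over the child nodes, after which Iterators.transform maps each remaining child to an iterator of triples and Iterators.concat flattens those per-child iterators into a single stream.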

From source file: org.apache.mahout.common.iterator.sequencefile.SequenceFileDirIterator.java

private void init(FileStatus[] statuses, final boolean reuseKeyValueInstances, final Configuration conf) {

    /*
     * prevent NPEs. Unfortunately, Hadoop would return null for list if nothing
     * was qualified. In this case, which is a corner case, we should assume an
     * empty iterator, not an NPE.
     */
    if (statuses == null) {
        statuses = NO_STATUSES;
    }

    Iterator<FileStatus> fileStatusIterator = Iterators.forArray(statuses);

    Iterator<Iterator<Pair<K, V>>> fsIterators = Iterators.transform(fileStatusIterator,
            new Function<FileStatus, Iterator<Pair<K, V>>>() {
                @Override
                public Iterator<Pair<K, V>> apply(FileStatus from) {
                    try {
                        SequenceFileIterator<K, V> iterator = new SequenceFileIterator<K, V>(from.getPath(),
                                reuseKeyValueInstances, conf);
                        iterators.add(iterator);
                        return iterator;
                    } catch (IOException ioe) {
                        throw new IllegalStateException(from.getPath().toString(), ioe);
                    }
                }
            });

    Collections.reverse(iterators); // close later in reverse order

    delegate = Iterators.concat(fsIterators);
}
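
Because both Iterators.transform and Iterators.concat are lazy, each SequenceFileIterator is opened only when the concatenated iterator actually advances into the corresponding file; the iterators list collects the opened readers so that they can all be closed when the directory iterator itself is closed.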

From source file: org.fcrepo.kernel.impl.rdf.impl.HierarchyRdfContext.java

private Iterator<Triple> childrenContext() throws RepositoryException {

    final Iterator<javax.jcr.Node> niceChildren = Iterators.filter(new NodeIterator(node().getNodes()),
            not(nastyChildren));

    final Iterator<javax.jcr.Node> salientChildren;

    if (options.hasOffset()) {
        final int offset = options.getOffset();
        Iterators.advance(niceChildren, offset);
    }

    if (options.hasLimit()) {
        salientChildren = Iterators.limit(niceChildren, options.getLimit());
    } else {
        salientChildren = niceChildren;
    }

    return Iterators.concat(Iterators.transform(salientChildren, child2triples()));
}

From source file: org.fcrepo.kernel.impl.rdf.impl.NodeTypeRdfContext.java

/**
 * Convert a NodeType into an RDF stream by capturing the supertypes, node
 * definitions, and property definitions of the type as RDFS triples.
 *
 * @param nodeType the node type
 * @throws RepositoryException if repository exception occurred
 */
public NodeTypeRdfContext(final NodeType nodeType) throws RepositoryException {
    super();

    final Node nodeTypeResource = getResource(nodeType).asNode();
    final String nodeTypeName = nodeType.getName();

    LOGGER.trace("Adding triples for nodeType: {} with URI: {}", nodeTypeName, nodeTypeResource.getURI());

    concat(Collections2.transform(copyOf(nodeType.getDeclaredSupertypes()),

            new Function<NodeType, Triple>() {

                @Override
                public Triple apply(final NodeType input) {
                    final Node supertypeNode;
                    try {
                        supertypeNode = getResource(input).asNode();
                        LOGGER.trace("Adding triple for nodeType: {} with subclass: {}", nodeTypeName,
                                supertypeNode.getURI());
                        return create(nodeTypeResource, subClassOf.asNode(), supertypeNode);

                    } catch (final RepositoryException e) {
                        throw propagate(e);
                    }
                }
            }));

    concat(Iterators
            .concat(Iterators.transform(
                    Iterators.filter(forArray(nodeType.getDeclaredChildNodeDefinitions()),
                            not(isWildcardResidualDefinition)),
                    new NodeDefinitionToTriples(nodeTypeResource))));

    concat(Iterators
            .concat(Iterators.transform(
                    Iterators.filter(forArray(nodeType.getDeclaredPropertyDefinitions()),
                            not(isWildcardResidualDefinition)),
                    new PropertyDefinitionToTriples(nodeTypeResource))));

    concat(create(nodeTypeResource, type.asNode(), Class.asNode()),
            create(nodeTypeResource, label.asNode(), createLiteral(nodeTypeName)));
}

From source file: org.gradle.api.internal.tasks.DefaultTaskOutputs.java

@Override
public SortedSet<TaskOutputFilePropertySpec> getFileProperties() {
    if (fileProperties == null) {
        TaskPropertyUtils.ensurePropertiesHaveNames(filePropertiesInternal);
        Iterator<TaskOutputFilePropertySpec> flattenedProperties = Iterators
                .concat(Iterables.transform(filePropertiesInternal,
                        new Function<TaskPropertySpec, Iterator<? extends TaskOutputFilePropertySpec>>() {
                            @Override
                            public Iterator<? extends TaskOutputFilePropertySpec> apply(
                                    TaskPropertySpec propertySpec) {
                                if (propertySpec instanceof CompositeTaskOutputPropertySpec) {
                                    return ((CompositeTaskOutputPropertySpec) propertySpec)
                                            .resolveToOutputProperties();
                                } else {
                                    return Iterators
                                            .singletonIterator((TaskOutputFilePropertySpec) propertySpec);
                                }
                            }
                        }).iterator());
        fileProperties = TaskPropertyUtils.collectFileProperties("output", flattenedProperties);
    }
    return fileProperties;
}
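
Iterables.transform maps every property spec to an Iterator<? extends TaskOutputFilePropertySpec>, so calling .iterator() on the transformed Iterable yields exactly the Iterator<Iterator<...>> shape that the single-argument Iterators.concat overload expects.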

From source file: org.apache.mahout.common.iterator.sequencefile.SequenceFileDirValueIterator.java

private void init(FileStatus[] statuses, Comparator<FileStatus> ordering, final boolean reuseKeyValueInstances,
        final Configuration conf) throws IOException {

    /*
     * prevent NPEs. Unfortunately, Hadoop would return null for list if nothing
     * was qualified. In this case, which is a corner case, we should assume an
     * empty iterator, not an NPE.
     */
    if (statuses == null) {
        statuses = NO_STATUSES;
    }

    if (ordering != null) {
        Arrays.sort(statuses, ordering);
    }
    Iterator<FileStatus> fileStatusIterator = Iterators.forArray(statuses);

    try {

        Iterator<Iterator<V>> fsIterators = Iterators.transform(fileStatusIterator,
                new Function<FileStatus, Iterator<V>>() {
                    @Override
                    public Iterator<V> apply(FileStatus from) {
                        try {
                            SequenceFileValueIterator<V> iterator = new SequenceFileValueIterator<V>(
                                    from.getPath(), reuseKeyValueInstances, conf);
                            iterators.add(iterator);
                            return iterator;
                        } catch (IOException ioe) {
                            throw new IllegalStateException(from.getPath().toString(), ioe);
                        }
                    }
                });

        Collections.reverse(iterators); // close later in reverse order

        delegate = Iterators.concat(fsIterators);

    } finally {
        /*
         * prevent file handle leaks in case one of handles fails to open. If some
         * of the files fail to open, constructor will fail and close() will never
         * be called. Thus, those handles that did open in constructor, would leak
         * out, unless we specifically handle it here.
         */
        IOUtils.close(iterators);
    }
}

From source file: org.bugkillers.bus.eventbus.SubscriberRegistry_.java

/**
 * Gets an iterator representing an immutable snapshot of all subscribers to the given event at
 * the time this method is called.
 */
Iterator<Subscriber_> getSubscribers(Object event) {
    ImmutableSet<Class<?>> eventTypes = flattenHierarchy(event.getClass());

    List<Iterator<Subscriber_>> subscriberIterators = Lists.newArrayListWithCapacity(eventTypes.size());

    for (Class<?> eventType : eventTypes) {
        CopyOnWriteArraySet<Subscriber_> eventSubscribers = subscribers.get(eventType);
        if (eventSubscribers != null) {
            // eager no-copy snapshot
            subscriberIterators.add(eventSubscribers.iterator());
        }
    }

    return Iterators.concat(subscriberIterators.iterator());
}

From source file: co.cask.cdap.etl.common.PipelinePhase.java

@Override
public Iterator<StageInfo> iterator() {
    List<Iterator<StageInfo>> iterators = new ArrayList<>(stages.size());
    for (Map.Entry<String, Set<StageInfo>> stagesEntry : stages.entrySet()) {
        iterators.add(stagesEntry.getValue().iterator());
    }
    return Iterators.concat(iterators.iterator());
}