Example usage for com.google.common.collect Iterators emptyIterator

List of usage examples for com.google.common.collect Iterators emptyIterator

Introduction

In this page you can find the example usage for com.google.common.collect Iterators emptyIterator.

Prototype

@Deprecated
public static <T> UnmodifiableIterator<T> emptyIterator() 

Source Link

Document

Returns the empty iterator.

Usage

From source file:org.polarsys.reqcycle.traceability.cache.emfbased.CacheTraceabilityEngine.java

@Override
protected Iterator<Pair<Link, Reachable>> doGetOneLevelTraceability(Reachable source, DIRECTION direction,
        Predicate<Pair<Link, Reachable>> scope) {
    TraceableElement s = getTraceableElement(source);
    if (s != null) {
        IPicker picker = getPicker(direction, scope);
        try {/*from w  w  w.j ava 2s. c o  m*/
            List<Pair<Link, Reachable>> result = new ArrayList<Pair<Link, Reachable>>();
            Iterable<?> elements = picker.getNexts(source);
            return Lists
                    .newArrayList(Iterables.transform(elements, new Function<Object, Pair<Link, Reachable>>() {
                        public Pair<Link, Reachable> apply(Object o) {
                            return (Pair<Link, Reachable>) o;
                        }
                    })).iterator();
        } catch (PickerExecutionException e) {
            e.printStackTrace();
        }
    }
    return Iterators.emptyIterator();
}

From source file:io.druid.client.CachingClusteredClient.java

@Override
public Sequence<T> run(final Query<T> query, final Map<String, Object> responseContext) {
    final QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
    final CacheStrategy<T, Object, Query<T>> strategy = toolChest.getCacheStrategy(query);

    final Map<DruidServer, List<SegmentDescriptor>> serverSegments = Maps.newTreeMap();

    final List<Pair<Interval, byte[]>> cachedResults = Lists.newArrayList();
    final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap();

    final boolean useCache = query.getContextUseCache(true) && strategy != null && cacheConfig.isUseCache()
            && cacheConfig.isQueryCacheable(query);
    final boolean populateCache = query.getContextPopulateCache(true) && strategy != null
            && cacheConfig.isPopulateCache() && cacheConfig.isQueryCacheable(query);
    final boolean isBySegment = query.getContextBySegment(false);

    final ImmutableMap.Builder<String, Object> contextBuilder = new ImmutableMap.Builder<>();

    final int priority = query.getContextPriority(0);
    contextBuilder.put("priority", priority);

    if (populateCache) {
        // prevent down-stream nodes from caching results as well if we are populating the cache
        contextBuilder.put(CacheConfig.POPULATE_CACHE, false);
        contextBuilder.put("bySegment", true);
    }//from   w  ww  .j a  v  a2 s.  co  m
    contextBuilder.put("intermediate", true);

    TimelineLookup<String, ServerSelector> timeline = serverView.getTimeline(query.getDataSource());

    if (timeline == null) {
        return Sequences.empty();
    }

    // build set of segments to query
    Set<Pair<ServerSelector, SegmentDescriptor>> segments = Sets.newLinkedHashSet();

    List<TimelineObjectHolder<String, ServerSelector>> serversLookup = Lists.newLinkedList();

    for (Interval interval : query.getIntervals()) {
        Iterables.addAll(serversLookup, timeline.lookup(interval));
    }

    // Let tool chest filter out unneeded segments
    final List<TimelineObjectHolder<String, ServerSelector>> filteredServersLookup = toolChest
            .filterSegments(query, serversLookup);

    for (TimelineObjectHolder<String, ServerSelector> holder : filteredServersLookup) {
        for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
            ServerSelector selector = chunk.getObject();
            final SegmentDescriptor descriptor = new SegmentDescriptor(holder.getInterval(),
                    holder.getVersion(), chunk.getChunkNumber());

            segments.add(Pair.of(selector, descriptor));
        }
    }

    final byte[] queryCacheKey;

    if ((populateCache || useCache) // implies strategy != null
            && !isBySegment) // explicit bySegment queries are never cached
    {
        queryCacheKey = strategy.computeCacheKey(query);
    } else {
        queryCacheKey = null;
    }

    if (queryCacheKey != null) {
        // cachKeys map must preserve segment ordering, in order for shards to always be combined in the same order
        Map<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> cacheKeys = Maps.newLinkedHashMap();
        for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
            final Cache.NamedKey segmentCacheKey = CacheUtil.computeSegmentCacheKey(
                    segment.lhs.getSegment().getIdentifier(), segment.rhs, queryCacheKey);
            cacheKeys.put(segment, segmentCacheKey);
        }

        // Pull cached segments from cache and remove from set of segments to query
        final Map<Cache.NamedKey, byte[]> cachedValues;
        if (useCache) {
            cachedValues = cache.getBulk(cacheKeys.values());
        } else {
            cachedValues = ImmutableMap.of();
        }

        for (Map.Entry<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> entry : cacheKeys.entrySet()) {
            Pair<ServerSelector, SegmentDescriptor> segment = entry.getKey();
            Cache.NamedKey segmentCacheKey = entry.getValue();
            final Interval segmentQueryInterval = segment.rhs.getInterval();

            final byte[] cachedValue = cachedValues.get(segmentCacheKey);
            if (cachedValue != null) {
                // remove cached segment from set of segments to query
                segments.remove(segment);
                cachedResults.add(Pair.of(segmentQueryInterval, cachedValue));
            } else if (populateCache) {
                // otherwise, if populating cache, add segment to list of segments to cache
                final String segmentIdentifier = segment.lhs.getSegment().getIdentifier();
                cachePopulatorMap.put(String.format("%s_%s", segmentIdentifier, segmentQueryInterval),
                        new CachePopulator(cache, objectMapper, segmentCacheKey));
            }
        }
    }

    // Compile list of all segments not pulled from cache
    for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
        final QueryableDruidServer queryableDruidServer = segment.lhs.pick();

        if (queryableDruidServer == null) {
            log.makeAlert("No servers found for %s?! How can this be?!", segment.rhs).emit();
        } else {
            final DruidServer server = queryableDruidServer.getServer();
            List<SegmentDescriptor> descriptors = serverSegments.get(server);

            if (descriptors == null) {
                descriptors = Lists.newArrayList();
                serverSegments.put(server, descriptors);
            }

            descriptors.add(segment.rhs);
        }
    }

    return new LazySequence<>(new Supplier<Sequence<T>>() {
        @Override
        public Sequence<T> get() {
            ArrayList<Sequence<T>> sequencesByInterval = Lists.newArrayList();
            addSequencesFromCache(sequencesByInterval);
            addSequencesFromServer(sequencesByInterval);

            return mergeCachedAndUncachedSequences(sequencesByInterval, toolChest);
        }

        private void addSequencesFromCache(ArrayList<Sequence<T>> listOfSequences) {
            if (strategy == null) {
                return;
            }

            final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache();
            final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz();
            for (Pair<Interval, byte[]> cachedResultPair : cachedResults) {
                final byte[] cachedResult = cachedResultPair.rhs;
                Sequence<Object> cachedSequence = new BaseSequence<>(
                        new BaseSequence.IteratorMaker<Object, Iterator<Object>>() {
                            @Override
                            public Iterator<Object> make() {
                                try {
                                    if (cachedResult.length == 0) {
                                        return Iterators.emptyIterator();
                                    }

                                    return objectMapper.readValues(
                                            objectMapper.getFactory().createParser(cachedResult),
                                            cacheObjectClazz);
                                } catch (IOException e) {
                                    throw Throwables.propagate(e);
                                }
                            }

                            @Override
                            public void cleanup(Iterator<Object> iterFromMake) {
                            }
                        });
                listOfSequences.add(Sequences.map(cachedSequence, pullFromCacheFunction));
            }
        }

        private void addSequencesFromServer(ArrayList<Sequence<T>> listOfSequences) {
            listOfSequences.ensureCapacity(listOfSequences.size() + serverSegments.size());

            final Query<Result<BySegmentResultValueClass<T>>> rewrittenQuery = (Query<Result<BySegmentResultValueClass<T>>>) query
                    .withOverriddenContext(contextBuilder.build());

            // Loop through each server, setting up the query and initiating it.
            // The data gets handled as a Future and parsed in the long Sequence chain in the resultSeqToAdd setter.
            for (Map.Entry<DruidServer, List<SegmentDescriptor>> entry : serverSegments.entrySet()) {
                final DruidServer server = entry.getKey();
                final List<SegmentDescriptor> descriptors = entry.getValue();

                final QueryRunner clientQueryable = serverView.getQueryRunner(server);

                if (clientQueryable == null) {
                    log.error("WTF!? server[%s] doesn't have a client Queryable?", server);
                    continue;
                }

                final MultipleSpecificSegmentSpec segmentSpec = new MultipleSpecificSegmentSpec(descriptors);

                final Sequence<T> resultSeqToAdd;
                if (!server.isAssignable() || !populateCache || isBySegment) { // Direct server queryable
                    if (!isBySegment) {
                        resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec),
                                responseContext);
                    } else {
                        // bySegment queries need to be de-serialized, see DirectDruidClient.run()

                        @SuppressWarnings("unchecked")
                        final Query<Result<BySegmentResultValueClass<T>>> bySegmentQuery = (Query<Result<BySegmentResultValueClass<T>>>) query;

                        @SuppressWarnings("unchecked")
                        final Sequence<Result<BySegmentResultValueClass<T>>> resultSequence = clientQueryable
                                .run(bySegmentQuery.withQuerySegmentSpec(segmentSpec), responseContext);

                        resultSeqToAdd = (Sequence) Sequences.map(resultSequence,
                                new Function<Result<BySegmentResultValueClass<T>>, Result<BySegmentResultValueClass<T>>>() {
                                    @Override
                                    public Result<BySegmentResultValueClass<T>> apply(
                                            Result<BySegmentResultValueClass<T>> input) {
                                        final BySegmentResultValueClass<T> bySegmentValue = input.getValue();
                                        return new Result<>(input.getTimestamp(),
                                                new BySegmentResultValueClass<T>(
                                                        Lists.transform(bySegmentValue.getResults(),
                                                                toolChest.makePreComputeManipulatorFn(query,
                                                                        MetricManipulatorFns.deserializing())),
                                                        bySegmentValue.getSegmentId(),
                                                        bySegmentValue.getInterval()));
                                    }
                                });
                    }
                } else { // Requires some manipulation on broker side
                    @SuppressWarnings("unchecked")
                    final Sequence<Result<BySegmentResultValueClass<T>>> runningSequence = clientQueryable
                            .run(rewrittenQuery.withQuerySegmentSpec(segmentSpec), responseContext);
                    resultSeqToAdd = toolChest.mergeSequencesUnordered(
                            Sequences.<Result<BySegmentResultValueClass<T>>, Sequence<T>>map(runningSequence,
                                    new Function<Result<BySegmentResultValueClass<T>>, Sequence<T>>() {
                                        private final Function<T, Object> cacheFn = strategy.prepareForCache();

                                        // Acctually do something with the results
                                        @Override
                                        public Sequence<T> apply(Result<BySegmentResultValueClass<T>> input) {
                                            final BySegmentResultValueClass<T> value = input.getValue();
                                            final CachePopulator cachePopulator = cachePopulatorMap
                                                    .get(String.format("%s_%s", value.getSegmentId(),
                                                            value.getInterval()));

                                            final Queue<ListenableFuture<Object>> cacheFutures = new ConcurrentLinkedQueue<>();

                                            return Sequences.<T>withEffect(Sequences.<T, T>map(
                                                    Sequences.<T, T>map(Sequences.<T>simple(value.getResults()),
                                                            new Function<T, T>() {
                                                                @Override
                                                                public T apply(final T input) {
                                                                    if (cachePopulator != null) {
                                                                        // only compute cache data if populating cache
                                                                        cacheFutures
                                                                                .add(backgroundExecutorService
                                                                                        .submit(new Callable<Object>() {
                                                                                            @Override
                                                                                            public Object call() {
                                                                                                return cacheFn
                                                                                                        .apply(input);
                                                                                            }
                                                                                        }));
                                                                    }
                                                                    return input;
                                                                }
                                                            }),
                                                    toolChest.makePreComputeManipulatorFn(
                                                            // Ick... most makePreComputeManipulatorFn directly cast to their ToolChest query type of choice
                                                            // This casting is sub-optimal, but hasn't caused any major problems yet...
                                                            (Query) rewrittenQuery,
                                                            MetricManipulatorFns.deserializing())),
                                                    new Runnable() {
                                                        @Override
                                                        public void run() {
                                                            if (cachePopulator != null) {
                                                                Futures.addCallback(
                                                                        Futures.allAsList(cacheFutures),
                                                                        new FutureCallback<List<Object>>() {
                                                                            @Override
                                                                            public void onSuccess(
                                                                                    List<Object> cacheData) {
                                                                                cachePopulator
                                                                                        .populate(cacheData);
                                                                                // Help out GC by making sure all references are gone
                                                                                cacheFutures.clear();
                                                                            }

                                                                            @Override
                                                                            public void onFailure(
                                                                                    Throwable throwable) {
                                                                                log.error(throwable,
                                                                                        "Background caching failed");
                                                                            }
                                                                        }, backgroundExecutorService);
                                                            }
                                                        }
                                                    }, MoreExecutors.sameThreadExecutor());// End withEffect
                                        }
                                    }));
                }

                listOfSequences.add(resultSeqToAdd);
            }
        }
    }// End of Supplier
    );
}

From source file:org.sakaiproject.nakamura.lite.content.InternalContent.java

/**
 * @return an iterable for all children of this content item.
 *//* w  w  w .j  av  a  2  s  .co  m*/
public Iterable<Content> listChildren() {
    if (newcontent) {
        return Iterables.emptyIterable();
    }
    return new Iterable<Content>() {

        public Iterator<Content> iterator() {
            try {
                return contentManager.listChildren(path);
            } catch (StorageClientException e) {
                LOGGER.error(e.getMessage(), e);
            }
            return Iterators.emptyIterator();
        }
    };
}

From source file:org.sakaiproject.nakamura.lite.content.InternalContent.java

/**
 * @return an iterable of all relative child paths of this object.
 *//*from   w w  w .  j ava  2  s. c  o m*/
public Iterable<String> listChildPaths() {
    if (newcontent) {
        return Iterables.emptyIterable();
    }
    return new Iterable<String>() {

        public Iterator<String> iterator() {
            try {
                return contentManager.listChildPaths(path);
            } catch (StorageClientException e) {
                LOGGER.error(e.getMessage(), e);
            }
            return Iterators.emptyIterator();
        }
    };
}

From source file:org.apache.marmotta.commons.sesame.repository.ResourceUtils.java

/**
 * List resources with the given prefix/*from ww w.  jav  a  2 s  . c o  m*/
 *
 * @param prefix the prefix
 * @param offset
 * @param limit
 */
public static Iterable<URI> listResourcesByPrefix(final RepositoryConnection con, final String prefix,
        final int offset, final int limit) {
    final ResourceConnection rcon = getWrappedResourceConnection(con);

    if (rcon != null) {
        return new Iterable<URI>() {
            @Override
            public Iterator<URI> iterator() {
                try {
                    Iterator<URI> result = ResultUtils.unwrap(rcon.getResources(prefix));

                    Iterators.advance(result, offset);

                    if (limit > 0) {
                        return Iterators.limit(result, limit);
                    } else {
                        return result;
                    }
                } catch (RepositoryException e) {
                    ExceptionUtils.handleRepositoryException(e, ResourceUtils.class);
                    return Iterators.emptyIterator();
                }

            }
        };
    } else {
        // no direct prefix listing support, need to filter the listResources result
        return new Iterable<URI>() {
            @Override
            public Iterator<URI> iterator() {
                Iterator<URI> result = Iterators
                        .transform(Iterators.filter(listResources(con).iterator(), new Predicate<Resource>() {
                            @Override
                            public boolean apply(Resource input) {
                                return input instanceof URI && input.stringValue().startsWith(prefix);
                            }
                        }), new Function<Resource, URI>() {
                            @Override
                            public URI apply(Resource input) {
                                return (URI) input;
                            }
                        });

                Iterators.advance(result, offset);

                if (limit > 0) {
                    return Iterators.limit(result, limit);
                } else {
                    return result;
                }
            }
        };
    }
}

From source file:org.apache.jackrabbit.oak.spi.security.authentication.external.impl.jmx.Delegatee.java

@Nonnull
private Iterator<String> internalListOrphanedIdentities() {
    try {//  w w w .j a va 2 s. com
        Iterator<SyncedIdentity> it = handler.listIdentities(userMgr);
        return Iterators.filter(Iterators.transform(it, new Function<SyncedIdentity, String>() {
            @Nullable
            @Override
            public String apply(@Nullable SyncedIdentity syncedIdentity) {
                if (syncedIdentity != null && isMyIDP(syncedIdentity)) {
                    ExternalIdentityRef ref = syncedIdentity.getExternalIdRef();
                    try {
                        ExternalIdentity extId = (ref == null) ? null : idp.getIdentity(ref);
                        if (extId == null) {
                            return syncedIdentity.getId();
                        }
                    } catch (ExternalIdentityException e) {
                        log.error("Error while fetching external identity {}", syncedIdentity, e);
                    }
                }
                return null;
            }
        }), Predicates.notNull());
    } catch (RepositoryException e) {
        log.error("Error while listing orphaned users", e);
        return Iterators.emptyIterator();
    }
}

From source file:com.digitalpetri.opcua.sdk.server.subscriptions.Subscription.java

private void returnNotifications(ServiceRequest<PublishRequest, PublishResponse> service) {
    LinkedHashSet<BaseMonitoredItem<?>> items = new LinkedHashSet<>();

    lastIterator.forEachRemaining(items::add);

    itemsById.values().stream().filter(item -> item.hasNotifications() || item.isTriggered())
            .forEach(items::add);/* w w w. ja  va 2  s.  c  o m*/

    PeekingIterator<BaseMonitoredItem<?>> iterator = Iterators.peekingIterator(items.iterator());

    gatherAndSend(iterator, Optional.of(service));

    lastIterator = iterator.hasNext() ? iterator : Iterators.emptyIterator();
}

From source file:org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore.java

@Override
public Iterator<String> resolveChunks(String blobId) throws IOException {
    if (!InMemoryDataRecord.isInstance(blobId)) {
        return Iterators.singletonIterator(blobId);
    }//from   www  . ja  v  a 2s  .  co m
    return Iterators.emptyIterator();
}

From source file:org.apache.uima.lucas.indexer.analysis.AnnotationTokenStream.java

protected void initializeIterators() {
    annotationIterator = Iterators.filter(jCas.getAnnotationIndex(annotationType).iterator(),
            new NotNullPredicate<Annotation>());

    if (!annotationIterator.hasNext()) {
        featureStructureIterator = Iterators.emptyIterator();
        featureValueIterator = Iterators.emptyIterator();
        return;//from  www.ja  v  a2s .  c om
    }

    currentAnnotation = (Annotation) annotationIterator.next();
    featureStructureIterator = createFeatureStructureIterator(currentAnnotation, featurePath);
    if (!featureStructureIterator.hasNext()) {
        featureValueIterator = Iterators.emptyIterator();
        return;
    }

    FeatureStructure featureStructure = featureStructureIterator.next();
    featureValueIterator = createFeatureValueIterator(featureStructure, featureNames);
}

From source file:org.apache.phoenix.execute.MutationState.java

public Iterator<Pair<byte[], List<Mutation>>> toMutations(final boolean includeMutableIndexes) {
    final Iterator<Map.Entry<TableRef, Map<ImmutableBytesPtr, RowMutationState>>> iterator = this.mutations
            .entrySet().iterator();/* w w w.  j  a  v a  2s. co m*/
    if (!iterator.hasNext()) {
        return Iterators.emptyIterator();
    }
    Long scn = connection.getSCN();
    final long timestamp = scn == null ? HConstants.LATEST_TIMESTAMP : scn;
    return new Iterator<Pair<byte[], List<Mutation>>>() {
        private Map.Entry<TableRef, Map<ImmutableBytesPtr, RowMutationState>> current = iterator.next();
        private Iterator<Pair<byte[], List<Mutation>>> innerIterator = init();

        private Iterator<Pair<byte[], List<Mutation>>> init() {
            return addRowMutations(current.getKey(), current.getValue(), timestamp, includeMutableIndexes);
        }

        @Override
        public boolean hasNext() {
            return innerIterator.hasNext() || iterator.hasNext();
        }

        @Override
        public Pair<byte[], List<Mutation>> next() {
            if (!innerIterator.hasNext()) {
                current = iterator.next();
            }
            return innerIterator.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

    };
}