Example usage for com.google.common.collect Iterators emptyIterator

List of usage examples for com.google.common.collect Iterators emptyIterator

Introduction

On this page you can find example usages for com.google.common.collect Iterators emptyIterator.

Prototype

@Deprecated
public static <T> UnmodifiableIterator<T> emptyIterator() 

Document

Returns the empty iterator.
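
Note that, as the @Deprecated annotation in the prototype shows, Guava has deprecated this method in favor of java.util.Collections.emptyIterator(), which is available since Java 7 and behaves the same way. A minimal, self-contained sketch comparing the two calls (class and variable names are illustrative only):

import java.util.Collections;
import java.util.Iterator;

import com.google.common.collect.Iterators;

public class EmptyIteratorExample {
    public static void main(String[] args) {
        // Guava's deprecated factory method for an empty, unmodifiable iterator.
        Iterator<String> guavaEmpty = Iterators.emptyIterator();
        // The JDK equivalent recommended as its replacement.
        Iterator<String> jdkEmpty = Collections.emptyIterator();

        System.out.println(guavaEmpty.hasNext()); // false
        System.out.println(jdkEmpty.hasNext());   // false
    }
}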

Usage

From source file:org.apache.phoenix.iterate.ExplainTable.java

private void appendScanRow(StringBuilder buf, Bound bound) {
    ScanRanges scanRanges = context.getScanRanges();
    // TODO: review this and potentially intersect the scan ranges
    // with the minMaxRange in ScanRanges to prevent having to do all this.
    KeyRange minMaxRange = scanRanges.getMinMaxRange();
    Iterator<byte[]> minMaxIterator = Iterators.emptyIterator();
    if (minMaxRange != KeyRange.EVERYTHING_RANGE) {
        RowKeySchema schema = tableRef.getTable().getRowKeySchema();
        if (!minMaxRange.isUnbound(bound)) {
            minMaxIterator = new RowKeyValueIterator(schema, minMaxRange.getRange(bound));
        }
    }
    boolean forceSkipScan = this.hint.hasHint(Hint.SKIP_SCAN);
    int nRanges = forceSkipScan ? scanRanges.getRanges().size() : scanRanges.getBoundSlotCount();
    for (int i = 0, minPos = 0; minPos < nRanges || minMaxIterator.hasNext(); i++) {
        List<KeyRange> ranges = minPos >= nRanges ? EVERYTHING : scanRanges.getRanges().get(minPos++);
        KeyRange range = bound == Bound.LOWER ? ranges.get(0) : ranges.get(ranges.size() - 1);
        byte[] b = range.getRange(bound);
        Boolean isNull = KeyRange.IS_NULL_RANGE == range ? Boolean.TRUE
                : KeyRange.IS_NOT_NULL_RANGE == range ? Boolean.FALSE : null;
        if (minMaxIterator.hasNext()) {
            byte[] bMinMax = minMaxIterator.next();
            int cmp = Bytes.compareTo(bMinMax, b) * (bound == Bound.LOWER ? 1 : -1);
            if (cmp > 0) {
                minPos = nRanges;
                b = bMinMax;
                isNull = null;
            } else if (cmp < 0) {
                minMaxIterator = Iterators.emptyIterator();
            }
        }
        appendPKColumnValue(buf, b, isNull, i);
        buf.append(',');
    }
}

From source file:org.apache.calcite.util.Pair.java

/**
 * Returns an iterator that iterates over (i, i + 1) pairs in an iterable.
 *
 * <p>For example, {@code adjacents([3, 5, 7])} returns [(3, 5), (5, 7)].</p>
 *
 * @param iterable Source collection
 * @param <T> Element type
 * @return Iterable over adjacent element pairs
 */
public static <T> Iterable<Pair<T, T>> adjacents(final Iterable<T> iterable) {
    return new Iterable<Pair<T, T>>() {
        public Iterator<Pair<T, T>> iterator() {
            final Iterator<T> iterator = iterable.iterator();
            if (!iterator.hasNext()) {
                return Iterators.emptyIterator();
            }
            final T first = iterator.next();
            return new Iterator<Pair<T, T>>() {
                T previous = first;

                public boolean hasNext() {
                    return iterator.hasNext();
                }

                public Pair<T, T> next() {
                    final T current = iterator.next();
                    final Pair<T, T> pair = of(previous, current);
                    previous = current;
                    return pair;
                }

                public void remove() {
                    throw new UnsupportedOperationException("remove");
                }
            };
        }
    };
}
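
For reference, here is a short, hypothetical usage fragment for the adjacents method above. It assumes org.apache.calcite.util.Pair and the usual java.util collection classes are imported, and that the code sits in an ordinary method body:

// Iterates over the adjacent pairs (3, 5) and (5, 7), as the Javadoc describes.
List<Integer> values = Arrays.asList(3, 5, 7);
for (Pair<Integer, Integer> pair : Pair.adjacents(values)) {
    System.out.println(pair);
}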

From source file:org.fao.geonet.services.metadata.Publish.java

private Iterator<String> getIds(ConfigurableApplicationContext appContext, UserSession userSession,
        final String commaSeparatedIds) {
    final DataManager dataManager = appContext.getBean(DataManager.class);

    if (commaSeparatedIds == null) {
        if (userSession != null) {
            SelectionManager sm = SelectionManager.getManager(userSession);
            final Iterator<String> selectionIter = sm.getSelection(SelectionManager.SELECTION_METADATA)
                    .iterator();
            return Iterators.transform(selectionIter, new Function<String, String>() {
                @Nullable
                @Override
                public String apply(String uuid) {
                    try {
                        return dataManager.getMetadataId(uuid);
                    } catch (Exception e) {
                        return null;
                    }
                }
            });
        } else {
            return Iterators.emptyIterator();
        }
    } else {
        return new Iterator<String>() {
            final StringTokenizer tokenizer = new StringTokenizer(commaSeparatedIds, ",", false);

            @Override
            public boolean hasNext() {
                return tokenizer.hasMoreElements();
            }

            @Override
            public String next() {
                return tokenizer.nextToken();
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }
}

From source file:org.sakaiproject.nakamura.lite.storage.mem.MemoryStorageClient.java

public DisposableIterator<Map<String, Object>> find(String keySpace, String columnFamily,
        Map<String, Object> properties) {
    List<Set<String>> matchingSets = Lists.newArrayList();
    for (Entry<String, Object> e : properties.entrySet()) {
        Object v = e.getValue();
        String k = e.getKey();
        if (shouldIndex(keySpace, columnFamily, k)) {
            if (v != null) {
                @SuppressWarnings("unchecked")
                Set<String> matches = (Set<String>) store
                        .get(keyHash(keySpace, columnFamily, e.getKey(), e.getValue()));
                LOGGER.debug("Searching for {} found {} ",
                        keyHash(keySpace, columnFamily, e.getKey(), e.getValue()), matches);
                if (matches != null) {
                    matchingSets.add(matches);
                }
            }
        } else {
            LOGGER.warn("Search on {}:{} is not supported, filter dropped ", columnFamily, k);
        }
    }

    // find the intersection of all matching sets, using set views to build a tree of sets. This iterates lazily.
    Set<String> setOfRowHashes = null;
    for (Set<String> m : matchingSets) {
        if (setOfRowHashes == null) {
            setOfRowHashes = m;
        } else {
            setOfRowHashes = Sets.intersection(setOfRowHashes, m);
        }
    }
    LOGGER.debug("Matching Rowhashes is {} ", setOfRowHashes);

    Iterator<String> iterator = null;
    if (setOfRowHashes == null) {
        iterator = Iterators.emptyIterator();
    } else {
        iterator = setOfRowHashes.iterator();
    }
    final Iterator<String> matchedRowIds = iterator;
    return new PreemptiveIterator<Map<String, Object>>() {

        private Map<String, Object> nextMap;

        @SuppressWarnings("unchecked")
        @Override
        protected boolean internalHasNext() {
            while (matchedRowIds.hasNext()) {
                nextMap = (Map<String, Object>) store.get(matchedRowIds.next());
                if (nextMap != null) {
                    return true;
                }
            }
            nextMap = null;
            close();
            return false;
        }

        @Override
        protected Map<String, Object> internalNext() {
            return nextMap;
        }
    };
}

From source file:io.wcm.config.editor.impl.EditorParameterProvider.java

private Iterator<Configuration> getConfigurations(SlingHttpServletRequest request) {
    if (configurationFinder != null) {
        Resource resource = request.getResource();
        return configurationFinder.findAll(resource);
    }

    return Iterators.emptyIterator();
}

From source file:org.apache.phoenix.compile.WhereCompiler.java

/**
 * Sets the start/stop key range based on the whereClause expression.
 * @param context the shared context during query compilation
 * @param whereClause the final where clause expression.
 */
private static void setScanFilter(StatementContext context, FilterableStatement statement,
        Expression whereClause, boolean disambiguateWithFamily, boolean hashJoinOptimization) {
    Scan scan = context.getScan();

    if (LiteralExpression.isBooleanFalseOrNull(whereClause)) {
        context.setScanRanges(ScanRanges.NOTHING);
    } else if (whereClause != null && !LiteralExpression.isTrue(whereClause) && !hashJoinOptimization) {
        Filter filter = null;
        final Counter counter = new Counter();
        whereClause.accept(new KeyValueExpressionVisitor() {

            @Override
            public Iterator<Expression> defaultIterator(Expression node) {
                // Stop traversal once we've found multiple KeyValue columns
                if (counter.getCount() == Counter.Count.MULTIPLE) {
                    return Iterators.emptyIterator();
                }
                return super.defaultIterator(node);
            }

            @Override
            public Void visit(KeyValueColumnExpression expression) {
                counter.increment(expression);
                return null;
            }
        });
        switch (counter.getCount()) {
        case NONE:
            PTable table = context.getResolver().getTables().get(0).getTable();
            byte[] essentialCF = table.getType() == PTableType.VIEW ? ByteUtil.EMPTY_BYTE_ARRAY
                    : SchemaUtil.getEmptyColumnFamily(table);
            filter = new RowKeyComparisonFilter(whereClause, essentialCF);
            break;
        case SINGLE:
            filter = disambiguateWithFamily ? new SingleCFCQKeyValueComparisonFilter(whereClause)
                    : new SingleCQKeyValueComparisonFilter(whereClause);
            break;
        case MULTIPLE:
            filter = disambiguateWithFamily ? new MultiCFCQKeyValueComparisonFilter(whereClause)
                    : new MultiCQKeyValueComparisonFilter(whereClause);
            break;
        }
        scan.setFilter(filter);
    }

    ScanRanges scanRanges = context.getScanRanges();
    if (scanRanges.useSkipScanFilter()) {
        ScanUtil.andFilterAtBeginning(scan, scanRanges.getSkipScanFilter());
    }
}

From source file:org.apache.marmotta.commons.sesame.repository.ResourceUtils.java

/**
 * List all resources contained in the KiWi System, regardless of knowledge space or type. Since this
 * operation works directly on the triple store, there is no guarantee the result is free of duplicates.
 * In case the underlying connection does not directly support listing resources (i.e. is not an instance of
 * ResourceConnection), the method will iterate over all triples and return their subjects.
 *
 * @return an Iterable over all resources in the triple store
 */
public static Iterable<Resource> listResources(RepositoryConnection con) {
    final ResourceConnection rcon = getWrappedResourceConnection(con);

    if (rcon != null) {
        return new Iterable<Resource>() {
            @Override
            public Iterator<Resource> iterator() {
                try {
                    return ResultUtils.unwrap(rcon.getResources());
                } catch (RepositoryException e) {
                    ExceptionUtils.handleRepositoryException(e, ResourceUtils.class);
                    return Iterators.emptyIterator();
                }
            }
        };
    } else {
        return listSubjectsInternal(con, null, null, null);
    }
}
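
A minimal usage sketch for listResources, assuming an already-open Sesame RepositoryConnection named con (connection setup, error handling, and cleanup are omitted):

// Iterate over every resource the connection exposes; if the wrapped
// ResourceConnection fails, the Iterable above falls back to Iterators.emptyIterator().
for (Resource resource : ResourceUtils.listResources(con)) {
    System.out.println(resource.stringValue());
}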

From source file:org.polarsys.reqcycle.traceability.cache.emfbased.CacheTraceabilityEngine.java

@Override
protected Iterator<Pair<Link, Reachable>> doGetTraceability(Reachable source, DIRECTION direction,
        Predicate<Pair<Link, Reachable>> scope) {
    TraceableElement s = getTraceableElement(source);
    if (s != null) {
        IPicker picker = getPicker(direction, scope);
        Iterable<IPicker> pickers = Arrays.asList(new IPicker[] { picker });
        IteratorFactory f = new IteratorFactory(pickers);
        f.activateWidthWisdom();
        f.activateRedundancyAwareness();
        Iterable<Object> iterable = f.createIterable(s);
        Iterator<Object> i = iterable.iterator();
        i.next();
        // create a list to "copy" data to the caller
        return Lists.newArrayList(Iterators.transform(i, new Function<Object, Pair<Link, Reachable>>() {
            public Pair<Link, Reachable> apply(Object o) {
                return (Pair<Link, Reachable>) o;
            }
        })).iterator();
    }
    return Iterators.emptyIterator();
}

From source file:com.metamx.druid.client.CachingClusteredClient.java

@Override
public Sequence<T> run(final Query<T> query) {
    final QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
    final CacheStrategy<T, Object, Query<T>> strategy = toolChest.getCacheStrategy(query);

    final Map<DruidServer, List<SegmentDescriptor>> serverSegments = Maps.newTreeMap();

    final List<Pair<DateTime, byte[]>> cachedResults = Lists.newArrayList();
    final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap();

    final boolean useCache = Boolean.parseBoolean(query.getContextValue("useCache", "true"))
            && strategy != null;
    final boolean populateCache = Boolean.parseBoolean(query.getContextValue("populateCache", "true"))
            && strategy != null;
    final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false"));

    ImmutableMap.Builder<String, String> contextBuilder = new ImmutableMap.Builder<String, String>();

    final String priority = query.getContextValue("priority", "0");
    contextBuilder.put("priority", priority);

    if (populateCache) {
        contextBuilder.put("bySegment", "true");
    }
    contextBuilder.put("intermediate", "true");

    final Query<T> rewrittenQuery = query.withOverriddenContext(contextBuilder.build());

    VersionedIntervalTimeline<String, ServerSelector> timeline = serverView.getTimeline(query.getDataSource());
    if (timeline == null) {
        return Sequences.empty();
    }

    // build set of segments to query
    Set<Pair<ServerSelector, SegmentDescriptor>> segments = Sets.newLinkedHashSet();

    List<TimelineObjectHolder<String, ServerSelector>> serversLookup = Lists.newLinkedList();

    for (Interval interval : rewrittenQuery.getIntervals()) {
        serversLookup.addAll(timeline.lookup(interval));
    }

    // Let tool chest filter out unneeded segments
    final List<TimelineObjectHolder<String, ServerSelector>> filteredServersLookup = toolChest
            .filterSegments(query, serversLookup);

    for (TimelineObjectHolder<String, ServerSelector> holder : filteredServersLookup) {
        for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
            ServerSelector selector = chunk.getObject();
            final SegmentDescriptor descriptor = new SegmentDescriptor(holder.getInterval(),
                    holder.getVersion(), chunk.getChunkNumber());

            segments.add(Pair.of(selector, descriptor));
        }
    }

    final byte[] queryCacheKey;
    if (strategy != null) {
        queryCacheKey = strategy.computeCacheKey(query);
    } else {
        queryCacheKey = null;
    }

    // Pull cached segments from cache and remove from set of segments to query
    if (useCache && queryCacheKey != null) {
        Map<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> cacheKeys = Maps.newHashMap();
        for (Pair<ServerSelector, SegmentDescriptor> e : segments) {
            cacheKeys.put(e, computeSegmentCacheKey(e.lhs.getSegment().getIdentifier(), e.rhs, queryCacheKey));
        }

        Map<Cache.NamedKey, byte[]> cachedValues = cache.getBulk(cacheKeys.values());

        for (Map.Entry<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> entry : cacheKeys.entrySet()) {
            Pair<ServerSelector, SegmentDescriptor> segment = entry.getKey();
            Cache.NamedKey segmentCacheKey = entry.getValue();

            final ServerSelector selector = segment.lhs;
            final SegmentDescriptor descriptor = segment.rhs;
            final Interval segmentQueryInterval = descriptor.getInterval();

            final byte[] cachedValue = cachedValues.get(segmentCacheKey);

            if (cachedValue != null) {
                cachedResults.add(Pair.of(segmentQueryInterval.getStart(), cachedValue));

                // remove cached segment from set of segments to query
                segments.remove(segment);
            } else {
                final String segmentIdentifier = selector.getSegment().getIdentifier();
                cachePopulatorMap.put(String.format("%s_%s", segmentIdentifier, segmentQueryInterval),
                        new CachePopulator(cache, objectMapper, segmentCacheKey));
            }
        }
    }

    // Compile list of all segments not pulled from cache
    for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
        final QueryableDruidServer queryableDruidServer = segment.lhs.pick();

        if (queryableDruidServer == null) {
            log.error("No servers found for %s?! How can this be?!", segment.rhs);
        } else {
            final DruidServer server = queryableDruidServer.getServer();
            List<SegmentDescriptor> descriptors = serverSegments.get(server);

            if (descriptors == null) {
                descriptors = Lists.newArrayList();
                serverSegments.put(server, descriptors);
            }

            descriptors.add(segment.rhs);
        }
    }

    return new LazySequence<T>(new Supplier<Sequence<T>>() {
        @Override
        public Sequence<T> get() {
            ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences = Lists.newArrayList();

            addSequencesFromServer(listOfSequences);
            addSequencesFromCache(listOfSequences);

            Collections.sort(listOfSequences,
                    Ordering.natural().onResultOf(Pair.<DateTime, Sequence<T>>lhsFn()));

            final Sequence<Sequence<T>> seq = Sequences
                    .simple(Iterables.transform(listOfSequences, Pair.<DateTime, Sequence<T>>rhsFn()));
            if (strategy == null) {
                return toolChest.mergeSequences(seq);
            } else {
                return strategy.mergeSequences(seq);
            }
        }

        private void addSequencesFromCache(ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences) {
            if (strategy == null) {
                return;
            }

            final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache();
            final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz();
            for (Pair<DateTime, byte[]> cachedResultPair : cachedResults) {
                final byte[] cachedResult = cachedResultPair.rhs;
                Sequence<Object> cachedSequence = new BaseSequence<Object, Iterator<Object>>(
                        new BaseSequence.IteratorMaker<Object, Iterator<Object>>() {
                            @Override
                            public Iterator<Object> make() {
                                try {
                                    if (cachedResult.length == 0) {
                                        return Iterators.emptyIterator();
                                    }

                                    return objectMapper.readValues(
                                            objectMapper.getJsonFactory().createJsonParser(cachedResult),
                                            cacheObjectClazz);
                                } catch (IOException e) {
                                    throw Throwables.propagate(e);
                                }
                            }

                            @Override
                            public void cleanup(Iterator<Object> iterFromMake) {
                            }
                        });
                listOfSequences.add(
                        Pair.of(cachedResultPair.lhs, Sequences.map(cachedSequence, pullFromCacheFunction)));
            }
        }

        @SuppressWarnings("unchecked")
        private void addSequencesFromServer(ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences) {
            for (Map.Entry<DruidServer, List<SegmentDescriptor>> entry : serverSegments.entrySet()) {
                final DruidServer server = entry.getKey();
                final List<SegmentDescriptor> descriptors = entry.getValue();

                final QueryRunner clientQueryable = serverView.getQueryRunner(server);
                if (clientQueryable == null) {
                    log.makeAlert("WTF!? server[%s] doesn't have a client Queryable?", server).emit();
                    continue;
                }

                final Sequence<T> resultSeqToAdd;
                final MultipleSpecificSegmentSpec segmentSpec = new MultipleSpecificSegmentSpec(descriptors);
                List<Interval> intervals = segmentSpec.getIntervals();

                if ("realtime".equals(server.getType()) || !populateCache || isBySegment) {
                    resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec));
                } else {
                    resultSeqToAdd = toolChest.mergeSequences(
                            Sequences.map(clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec)),
                                    new Function<Object, Sequence<T>>() {
                                        private final Function<T, Object> prepareForCache = strategy
                                                .prepareForCache();

                                        @Override
                                        public Sequence<T> apply(Object input) {
                                            Result<Object> result = (Result<Object>) input;
                                            final BySegmentResultValueClass<T> value = (BySegmentResultValueClass<T>) result
                                                    .getValue();
                                            String segmentIdentifier = value.getSegmentId();
                                            final Iterable<T> segmentResults = value.getResults();

                                            cachePopulatorMap
                                                    .get(String.format("%s_%s", segmentIdentifier,
                                                            value.getInterval()))
                                                    .populate(Iterables.transform(segmentResults,
                                                            prepareForCache));

                                            return Sequences.simple(Iterables.transform(segmentResults,
                                                    toolChest.makeMetricManipulatorFn(rewrittenQuery,
                                                            new MetricManipulationFn() {
                                                                @Override
                                                                public Object manipulate(
                                                                        AggregatorFactory factory,
                                                                        Object object) {
                                                                    return factory.deserialize(object);
                                                                }
                                                            })));
                                        }
                                    }));
                }

                listOfSequences.add(Pair.of(intervals.get(0).getStart(), resultSeqToAdd));
            }
        }
    });
}

From source file:org.apache.calcite.util.Pair.java

/**
 * Returns an iterator that iterates over (0, i) pairs in an iterable for
 * i &gt; 0.
 *
 * <p>For example, {@code firstAnd([3, 5, 7])} returns [(3, 5), (3, 7)].</p>
 *
 * @param iterable Source collection
 * @param <T> Element type
 * @return Iterable over pairs of the first element and all other elements
 */
public static <T> Iterable<Pair<T, T>> firstAnd(final Iterable<T> iterable) {
    return new Iterable<Pair<T, T>>() {
        public Iterator<Pair<T, T>> iterator() {
            final Iterator<T> iterator = iterable.iterator();
            if (!iterator.hasNext()) {
                return Iterators.emptyIterator();
            }
            final T first = iterator.next();
            return new Iterator<Pair<T, T>>() {
                public boolean hasNext() {
                    return iterator.hasNext();
                }

                public Pair<T, T> next() {
                    return of(first, iterator.next());
                }

                public void remove() {
                    throw new UnsupportedOperationException("remove");
                }
            };
        }
    };
}
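
And a matching hypothetical fragment for firstAnd, under the same assumptions as the adjacents example earlier on this page:

// Iterates over the pairs (3, 5) and (3, 7), pairing the first element with each later one.
for (Pair<Integer, Integer> pair : Pair.firstAnd(Arrays.asList(3, 5, 7))) {
    System.out.println(pair);
}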