Example usage for com.google.common.collect Iterators transform

Introduction

This page collects usage examples for com.google.common.collect.Iterators.transform, drawn from open-source projects.

Prototype

public static <F, T> Iterator<T> transform(final Iterator<F> fromIterator,
        final Function<? super F, ? extends T> function) 

Document

Returns an iterator that applies function to each element of fromIterator.
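
To show the basic contract before the real-world examples, here is a minimal, self-contained sketch; the class name TransformExample and its data are illustrative, not taken from any project below.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;

import java.util.Iterator;

public class TransformExample {
    public static void main(String[] args) {
        Iterator<String> words = ImmutableList.of("alpha", "beta", "gamma").iterator();
        // transform is lazy: each length is computed only when next() is called.
        Iterator<Integer> lengths = Iterators.transform(words, String::length);
        while (lengths.hasNext()) {
            System.out.println(lengths.next()); // prints 5, 4, 5
        }
    }
}

The returned iterator supports remove() whenever fromIterator does; nothing is copied unless the caller explicitly collects the results (e.g., with Sets.newHashSet or Lists.newArrayList, as some examples below do).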

Usage

From source file:org.geogit.cli.plumbing.LsTree.java
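A GeoGit CLI command that lazily maps each NodeRef returned by LsTreeOp to a printable line, so formatting happens only as the console consumes the iterator.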

@Override
public void runInternal(GeogitCLI cli) throws IOException {
    String ref;
    if (refList.isEmpty()) {
        ref = null;
    } else {
        ref = refList.get(0);
    }
    Strategy lsStrategy = Strategy.CHILDREN;
    if (recursive) {
        if (includeTrees) {
            lsStrategy = Strategy.DEPTHFIRST;
        } else if (onlyTrees) {
            lsStrategy = Strategy.DEPTHFIRST_ONLY_TREES;
        } else {
            lsStrategy = Strategy.DEPTHFIRST_ONLY_FEATURES;
        }
    } else {
        if (onlyTrees) {
            lsStrategy = Strategy.TREES_ONLY;
        }
    }
    Iterator<NodeRef> iter = cli.getGeogit().command(LsTreeOp.class).setReference(ref).setStrategy(lsStrategy)
            .call();

    final ConsoleReader console = cli.getConsole();

    Function<NodeRef, CharSequence> printFunctor = new Function<NodeRef, CharSequence>() {

        @Override
        public CharSequence apply(NodeRef input) {
            if (!verbose) {
                return input.path();
            }
            StringBuilder sb = new StringBuilder();
            sb.append(input.getMetadataId().toString()).append(' ')
                    .append(input.getType().toString().toLowerCase()).append(' ')
                    .append(input.objectId().toString()).append(' ').append(input.path());
            return sb;
        }
    };

    Iterator<CharSequence> lines = Iterators.transform(iter, printFunctor);

    while (lines.hasNext()) {
        console.println(lines.next());
    }
    console.flush();
}

From source file:com.thinkbiganalytics.metadata.modeshape.sla.JcrObligation.java
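Here the transformed iterator is drained immediately: each JCR metric node is converted to a Metric via JcrUtil.getGenericJson and collected into a set with Sets.newHashSet.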

@Override
public Set<Metric> getMetrics() {
    try {
        @SuppressWarnings("unchecked")
        Iterator<Node> itr = (Iterator<Node>) this.node.getNodes(METRICS);

        return Sets.newHashSet(Iterators.transform(itr, (metricNode) -> {
            return JcrUtil.getGenericJson(metricNode, JSON);
        }));
    } catch (RepositoryException e) {
        throw new MetadataRepositoryException("Failed to retrieve the metric nodes", e);
    }
}

From source file:org.fcrepo.kernel.utils.iterators.RdfStream.java
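A constructor that adapts a varargs array: Iterators.forArray feeds transform, which converts each Statement to a triple before the stream is built.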

/**
 * Constructor that begins the stream with proffered statements.
 *
 * @param statements the statements with which to begin the stream
 */
@SafeVarargs
public <T extends Statement> RdfStream(final T... statements) {
    this(Iterators.transform(Iterators.forArray(statements), statement2triple));
}

From source file:io.crate.metadata.RoutineInfos.java
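Here transform wraps the built-in token filter names, turning each String into a RoutineInfo on demand.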

private Iterator<RoutineInfo> builtInTokenFilters() {
    return Iterators.transform(ftResolver.getBuiltInTokenFilters().iterator(),
            new Function<String, RoutineInfo>() {
                @Nullable
                @Override
                public RoutineInfo apply(@Nullable String input) {
                    return new RoutineInfo(input, RoutineType.TOKEN_FILTER.getName());
                }
            });
}

From source file:co.cask.cdap.spark.app.SparkLogParser.java
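Inside mapPartitionsToPair, transform converts each aggregated (LogKey, LogStats) tuple to a pair of JSON strings, and Lists.newArrayList then drains the iterator for the partition.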

@Override
public void run(JavaSparkExecutionContext sec) throws Exception {
    JavaSparkContext jsc = new JavaSparkContext();

    Map<String, String> runtimeArguments = sec.getRuntimeArguments();
    String inputFileSet = runtimeArguments.get("input");
    final String outputTable = runtimeArguments.get("output");

    JavaPairRDD<LongWritable, Text> input = sec.fromDataset(inputFileSet);

    final JavaPairRDD<String, String> aggregated = input
            .mapToPair(new PairFunction<Tuple2<LongWritable, Text>, LogKey, LogStats>() {
                @Override
                public Tuple2<LogKey, LogStats> call(Tuple2<LongWritable, Text> input) throws Exception {
                    return SparkAppUsingGetDataset.parse(input._2());
                }
            }).reduceByKey(new Function2<LogStats, LogStats, LogStats>() {
                @Override
                public LogStats call(LogStats stats1, LogStats stats2) throws Exception {
                    return stats1.aggregate(stats2);
                }
            })
            .mapPartitionsToPair(new PairFlatMapFunction<Iterator<Tuple2<LogKey, LogStats>>, String, String>() {
                @Override
                public Iterable<Tuple2<String, String>> call(Iterator<Tuple2<LogKey, LogStats>> itor)
                        throws Exception {
                    final Gson gson = new Gson();
                    return Lists.newArrayList(Iterators.transform(itor,
                            new Function<Tuple2<LogKey, LogStats>, Tuple2<String, String>>() {
                                @Override
                                public Tuple2<String, String> apply(Tuple2<LogKey, LogStats> input) {
                                    return new Tuple2<>(gson.toJson(input._1()), gson.toJson(input._2()));
                                }
                            }));
                }
            });

    // Collect all data to the driver and write to the dataset directly. That's the intent of the test.
    sec.execute(new TxRunnable() {
        @Override
        public void run(DatasetContext context) throws Exception {
            KeyValueTable kvTable = context.getDataset(outputTable);
            for (Map.Entry<String, String> entry : aggregated.collectAsMap().entrySet()) {
                kvTable.write(entry.getKey(), entry.getValue());
            }
        }
    });
}

From source file:io.pravega.client.batch.impl.BatchClientImpl.java
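The sorted segment set is exposed as a lazy iterator of SegmentInfo, with segmentToInfo applied only as callers advance.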

private Iterator<SegmentInfo> listSegments(Stream stream, Date from) {
    // modify iteration above but starting with a timestamp and ending with a break
    Map<Segment, Long> segments = getAndHandleExceptions(controller.getSegmentsAtTime(
            new StreamImpl(stream.getScope(), stream.getStreamName()), from.getTime()), RuntimeException::new);
    SortedSet<Segment> result = new TreeSet<>();
    result.addAll(segments.keySet());
    result.addAll(getAndHandleExceptions(controller.getSuccessors(new StreamCut(stream, segments)),
            RuntimeException::new));
    return Iterators.transform(result.iterator(), s -> segmentToInfo(s));
}

From source file:org.locationtech.geogig.cli.porcelain.index.ListIndexes.java
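As in the LsTree example above, a printing Function formats each IndexInfo lazily while the console loop drains the iterator.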

@Override
protected void runInternal(GeogigCLI cli)
        throws InvalidParameterException, CommandFailedException, IOException {

    Repository repo = cli.getGeogig().getRepository();

    List<IndexInfo> indexInfos;
    if (treeName != null) {
        indexInfos = repo.indexDatabase().getIndexInfos(treeName);
    } else {
        indexInfos = repo.indexDatabase().getIndexInfos();
    }

    Function<IndexInfo, CharSequence> printFunctor = new Function<IndexInfo, CharSequence>() {

        @Override
        public CharSequence apply(IndexInfo input) {
            StringBuilder sb = new StringBuilder();
            sb.append("Index [").append(input.getId()).append("]\n");
            sb.append("  ").append("Feature Type Tree:\t").append(input.getTreeName()).append("\n");
            sb.append("  ").append("Attribute:\t\t").append(input.getAttributeName()).append("\n");
            sb.append("  ").append("Index Type:\t\t").append(input.getIndexType()).append("\n");
            Map<String, Object> metadata = input.getMetadata();
            if (metadata.containsKey(IndexInfo.MD_QUAD_MAX_BOUNDS)) {
                sb.append("  ").append("Quad Tree Max Bounds:\t")
                        .append(metadata.get(IndexInfo.MD_QUAD_MAX_BOUNDS)).append("\n");
            }
            if (metadata.containsKey(IndexInfo.FEATURE_ATTRIBUTES_EXTRA_DATA)) {
                String[] extraAttributes = (String[]) metadata.get(IndexInfo.FEATURE_ATTRIBUTES_EXTRA_DATA);
                sb.append("  ").append("Extra Attributes:\t").append(Arrays.toString(extraAttributes))
                        .append("\n");
            }
            return sb;
        }
    };

    Iterator<CharSequence> lines = Iterators.transform(indexInfos.iterator(), printFunctor);

    Console console = cli.getConsole();
    while (lines.hasNext()) {
        console.println(lines.next());
    }
    console.flush();

}

From source file:org.apache.fluo.core.log.TracingTransaction.java
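transform pairs naturally with Iterators.toString here: each RowColumn is hex-encoded only while the debug string is assembled.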

private String encRC(Collection<RowColumn> ret) {
    return Iterators.toString(Iterators.transform(ret.iterator(), new Function<RowColumn, String>() {
        @Override
        public String apply(RowColumn rc) {
            return Hex.encNonAscii(rc);
        }
    }));
}

From source file:org.polymap.core.model2.store.recordstore.RecordStoreUnitOfWork.java
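The anonymous Collection re-applies transform on every iterator() call, mapping each IRecordState to its id without copying the underlying result set.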

@Override
public <T extends Entity> Collection find(Class<T> entityClass) {
    try {
        // XXX cache result for subsequent loadEntityState() (?)
        final ResultSet results = store
                .find(new SimpleQuery().eq(TYPE_KEY, entityClass.getName()).setMaxResults(Integer.MAX_VALUE));

        return new AbstractCollection() {

            public Iterator iterator() {
                return Iterators.transform(results.iterator(), new Function<IRecordState, Object>() {
                    public Object apply(IRecordState input) {
                        return input.id();
                    }
                });
            }

            public int size() {
                return results.count();
            }

            protected void finalize() throws Throwable {
                results.close();
            }
        };
    } catch (Exception e) {
        throw new ModelRuntimeException(e);
    }
}

From source file:net.myrrix.batch.common.iterator.sequencefile.SequenceFileDirIterator.java
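A nested use: transform turns an array of FileStatus into an iterator of per-file iterators, which Iterators.concat then flattens into a single sequence.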

/**
 * Constructor that uses either {@link FileSystem#listStatus(Path)} or
 * {@link FileSystem#globStatus(Path)} to obtain list of files to iterate over
 * (depending on the pathType parameter).
 */
public SequenceFileDirIterator(Path path, PathType pathType, PathFilter filter, Comparator<FileStatus> ordering,
        final boolean reuseKeyValueInstances, final Configuration conf) throws IOException {

    FileStatus[] statuses;
    FileSystem fs = path.getFileSystem(conf);
    if (filter == null) {
        statuses = pathType == PathType.GLOB ? fs.globStatus(path) : fs.listStatus(path);
    } else {
        statuses = pathType == PathType.GLOB ? fs.globStatus(path, filter) : fs.listStatus(path, filter);
    }

    if (statuses == null) {
        statuses = NO_STATUSES;
    } else {
        if (ordering == null) {
            // If order does not matter, use a random order
            Collections.shuffle(Arrays.asList(statuses));
        } else {
            Arrays.sort(statuses, ordering);
        }
    }

    closer = Closer.create();

    Iterator<Iterator<Pair<K, V>>> fsIterators = Iterators.transform(Iterators.forArray(statuses),
            new Function<FileStatus, Iterator<Pair<K, V>>>() {
                @Override
                public Iterator<Pair<K, V>> apply(FileStatus from) {
                    try {
                        SequenceFileIterator<K, V> iterator = new SequenceFileIterator<K, V>(from.getPath(),
                                reuseKeyValueInstances, conf);
                        closer.register(iterator);
                        return iterator;
                    } catch (IOException ioe) {
                        throw new IllegalStateException(from.getPath().toString(), ioe);
                    }
                }
            });

    delegate = Iterators.concat(fsIterators);
}