List of usage examples for com.google.common.collect.Iterators#concat
public static <T> Iterator<T> concat(Iterator<? extends T> a, Iterator<? extends T> b)
From source file:org.apache.james.transport.mailets.ToRecipientFolder.java
@Override public void init() throws MessagingException { super.init(); sieveMailet = new SieveMailet(usersRepository, mailboxManager, ResourceLocatorImpl.instanciate(usersRepository, sieveRepository), getInitParameter(FOLDER_PARAMETER, "INBOX")); sieveMailet.init(new MailetConfig() { @Override/* ww w . j a v a 2s . co m*/ public String getInitParameter(String name) { if ("addDeliveryHeader".equals(name)) { return "Delivered-To"; } else if ("resetReturnPath".equals(name)) { return "true"; } else { return getMailetConfig().getInitParameter(name); } } @Override public Iterator<String> getInitParameterNames() { return Iterators.concat(getMailetConfig().getInitParameterNames(), Arrays.asList("addDeliveryHeader", "resetReturnPath").iterator()); } @Override public MailetContext getMailetContext() { return getMailetConfig().getMailetContext(); } @Override public String getMailetName() { return getMailetConfig().getMailetName(); } }); sieveMailet.setQuiet(getInitParameter("quiet", true)); sieveMailet.setConsume(getInitParameter(CONSUME_PARAMETER, false)); }
From source file:com.googlecode.efactory.building.NameAccessor.java
private Iterator<GlobalNameMapping> getGlobalNameMappings(EObject context) { Iterator<GlobalNameMapping> globalMappings = Find.allInResourceSet(context, GlobalNameMapping.class); globalMappings = Iterators.concat(globalMappings, SingletonIterator.create(defaultNameMapping)); return globalMappings; }
From source file:com.github.jonross.seq4j.Seq.java
/** * Wraps {@link Iterators#concat(Iterator)}; combines this sequence and another iterator * into one sequence.//from ww w. j a v a2 s.c o m */ Seq<T> concat(Iterator<? extends T> iter) { return seq(Iterators.concat(this, iter)); }
From source file:com.sk89q.worldedit.extent.reorder.MultiStageReorder.java
@Override public Operation commitBefore() { return new OperationQueue( new BlockMapEntryPlacer(getExtent(), Iterators.concat(stage1.iterator(), stage2.iterator())), new Stage3Committer()); }
From source file:org.locationtech.geogig.cli.plumbing.DiffTree.java
/**
 * Executes the diff-tree command with the specified options.
 * <p>
 * Output modes (mutually exclusive): with {@code describe} a per-attribute
 * description of each change is printed; with {@code treeStats} per-parent-tree
 * added/removed/modified counters are printed; otherwise one line per entry
 * with the path and the old/new object ids.
 */
@Override
protected void runInternal(GeogigCLI cli) throws IOException {
    // At most two refspecs: old and new tree-ish.
    if (refSpec.size() > 2) {
        throw new CommandFailedException("Tree refspecs list is too long :" + refSpec);
    }
    if (treeStats && describe) {
        throw new CommandFailedException("Cannot use --describe and --tree-stats simultaneously");
    }
    GeoGIG geogig = cli.getGeogig();
    org.locationtech.geogig.api.plumbing.DiffTree diff = geogig
            .command(org.locationtech.geogig.api.plumbing.DiffTree.class);
    String oldVersion = resolveOldVersion();
    String newVersion = resolveNewVersion();
    diff.setOldVersion(oldVersion).setNewVersion(newVersion);
    Iterator<DiffEntry> diffEntries;
    if (paths.isEmpty()) {
        diffEntries = diff.setProgressListener(cli.getProgressListener()).call();
    } else {
        // One diff call per path filter; chain the resulting iterators together.
        diffEntries = Iterators.emptyIterator();
        for (String path : paths) {
            Iterator<DiffEntry> moreEntries = diff.setPathFilter(path)
                    .setProgressListener(cli.getProgressListener()).call();
            diffEntries = Iterators.concat(diffEntries, moreEntries);
        }
    }
    DiffEntry diffEntry;
    // Per-parent-path counters, laid out as { added, removed, modified }.
    HashMap<String, Long[]> stats = Maps.newHashMap();
    while (diffEntries.hasNext()) {
        diffEntry = diffEntries.next();
        StringBuilder sb = new StringBuilder();
        // Prefer the new path; fall back to the old path (e.g. for removals).
        String path = diffEntry.newPath() != null ? diffEntry.newPath() : diffEntry.oldPath();
        if (describe) {
            // Header line: change-type initial followed by the path.
            sb.append(diffEntry.changeType().toString().charAt(0)).append(' ').append(path)
                    .append(LINE_BREAK);
            if (diffEntry.changeType() == ChangeType.MODIFIED) {
                FeatureDiff featureDiff = geogig.command(DiffFeature.class)
                        .setNewVersion(Suppliers.ofInstance(diffEntry.getNewObject()))
                        .setOldVersion(Suppliers.ofInstance(diffEntry.getOldObject())).call();
                Map<PropertyDescriptor, AttributeDiff> diffs = featureDiff.getDiffs();
                // Changed descriptors not yet printed by the in-order loop below.
                HashSet<PropertyDescriptor> diffDescriptors = Sets.newHashSet(diffs.keySet());
                NodeRef noderef = diffEntry.changeType() != ChangeType.REMOVED
                        ? diffEntry.getNewObject() : diffEntry.getOldObject();
                RevFeatureType featureType = geogig.command(RevObjectParse.class)
                        .setObjectId(noderef.getMetadataId()).call(RevFeatureType.class).get();
                Optional<RevObject> obj = geogig.command(RevObjectParse.class)
                        .setObjectId(noderef.objectId()).call();
                RevFeature feature = (RevFeature) obj.get();
                ImmutableList<Optional<Object>> values = feature.getValues();
                ImmutableList<PropertyDescriptor> descriptors = featureType.sortedDescriptors();
                int idx = 0;
                // Walk attributes in feature-type order: changed ones print their
                // old/new values, unchanged ones print "U" plus the current value.
                for (PropertyDescriptor descriptor : descriptors) {
                    if (diffs.containsKey(descriptor)) {
                        AttributeDiff ad = diffs.get(descriptor);
                        sb.append(ad.getType().toString().charAt(0) + " "
                                + descriptor.getName().toString() + LINE_BREAK);
                        if (!ad.getType().equals(TYPE.ADDED)) {
                            Object value = ad.getOldValue().orNull();
                            sb.append(TextValueSerializer.asString(Optional.fromNullable(value)));
                            sb.append(LINE_BREAK);
                        }
                        if (!ad.getType().equals(TYPE.REMOVED)) {
                            Object value = ad.getNewValue().orNull();
                            sb.append(TextValueSerializer.asString(Optional.fromNullable(value)));
                            sb.append(LINE_BREAK);
                        }
                        diffDescriptors.remove(descriptor);
                    } else {
                        sb.append("U ").append(descriptor.getName().toString()).append(LINE_BREAK);
                        sb.append(TextValueSerializer.asString(values.get(idx))).append(LINE_BREAK);
                    }
                    idx++;
                }
                // Any changed descriptors that were not in the sorted list
                // above are printed afterwards, in no particular order.
                for (PropertyDescriptor descriptor : diffDescriptors) {
                    AttributeDiff ad = diffs.get(descriptor);
                    sb.append(ad.getType().toString().charAt(0) + " "
                            + descriptor.getName().toString() + LINE_BREAK);
                    if (!ad.getType().equals(TYPE.ADDED)) {
                        Object value = ad.getOldValue().orNull();
                        sb.append(TextValueSerializer.asString(Optional.fromNullable(value)));
                        sb.append(LINE_BREAK);
                    }
                    if (!ad.getType().equals(TYPE.REMOVED)) {
                        Object value = ad.getNewValue().orNull();
                        sb.append(TextValueSerializer.asString(Optional.fromNullable(value)));
                        sb.append(LINE_BREAK);
                    }
                }
            } else {
                // ADDED or REMOVED: print every attribute of the one version
                // that exists (new object for adds, old object for removes).
                NodeRef noderef = diffEntry.changeType() == ChangeType.ADDED
                        ? diffEntry.getNewObject() : diffEntry.getOldObject();
                RevFeatureType featureType = geogig.command(RevObjectParse.class)
                        .setObjectId(noderef.getMetadataId()).call(RevFeatureType.class).get();
                Optional<RevObject> obj = geogig.command(RevObjectParse.class)
                        .setObjectId(noderef.objectId()).call();
                RevFeature feature = (RevFeature) obj.get();
                ImmutableList<Optional<Object>> values = feature.getValues();
                int i = 0;
                for (Optional<Object> value : values) {
                    sb.append(diffEntry.changeType().toString().charAt(0));
                    sb.append(' ');
                    sb.append(featureType.sortedDescriptors().get(i).getName().toString());
                    sb.append(LINE_BREAK);
                    sb.append(TextValueSerializer.asString(value));
                    sb.append(LINE_BREAK);
                    i++;
                }
                sb.append(LINE_BREAK);
            }
            sb.append(LINE_BREAK);
            cli.getConsole().println(sb.toString());
        } else if (treeStats) {
            // Aggregate change counts per parent tree path.
            String parent = NodeRef.parentPath(path);
            if (!stats.containsKey(parent)) {
                stats.put(parent, new Long[] { 0l, 0l, 0l });
            }
            Long[] counts = stats.get(parent);
            if (diffEntry.changeType() == ChangeType.ADDED) {
                counts[0]++;
            } else if (diffEntry.changeType() == ChangeType.REMOVED) {
                counts[1]++;
            } else if (diffEntry.changeType() == ChangeType.MODIFIED) {
                counts[2]++;
            }
        } else {
            // Default mode: "<path> <oldObjectId> <newObjectId>" per entry.
            sb.append(path).append(' ');
            sb.append(diffEntry.oldObjectId().toString());
            sb.append(' ');
            sb.append(diffEntry.newObjectId().toString());
            cli.getConsole().println(sb.toString());
        }
    }
    if (treeStats) {
        // Emit one line per tree: "<path> <added> <removed> <modified>".
        for (String path : stats.keySet()) {
            StringBuffer sb = new StringBuffer();
            sb.append(path);
            Long[] counts = stats.get(path);
            for (int i = 0; i < counts.length; i++) {
                sb.append(" " + counts[i].toString());
            }
            cli.getConsole().println(sb.toString());
        }
    }
}
From source file:com.google.gdt.eclipse.designer.hosted.classloader.GWTSharedClassLoader.java
@Override public Enumeration<URL> findResources(String name) throws IOException { final Enumeration<URL> superResources = super.findResources(name); final Enumeration<URL> moduleResources = stateTL.get().activeLoader.getResources(name); Iterator<URL> allResources = Iterators.concat(Iterators.forEnumeration(superResources), Iterators.forEnumeration(moduleResources)); return Iterators.asEnumeration(allResources); }
From source file:org.apache.cassandra.hadoop.cql3.LimitedLocalNodeFirstLocalBalancingPolicy.java
@Override public Iterator<Host> newQueryPlan(String keyspace, Statement statement) { List<Host> local = new ArrayList<>(1); List<Host> remote = new ArrayList<>(liveReplicaHosts.size()); for (Host liveReplicaHost : liveReplicaHosts) { if (isLocalHost(liveReplicaHost)) { local.add(liveReplicaHost);//from w w w . jav a2s .c o m } else { remote.add(liveReplicaHost); } } Collections.shuffle(remote); logger.trace("Using the following hosts order for the new query plan: {} | {}", local, remote); return Iterators.concat(local.iterator(), remote.iterator()); }
From source file:org.locationtech.geogig.osm.internal.CreateOSMChangesetOp.java
/**
 * Executes the diff operation.
 *
 * @return an iterator to a set of differences between the two trees
 * @see DiffEntry
 */
@Override
protected Iterator<ChangeContainer> _call() {
    // Diff nodes and ways separately (tree entries are not reported), then
    // chain the two iterators into a single entry stream.
    Iterator<DiffEntry> nodeIterator = command(DiffOp.class).setFilter(OSMUtils.NODE_TYPE_NAME)
            .setNewVersion(newRefSpec).setOldVersion(oldRefSpec).setReportTrees(false).call();
    Iterator<DiffEntry> wayIterator = command(DiffOp.class).setFilter(OSMUtils.WAY_TYPE_NAME)
            .setNewVersion(newRefSpec).setOldVersion(oldRefSpec).setReportTrees(false).call();
    Iterator<DiffEntry> iterator = Iterators.concat(nodeIterator, wayIterator);
    final EntityConverter converter = new EntityConverter();
    // Lazily maps each DiffEntry to an osmosis ChangeContainer.
    Function<DiffEntry, ChangeContainer> function = new Function<DiffEntry, ChangeContainer>() {

        @Override
        @Nullable
        public ChangeContainer apply(@Nullable DiffEntry diff) {
            // For removals describe the old object; otherwise the new one.
            NodeRef ref = diff.changeType().equals(ChangeType.REMOVED) ? diff.getOldObject()
                    : diff.getNewObject();
            RevFeature revFeature = command(RevObjectParse.class).setObjectId(ref.objectId())
                    .call(RevFeature.class).get();
            RevFeatureType revFeatureType = command(RevObjectParse.class)
                    .setObjectId(ref.getMetadataId()).call(RevFeatureType.class).get();
            SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(
                    (SimpleFeatureType) revFeatureType.type());
            // Rebuild the simple feature from the stored attribute values,
            // matching values to descriptors by position.
            ImmutableList<PropertyDescriptor> descriptors = revFeatureType.sortedDescriptors();
            ImmutableList<Optional<Object>> values = revFeature.getValues();
            for (int i = 0; i < descriptors.size(); i++) {
                PropertyDescriptor descriptor = descriptors.get(i);
                Optional<Object> value = values.get(i);
                featureBuilder.set(descriptor.getName(), value.orNull());
            }
            SimpleFeature feature = featureBuilder.buildFeature(ref.name());
            Entity entity = converter.toEntity(feature, id);
            // Wrap in the container matching the concrete entity type.
            EntityContainer container;
            if (entity instanceof Node) {
                container = new NodeContainer((Node) entity);
            } else {
                container = new WayContainer((Way) entity);
            }
            // Map geogig change types onto osmosis change actions.
            ChangeAction action = diff.changeType().equals(ChangeType.ADDED) ? ChangeAction.Create
                    : diff.changeType().equals(ChangeType.MODIFIED) ? ChangeAction.Modify
                            : ChangeAction.Delete;
            return new ChangeContainer(container, action);
        }
    };
    return Iterators.transform(iterator, function);
}
From source file:org.fcrepo.kernel.utils.iterators.RdfStream.java
/** * @param newTriples Triples to add./* ww w . j av a 2 s .co m*/ * @return This object for continued use. */ public RdfStream concat(final Iterator<? extends Triple> newTriples) { triples = Iterators.concat(newTriples, triples); return this; }
From source file:org.locationtech.geogig.osm.cli.commands.OSMExport.java
/** * Executes the export command using the provided options. *//*ww w. j ava 2s. c o m*/ @Override protected void runInternal(GeogigCLI cli) throws IOException { if (args.size() < 1 || args.size() > 2) { printUsage(cli); throw new CommandFailedException(); } checkParameter(bbox == null || bbox.size() == 4, "The specified bounding box is not correct"); geogig = cli.getGeogig(); String osmfile = args.get(0); String ref = "WORK_HEAD"; if (args.size() == 2) { ref = args.get(1); Optional<ObjectId> tree = geogig.command(ResolveTreeish.class).setTreeish(ref).call(); checkParameter(tree.isPresent(), "Invalid commit or reference: %s", ref); } File file = new File(osmfile); checkParameter(!file.exists() || overwrite, "The selected file already exists. Use -o to overwrite"); Iterator<EntityContainer> nodes = getFeatures(ref + ":node"); Iterator<EntityContainer> ways = getFeatures(ref + ":way"); Iterator<EntityContainer> iterator = Iterators.concat(nodes, ways); if (file.getName().endsWith(".pbf")) { BlockOutputStream output = new BlockOutputStream(new FileOutputStream(file)); OsmosisSerializer serializer = new OsmosisSerializer(output); while (iterator.hasNext()) { EntityContainer entity = iterator.next(); serializer.process(entity); } serializer.complete(); } else { XmlWriter writer = new XmlWriter(file, CompressionMethod.None); while (iterator.hasNext()) { EntityContainer entity = iterator.next(); writer.process(entity); } writer.complete(); } }