List of usage examples for com.google.common.collect PeekingIterator next
@Override E next();
The objects returned by consecutive calls to #peek() then #next() are guaranteed to be equal to each other.
From source file:com.thinkbiganalytics.spark.shell.SparkShellProcessBuilder.java
/**
 * Adds the specified arguments to the Spark invocation.
 *
 * <p>A token that is not a known switch and is followed by another token is
 * treated as a name/value pair; every other token is added on its own.
 *
 * @param args the Spark command-line arguments to add
 * @return this builder, for chaining
 */
@Nonnull
public SparkShellProcessBuilder addSparkArgs(@Nonnull final Iterable<String> args) {
    final PeekingIterator<String> tokens = Iterators.peekingIterator(args.iterator());
    while (tokens.hasNext()) {
        final String name = tokens.next();
        if (SWITCHES.contains(name) || !tokens.hasNext()) {
            // A recognized switch, or the final token: it carries no value.
            addSparkArg(name);
        } else {
            addSparkArg(name, tokens.next());
        }
    }
    return this;
}
From source file:org.bitcoinj.wallet.DeterministicKeyChain.java
/**
 * Returns all the key chains found in the given list of keys. Typically there will only be one, but in the case of
 * key rotation it can happen that there are multiple chains found.
 *
 * <p>The protobuf key list is a flat sequence: a DETERMINISTIC_MNEMONIC entry starts a new chain (flushing the one
 * built so far), and subsequent DETERMINISTIC_KEY entries are inserted into the current chain's hierarchy. Keys
 * flagged "following" also start a new chain. Lookahead size and sigs-required are picked up from the external
 * parent key entry and applied when a chain is flushed.
 *
 * @param keys the serialized keys, in wallet order
 * @param crypter decrypter for encrypted keys; must be non-null if any key carries encrypted data
 * @param factory creates the concrete chain instances (watching, married, etc.)
 * @throws UnreadableWalletException if a key proto is malformed or missing required data
 */
public static List<DeterministicKeyChain> fromProtobuf(List<Protos.Key> keys, @Nullable KeyCrypter crypter,
        KeyChainFactory factory) throws UnreadableWalletException {
    List<DeterministicKeyChain> chains = newLinkedList();
    DeterministicSeed seed = null;
    DeterministicKeyChain chain = null;
    int lookaheadSize = -1;     // -1 means "not yet seen"; checkState guards before use
    int sigsRequiredToSpend = 1;
    List<ChildNumber> accountPath = newArrayList();
    PeekingIterator<Protos.Key> iter = Iterators.peekingIterator(keys.iterator());
    while (iter.hasNext()) {
        Protos.Key key = iter.next();
        final Protos.Key.Type t = key.getType();
        if (t == Protos.Key.Type.DETERMINISTIC_MNEMONIC) {
            accountPath = newArrayList();
            for (int i : key.getAccountPathList()) {
                accountPath.add(new ChildNumber(i));
            }
            if (accountPath.isEmpty())
                accountPath = ACCOUNT_ZERO_PATH;
            // A new mnemonic starts a new chain: finish and store the previous one, if any.
            if (chain != null) {
                checkState(lookaheadSize >= 0);
                chain.setLookaheadSize(lookaheadSize);
                chain.setSigsRequiredToSpend(sigsRequiredToSpend);
                chain.maybeLookAhead();
                chains.add(chain);
                chain = null;
            }
            long timestamp = key.getCreationTimestamp() / 1000;
            String passphrase = DEFAULT_PASSPHRASE_FOR_MNEMONIC; // FIXME allow non-empty passphrase
            if (key.hasSecretBytes()) {
                // Unencrypted mnemonic; may not also carry an encrypted seed.
                if (key.hasEncryptedDeterministicSeed())
                    throw new UnreadableWalletException("Malformed key proto: " + key.toString());
                byte[] seedBytes = null;
                if (key.hasDeterministicSeed()) {
                    seedBytes = key.getDeterministicSeed().toByteArray();
                }
                seed = new DeterministicSeed(key.getSecretBytes().toStringUtf8(), seedBytes, passphrase, timestamp);
            } else if (key.hasEncryptedData()) {
                // Encrypted mnemonic; may not also carry a plaintext seed.
                if (key.hasDeterministicSeed())
                    throw new UnreadableWalletException("Malformed key proto: " + key.toString());
                EncryptedData data = new EncryptedData(key.getEncryptedData().getInitialisationVector().toByteArray(),
                        key.getEncryptedData().getEncryptedPrivateKey().toByteArray());
                EncryptedData encryptedSeedBytes = null;
                if (key.hasEncryptedDeterministicSeed()) {
                    Protos.EncryptedData encryptedSeed = key.getEncryptedDeterministicSeed();
                    encryptedSeedBytes = new EncryptedData(encryptedSeed.getInitialisationVector().toByteArray(),
                            encryptedSeed.getEncryptedPrivateKey().toByteArray());
                }
                seed = new DeterministicSeed(data, encryptedSeedBytes, timestamp);
            } else {
                throw new UnreadableWalletException("Malformed key proto: " + key.toString());
            }
            if (log.isDebugEnabled())
                log.debug("Deserializing: DETERMINISTIC_MNEMONIC: {}", seed);
        } else if (t == Protos.Key.Type.DETERMINISTIC_KEY) {
            if (!key.hasDeterministicKey())
                throw new UnreadableWalletException("Deterministic key missing extra data: " + key.toString());
            byte[] chainCode = key.getDeterministicKey().getChainCode().toByteArray();
            // Deserialize the path through the tree.
            LinkedList<ChildNumber> path = newLinkedList();
            for (int i : key.getDeterministicKey().getPathList())
                path.add(new ChildNumber(i));
            // Deserialize the public key and path.
            LazyECPoint pubkey = new LazyECPoint(ECKey.CURVE.getCurve(), key.getPublicKey().toByteArray());
            final ImmutableList<ChildNumber> immutablePath = ImmutableList.copyOf(path);
            // Possibly create the chain, if we didn't already do so yet.
            boolean isWatchingAccountKey = false;
            boolean isFollowingKey = false;
            // Save previous chain, if any, if the key is marked as following. Current key and the
            // next ones are to be placed in a new following key chain.
            if (key.getDeterministicKey().getIsFollowing()) {
                if (chain != null) {
                    checkState(lookaheadSize >= 0);
                    chain.setLookaheadSize(lookaheadSize);
                    chain.setSigsRequiredToSpend(sigsRequiredToSpend);
                    chain.maybeLookAhead();
                    chains.add(chain);
                    chain = null;
                    seed = null;
                }
                isFollowingKey = true;
            }
            if (chain == null) {
                // If this is not a following chain and previous was, this must be married
                boolean isMarried = !isFollowingKey && !chains.isEmpty() && chains.get(chains.size() - 1).isFollowing();
                if (seed == null) {
                    // No seed seen: this is a watching chain rooted at this account key.
                    DeterministicKey accountKey = new DeterministicKey(immutablePath, chainCode, pubkey, null, null);
                    accountKey.setCreationTimeSeconds(key.getCreationTimestamp() / 1000);
                    chain = factory.makeWatchingKeyChain(key, iter.peek(), accountKey, isFollowingKey, isMarried);
                    isWatchingAccountKey = true;
                } else {
                    chain = factory.makeKeyChain(key, iter.peek(), seed, crypter, isMarried,
                            ImmutableList.<ChildNumber>builder().addAll(accountPath).build());
                    chain.lookaheadSize = LAZY_CALCULATE_LOOKAHEAD;
                    // If the seed is encrypted, then the chain is incomplete at this point. However, we will load
                    // it up below as we parse in the keys. We just need to check at the end that we've loaded
                    // everything afterwards.
                }
            }
            // Find the parent key assuming this is not the root key, and not an account key for a watching chain.
            DeterministicKey parent = null;
            if (!path.isEmpty() && !isWatchingAccountKey) {
                ChildNumber index = path.removeLast();
                parent = chain.hierarchy.get(path, false, false);
                path.add(index);
            }
            DeterministicKey detkey;
            if (key.hasSecretBytes()) {
                // Not encrypted: private key is available.
                final BigInteger priv = new BigInteger(1, key.getSecretBytes().toByteArray());
                detkey = new DeterministicKey(immutablePath, chainCode, pubkey, priv, parent);
            } else {
                if (key.hasEncryptedData()) {
                    Protos.EncryptedData proto = key.getEncryptedData();
                    EncryptedData data = new EncryptedData(proto.getInitialisationVector().toByteArray(),
                            proto.getEncryptedPrivateKey().toByteArray());
                    checkNotNull(crypter, "Encountered an encrypted key but no key crypter provided");
                    detkey = new DeterministicKey(immutablePath, chainCode, crypter, pubkey, data, parent);
                } else {
                    // No secret key bytes and key is not encrypted: either a watching key or private key bytes
                    // will be rederived on the fly from the parent.
                    detkey = new DeterministicKey(immutablePath, chainCode, pubkey, null, parent);
                }
            }
            if (key.hasCreationTimestamp())
                detkey.setCreationTimeSeconds(key.getCreationTimestamp() / 1000);
            if (log.isDebugEnabled())
                log.debug("Deserializing: DETERMINISTIC_KEY: {}", detkey);
            if (!isWatchingAccountKey) {
                // If the non-encrypted case, the non-leaf keys (account, internal, external) have already
                // been rederived and inserted at this point. In the encrypted case though,
                // we can't rederive and we must reinsert, potentially building the hierarchy object
                // if need be.
                if (path.isEmpty()) {
                    // Master key.
                    if (chain.rootKey == null) {
                        chain.rootKey = detkey;
                        chain.hierarchy = new DeterministicHierarchy(detkey);
                    }
                } else if (path.size() == chain.getAccountPath().size() + 1) {
                    // Constant 0 is used for external chain and constant 1 for internal chain
                    // (also known as change addresses). https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki
                    if (detkey.getChildNumber().num() == 0) {
                        // External chain is used for addresses that are meant to be visible outside of the wallet
                        // (e.g. for receiving payments).
                        chain.externalParentKey = detkey;
                        chain.issuedExternalKeys = key.getDeterministicKey().getIssuedSubkeys();
                        lookaheadSize = Math.max(lookaheadSize, key.getDeterministicKey().getLookaheadSize());
                        sigsRequiredToSpend = key.getDeterministicKey().getSigsRequiredToSpend();
                    } else if (detkey.getChildNumber().num() == 1) {
                        // Internal chain is used for addresses which are not meant to be visible outside of the
                        // wallet and is used for return transaction change.
                        chain.internalParentKey = detkey;
                        chain.issuedInternalKeys = key.getDeterministicKey().getIssuedSubkeys();
                    }
                }
            }
            chain.hierarchy.putKey(detkey);
            chain.basicKeyChain.importKey(detkey);
        }
    }
    // Flush the final chain, if one is still being built.
    if (chain != null) {
        checkState(lookaheadSize >= 0);
        chain.setLookaheadSize(lookaheadSize);
        chain.setSigsRequiredToSpend(sigsRequiredToSpend);
        chain.maybeLookAhead();
        chains.add(chain);
    }
    return chains;
}
From source file:org.apache.jackrabbit.oak.plugins.document.RevisionVector.java
/** * Calculates the parallel maximum of this and the given {@code vector}. * * @param vector the other vector.//from ww w .ja va2s .c o m * @return the parallel maximum of the two. */ public RevisionVector pmax(@Nonnull RevisionVector vector) { // optimize single revision case if (revisions.length == 1 && vector.revisions.length == 1) { if (revisions[0].getClusterId() == vector.revisions[0].getClusterId()) { return revisions[0].compareRevisionTime(vector.revisions[0]) > 0 ? this : vector; } else { return new RevisionVector(revisions[0], vector.revisions[0]); } } int capacity = Math.max(revisions.length, vector.revisions.length); List<Revision> pmax = newArrayListWithCapacity(capacity); PeekingIterator<Revision> it = peekingIterator(vector.iterator()); for (Revision r : revisions) { while (it.hasNext() && it.peek().getClusterId() < r.getClusterId()) { pmax.add(it.next()); } Revision other = peekRevision(it, r.getClusterId()); if (other != null && other.getClusterId() == r.getClusterId()) { pmax.add(Utils.max(r, other)); it.next(); } else { // other does not have a revision with r.clusterId pmax.add(r); } } // add remaining Iterators.addAll(pmax, it); return new RevisionVector(toArray(pmax, Revision.class), false, false); }
From source file:com.google.cloud.genomics.localrepo.vcf.VCFReader.java
/**
 * Parses the leading "##" meta-information lines of a VCF stream into a MetaInformation
 * object, consuming lines from {@code iterator} until the first line that does not start
 * with "##". Lines that match no known type are preserved verbatim as unparsed lines.
 *
 * @param iterator positioned at the first header line; the fileformat line is read by getFormat
 * @throws IllegalStateException if a "##" line does not match METAINFO_LINE_PATTERN
 */
public MetaInformation parseMetaInfo(PeekingIterator<String> iterator) {
    MetaInformation.Builder metainfo = MetaInformation.builder(getFormat(iterator));
    while (iterator.hasNext() && iterator.peek().startsWith("##")) {
        String next = iterator.next();
        Matcher matcher = METAINFO_LINE_PATTERN.matcher(next);
        if (!matcher.matches()) {
            throw new IllegalStateException(String.format("Failed to parse metainfo line: \"%s\"", next));
        }
        String type = matcher.group(1);   // e.g. "INFO", "FILTER", "contig"
        String value = matcher.group(2);  // the text after "##type="
        // NOTE: 'map' is declared in the INFO case but deliberately reassigned and reused by
        // the later cases via "(map = parseCompoundValue(value))" — Java switch cases share scope.
        switch (type) {
        case "INFO":
            Map<String, String> map = parseCompoundValue(value);
            MetaInformation.Info.Builder info = MetaInformation.Info.builder().setId(map.get("ID"))
                    .setNumber(MetaInformation.Number.create(map.get("Number")))
                    .setType(MetaInformation.Info.Type.parse(map.get("Type")))
                    .setDescription(map.get("Description"));
            for (Map.Entry<String, String> entry : getExtraFields(map, MetaInformation.Info.REQUIRED_FIELDS)) {
                info.addExtraField(entry.getKey(), entry.getValue());
            }
            metainfo.addInfo(info);
            break;
        case "FILTER":
            MetaInformation.Filter.Builder filter = MetaInformation.Filter.builder()
                    .setId((map = parseCompoundValue(value)).get("ID")).setDescription(map.get("Description"));
            for (Map.Entry<String, String> entry : getExtraFields(map, MetaInformation.Filter.REQUIRED_FIELDS)) {
                filter.addExtraField(entry.getKey(), entry.getValue());
            }
            metainfo.addFilter(filter);
            break;
        case "FORMAT":
            MetaInformation.Format.Builder format = MetaInformation.Format.builder()
                    .setId((map = parseCompoundValue(value)).get("ID"))
                    .setNumber(MetaInformation.Number.create(map.get("Number")))
                    .setType(MetaInformation.Format.Type.parse(map.get("Type")))
                    .setDescription(map.get("Description"));
            for (Map.Entry<String, String> entry : getExtraFields(map, MetaInformation.Format.REQUIRED_FIELDS)) {
                format.addExtraField(entry.getKey(), entry.getValue());
            }
            metainfo.addFormat(format);
            break;
        case "ALT":
            MetaInformation.Alt.Builder alt = MetaInformation.Alt.builder()
                    .setId(MetaInformation.Alt.Type.parse((map = parseCompoundValue(value)).get("ID")))
                    .setDescription(map.get("Description"));
            for (Map.Entry<String, String> entry : getExtraFields(map, MetaInformation.Alt.REQUIRED_FIELDS)) {
                alt.addExtraField(entry.getKey(), entry.getValue());
            }
            metainfo.addAlt(alt);
            break;
        case "assembly":
            metainfo.addAssembly(MetaInformation.Assembly.create(url(value)));
            break;
        case "contig":
            // contig values are parsed without the strictness flag (second arg false).
            MetaInformation.Contig.Builder contig = MetaInformation.Contig.builder()
                    .setId((map = parseCompoundValue(value, false)).get("ID"));
            for (Map.Entry<String, String> entry : getExtraFields(map, MetaInformation.Contig.REQUIRED_FIELDS)) {
                contig.addExtraField(entry.getKey(), entry.getValue());
            }
            metainfo.addContig(contig);
            break;
        case "SAMPLE":
            metainfo.addSample(MetaInformation.Sample.builder()
                    .setId((map = parseCompoundValue(value)).get("ID")).setGenome(map.get("Genome"))
                    .setMixture(map.get("Mixture")).setDescription(map.get("Description")).build());
            break;
        case "PEDIGREE":
            MetaInformation.Pedigree.Builder pedigree = MetaInformation.Pedigree.builder();
            for (Map.Entry<String, String> entry : parseCompoundValue(value).entrySet()) {
                pedigree.addExtraField(entry.getKey(), entry.getValue());
            }
            metainfo.addPedigree(pedigree);
            break;
        case "pedigreeDB":
            metainfo.addPedigreeDB(MetaInformation.PedigreeDB.create(url(value)));
            break;
        default:
            // Unknown meta line types are kept rather than rejected.
            metainfo.addUnparsedLine(MetaInformation.UnparsedMetaInfoLine.create(type, value));
        }
    }
    return metainfo.build();
}
From source file:org.apache.druid.collections.OrderedMergeIterator.java
/**
 * Returns the next element in the merged order.
 *
 * <p>The winner is chosen between the head of the priority queue of already-seen
 * iterators and the next unseen iterator, by comparing their first elements
 * (ties go to the priority queue). After consuming one element, the source
 * iterator is re-queued if it still has elements.
 *
 * @throws NoSuchElementException if the merge is exhausted
 */
@Override
public T next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }
    final PeekingIterator<T> source;
    if (!iterOfIterators.hasNext()) {
        // No more unseen iterators: drain the priority queue.
        source = firstElementComparedPQueue.remove();
    } else if (firstElementComparedPQueue.isEmpty()) {
        // Queue empty: pull in the next unseen iterator.
        source = iterOfIterators.next();
    } else {
        // Both available: pick whichever exposes the smaller head element.
        final T queuedHead = firstElementComparedPQueue.peek().peek();
        final T pendingHead = iterOfIterators.peek().peek();
        source = comparator.compare(queuedHead, pendingHead) <= 0
                ? firstElementComparedPQueue.remove()
                : iterOfIterators.next();
    }
    final T value = source.next();
    if (source.hasNext()) {
        // Re-queue so the iterator's new head competes in subsequent calls.
        firstElementComparedPQueue.add(source);
    }
    return value;
}
From source file:org.geogit.api.plumbing.diff.DiffCounter.java
private DiffObjectCount countChildrenDiffs(Iterator<Node> leftTree, Iterator<Node> rightTree) { final Ordering<Node> storageOrder = new NodeStorageOrder(); DiffObjectCount count = new DiffObjectCount(); PeekingIterator<Node> left = Iterators.peekingIterator(leftTree); PeekingIterator<Node> right = Iterators.peekingIterator(rightTree); while (left.hasNext() && right.hasNext()) { Node peekLeft = left.peek(); Node peekRight = right.peek(); if (0 == storageOrder.compare(peekLeft, peekRight)) { // same path, consume both peekLeft = left.next(); peekRight = right.next();//from ww w .ja v a2 s .c o m if (!peekLeft.getObjectId().equals(peekRight.getObjectId())) { // find the diffs between these two specific refs if (RevObject.TYPE.FEATURE.equals(peekLeft.getType())) { checkState(RevObject.TYPE.FEATURE.equals(peekRight.getType())); count.addFeatures(1); } else { checkState(RevObject.TYPE.TREE.equals(peekLeft.getType())); checkState(RevObject.TYPE.TREE.equals(peekRight.getType())); ObjectId leftTreeId = peekLeft.getObjectId(); ObjectId rightTreeId = peekRight.getObjectId(); count.add(countDiffs(leftTreeId, rightTreeId)); } } } else if (peekLeft == storageOrder.min(peekLeft, peekRight)) { peekLeft = left.next();// consume only the left value count.add(aggregateSize(ImmutableList.of(peekLeft))); } else { peekRight = right.next();// consume only the right value count.add(aggregateSize(ImmutableList.of(peekRight))); } } if (left.hasNext()) { count.add(countRemaining(left)); } else if (right.hasNext()) { count.add(countRemaining(right)); } Preconditions.checkState(!left.hasNext()); Preconditions.checkState(!right.hasNext()); return count; }
From source file:com.github.rinde.logistics.pdptw.solver.optaplanner.ScoreCalculator.java
@Nullable ParcelVisit updateRouteRemovals(Vehicle v) { final List<ParcelVisit> prevRoute = ImmutableList.copyOf(routes.get(v)); final List<ParcelVisit> newRoute = updateCurRoute(v); final PeekingIterator<ParcelVisit> prevIt = Iterators.peekingIterator(prevRoute.iterator()); final PeekingIterator<ParcelVisit> newIt = Iterators.peekingIterator(newRoute.iterator()); while (prevIt.hasNext() && newIt.hasNext() && prevIt.peek().equals(newIt.peek())) { // advance both iterators until we are at the position of the first // difference prevIt.next(); newIt.next();/*w ww.ja v a 2s .c o m*/ } while (prevIt.hasNext()) { remove(prevIt.next()); } if (newIt.hasNext()) { return newIt.peek(); } else { return null; } }
From source file:com.foundationdb.server.store.statistics.histograms.Sampler.java
private List<Bucket<T>> mergeUnpopularsIntoPopulars(PopularitySplit<T> split) { Deque<Bucket<T>> populars = split.popularBuckets; assert populars.size() >= maxSize : "failed populars.size[" + populars.size() + "] >= maxSize[" + maxSize + "]"; PeekingIterator<Bucket<T>> unpopulars = Iterators.peekingIterator(split.regularBuckets.iterator()); List<Bucket<T>> results = new ArrayList<>(populars.size()); BucketSampler<T> sampler = new BucketSampler<>(maxSize, split.popularsCount, false); for (Bucket<T> popular : populars) { if (sampler.add(popular)) { // merge in all the unpopulars less than this one while (unpopulars.hasNext() && unpopulars.peek().value().compareTo(popular.value()) <= 0) { Bucket<T> mergeMe = unpopulars.next(); mergeUp(mergeMe, popular); }/*w ww . java2 s . c o m*/ results.add(popular); } } // now, create one last value which merges in all of the remaining unpopulars Bucket<T> last = null; while (unpopulars.hasNext()) { Bucket<T> unpopular = unpopulars.next(); if (last != null) mergeUp(last, unpopular); last = unpopular; } if (last != null) results.add(last); return results; }
From source file:com.github.rinde.rinsim.cli.Menu.java
/**
 * Parses and executes the provided command-line arguments.
 * @param args The arguments to parse.
 * @return A string containing the help message, or {@link Optional#absent()}
 *         if no help was requested.
 * @throws CliException If anything in the parsing or execution went wrong.
 */
public Optional<String> execute(String... args) {
    final PeekingIterator<String> it = Iterators.peekingIterator(Iterators.forArray(args));
    final Set<Option> selectedOptions = newLinkedHashSet();
    while (it.hasNext()) {
        final String arg = it.next();
        final Optional<OptionParser> optParser = parseOption(arg);
        // Every token at this position must be a recognized option.
        checkCommand(optParser.isPresent(), "Found unrecognized command: '%s'.", arg);
        // Each option may be selected at most once.
        checkAlreadySelected(!selectedOptions.contains(optParser.get().getOption()), optParser.get().getOption(),
                "Option is already selected: %s.", optParser.get().getOption());
        if (groupMap.containsKey(optParser.get().getOption())) {
            // This option is part of an option group: at most one member of the
            // group may be selected, so check for a previously selected sibling.
            final SetView<Option> intersect = Sets.intersection(selectedOptions,
                    newLinkedHashSet(groupMap.get(optParser.get().getOption())));
            checkAlreadySelected(intersect.isEmpty(), optParser.get().getOption(),
                    "An option from the same group as '%s' has already been selected: " + "'%s'.",
                    optParser.get().getOption(), intersect);
        }
        selectedOptions.add(optParser.get().getOption());
        if (optParser.get().getOption().isHelpOption()) {
            // Help short-circuits all further parsing and execution.
            return Optional.of(printHelp());
        }
        final List<String> arguments = newArrayList();
        // If a non-option string is following the current option, it must be
        // the argument of the current option.
        while (it.hasNext() && !parseOption(it.peek()).isPresent()) {
            arguments.add(it.next());
        }
        try {
            optParser.get().parse(arguments);
        } catch (IllegalArgumentException | IllegalStateException e) {
            // Surface handler failures as CliExceptions tied to the offending option,
            // preserving the original cause.
            throw new CliException(e.getMessage(), e, CauseType.HANDLER_FAILURE, optParser.get().getOption());
        }
    }
    return Optional.absent();
}
From source file:com.google.cloud.storage.contrib.nio.UnixPath.java
/** * Returns {@code other} made relative to {@code path}. * * @see java.nio.file.Path#relativize(java.nio.file.Path) *///from www. j a v a 2 s . c om public UnixPath relativize(UnixPath other) { checkArgument(isAbsolute() == other.isAbsolute(), "'other' is different type of Path"); if (path.isEmpty()) { return other; } PeekingIterator<String> left = Iterators.peekingIterator(split()); PeekingIterator<String> right = Iterators.peekingIterator(other.split()); while (left.hasNext() && right.hasNext()) { if (!left.peek().equals(right.peek())) { break; } left.next(); right.next(); } StringBuilder result = new StringBuilder(path.length() + other.path.length()); while (left.hasNext()) { result.append(PARENT_DIR); result.append(SEPARATOR); left.next(); } while (right.hasNext()) { result.append(right.next()); result.append(SEPARATOR); } if (result.length() > 0 && !other.hasTrailingSeparator()) { result.deleteCharAt(result.length() - 1); } return new UnixPath(permitEmptyComponents, result.toString()); }