List of usage examples for com.google.common.collect.Lists.newArrayListWithCapacity
@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithCapacity(int initialArraySize)
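Before the project examples below, a minimal self-contained sketch of typical usage; the class name, element type, and count are illustrative and not taken from any of the projects listed here. newArrayListWithCapacity pre-sizes the backing array for a known element count, so the list does not need to reallocate while it is filled; when the count is only an estimate, Guava's Lists.newArrayListWithExpectedSize is the usual alternative.

import com.google.common.collect.Lists;

import java.util.List;

public class NewArrayListWithCapacityExample {
    public static void main(String[] args) {
        int knownCount = 16; // exact number of elements known up front
        List<String> names = Lists.newArrayListWithCapacity(knownCount);
        for (int i = 0; i < knownCount; i++) {
            names.add("name-" + i);
        }
        // the capacity is only an allocation hint; the list still grows past it if needed
        System.out.println(names.size()); // 16
    }
}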
From source file:edu.bsu.storygame.core.assets.AudioCache.java
public AudioCache(Assets assets) {
    List<RFuture<Sound>> futures = Lists.newArrayListWithCapacity(Key.values().length);
    for (final Key key : Key.values()) {
        final Sound sound = assets.getSound(key.relativePath);
        map.put(key, sound);
        futures.add(sound.state);
    }
    RFuture.collect(futures).onComplete(new Slot<Try<Collection<Sound>>>() {
        @Override
        public void onEmit(Try<Collection<Sound>> collectionTry) {
            if (collectionTry.isSuccess()) {
                ((RPromise<AudioCache>) state).succeed(AudioCache.this);
            } else {
                ((RPromise<AudioCache>) state).fail(collectionTry.getFailure());
            }
        }
    });
}
From source file:org.eclipse.xtext.common.types.access.binary.asm.BinaryMethodSignature.java
public List<BinaryGenericTypeSignature> getExceptionTypes() {
    // skip type parameters
    int exceptionStart = chars.indexOf('^', offset);
    if (exceptionStart == -1) {
        int paren = chars.lastIndexOf(')');
        if (paren == -1) {
            throw new IllegalArgumentException();
        }
        // ignore return type
        exceptionStart = SignatureUtil.scanTypeSignature(chars, paren + 1) + 1;
        int length = offset + this.length;
        if (exceptionStart == length)
            return Collections.emptyList();
    }
    int length = offset + this.length;
    int i = exceptionStart;
    List<BinaryGenericTypeSignature> result = Lists.newArrayListWithCapacity(2);
    while (i < length) {
        if (chars.charAt(i) == '^') {
            exceptionStart++;
            i++;
        } else {
            throw new IllegalArgumentException();
        }
        i = SignatureUtil.scanTypeSignature(chars, i) + 1;
        result.add(new BinaryGenericTypeSignature(chars, exceptionStart, i - exceptionStart));
        exceptionStart = i;
    }
    return result;
}
From source file:org.apache.pulsar.client.impl.PartitionedProducerImpl.java
public PartitionedProducerImpl(PulsarClientImpl client, String topic, ProducerConfiguration conf,
        int numPartitions, CompletableFuture<Producer> producerCreatedFuture) {
    super(client, topic, conf, producerCreatedFuture);
    this.producers = Lists.newArrayListWithCapacity(numPartitions);
    this.numPartitions = numPartitions;
    this.routerPolicy = conf.getMessageRouter(numPartitions);
    stats = client.getConfiguration().getStatsIntervalSeconds() > 0 ? new ProducerStats() : null;
    start();
}
From source file:edu.bsu.storygame.core.LoadingScreen.java
public LoadingScreen(final MonsterGame game, final ScreenStack screenStack) {
    super(game.plat);
    this.game = checkNotNull(game);
    configureProgressBar();
    List<RFuture<Boolean>> futures = Lists.newArrayListWithCapacity(NUMBER_OF_CACHES);
    futures.add(game.imageCache.state.map(new Function<ImageCache, Boolean>() {
        @Override
        public Boolean apply(ImageCache imageCache) {
            progressBar.increment(1);
            return true;
        }
    }));
    futures.add(game.audioCache.state.map(new Function<AudioCache, Boolean>() {
        @Override
        public Boolean apply(AudioCache audioCache) {
            progressBar.increment(1);
            return true;
        }
    }));
    futures.add(game.narrativeCache.state.map(new Function<Narrative, Boolean>() {
        @Override
        public Boolean apply(Narrative narrative) {
            progressBar.increment(1);
            return true;
        }
    }));
    RFuture.collect(futures).onComplete(new Slot<Try<Collection<Boolean>>>() {
        @Override
        public void onEmit(Try<Collection<Boolean>> collectionTry) {
            if (collectionTry.isSuccess()) {
                screenStack.push(new StartScreen(game), screenStack.slide().left());
            } else {
                root.add(new Label("Failure caching resources; see log for details.")
                        .setStyles(Style.COLOR.is(Colors.WHITE)));
                // IDEA is confused about the fact that we are actually handling this Throwable. Suppress it.
                //noinspection ThrowableResultOfMethodCallIgnored
                game.plat.log().error(collectionTry.getFailure().getMessage());
            }
        }
    });
    root = iface.createRoot(AxisLayout.vertical(), SimpleStyles.newSheet(game.plat.graphics()), layer)
            .setSize(size()).add(new Label("Loading...").addStyles(Style.COLOR.is(Colors.WHITE)));
}
From source file:org.apache.pig.builtin.RollupDimensions.java
@Override
public DataBag exec(Tuple tuple) throws IOException {
    List<Tuple> result = Lists.newArrayListWithCapacity(tuple.size() + 1);
    CubeDimensions.convertNullToUnknown(tuple);
    result.add(tuple);
    iterativelyRollup(result, tuple);
    return bf.newDefaultBag(result);
}
From source file:org.terasology.entitySystem.metadata.extension.CollisionGroupTypeHandler.java
public List<CollisionGroup> deserializeList(EntityData.Value value) {
    List<CollisionGroup> result = Lists.newArrayListWithCapacity(value.getStringCount());
    for (String name : value.getStringList()) {
        CollisionGroup group = groupManager.getCollisionGroup(name);
        if (group != null) {
            result.add(group);
        }
    }
    return result;
}
From source file:org.glowroot.agent.model.QueryCollector.java
public List<Aggregate.Query> toAggregateProto(SharedQueryTextCollection sharedQueryTextCollection,
        boolean includeActive) {
    // " + queries.size()" is to cover the maximum number of limit exceeded buckets
    List<Aggregate.Query> allQueries = Lists
            .newArrayListWithCapacity(Math.min(queryCount, limit) + queries.size());
    for (Map.Entry<String, Map<String, MutableQuery>> outerEntry : queries.entrySet()) {
        for (Map.Entry<String, MutableQuery> innerEntry : outerEntry.getValue().entrySet()) {
            allQueries.add(innerEntry.getValue().toAggregateProto(outerEntry.getKey(), innerEntry.getKey(),
                    sharedQueryTextCollection, includeActive));
        }
    }
    if (allQueries.size() <= limit) {
        // there could be limit exceeded buckets if hardLimitMultiplierWhileBuilding is 1
        for (Map.Entry<String, MutableQuery> entry : limitExceededBuckets.entrySet()) {
            allQueries.add(entry.getValue().toAggregateProto(entry.getKey(), LIMIT_EXCEEDED_BUCKET,
                    sharedQueryTextCollection, includeActive));
        }
        sort(allQueries);
        return allQueries;
    }
    sort(allQueries);
    List<Aggregate.Query> exceededQueries = allQueries.subList(limit, allQueries.size());
    allQueries = Lists.newArrayList(allQueries.subList(0, limit));
    // do not modify original limit exceeded buckets since adding exceeded queries below
    Map<String, MutableQuery> limitExceededBuckets = copyLimitExceededBuckets();
    for (Aggregate.Query exceededQuery : exceededQueries) {
        String queryType = exceededQuery.getType();
        MutableQuery limitExceededBucket = limitExceededBuckets.get(queryType);
        if (limitExceededBucket == null) {
            limitExceededBucket = new MutableQuery();
            limitExceededBuckets.put(queryType, limitExceededBucket);
        }
        limitExceededBucket.add(exceededQuery);
    }
    for (Map.Entry<String, MutableQuery> entry : limitExceededBuckets.entrySet()) {
        allQueries.add(entry.getValue().toAggregateProto(entry.getKey(), LIMIT_EXCEEDED_BUCKET,
                sharedQueryTextCollection, includeActive));
    }
    // need to re-sort now including limit exceeded bucket
    sort(allQueries);
    return allQueries;
}
From source file:com.github.steveash.jg2p.seq.PhonemeACrfTrainer2.java
private static InstanceList makeExamplesFromAligns(Iterable<Alignment> alignsToTrain, Pipe pipe) {
    int count = 0;
    InstanceList instances = new InstanceList(pipe);
    for (Alignment align : alignsToTrain) {
        List<String> xs = align.getWordUnigrams();
        List<Boolean> bs = align.getXBoundaryMarks();
        Iterator<String> ys = align.getAllYTokensAsList().iterator();
        Preconditions.checkState(xs.size() == bs.size());
        List<String[]> targets = Lists.newArrayListWithCapacity(xs.size());
        for (int i = 0; i < xs.size(); i++) {
            targets.add(new String[] { bs.get(i) ? "1" : "0", bs.get(i) ? ys.next() : "<>" });
        }
        Instance ii = new Instance(xs, targets, null, null);
        instances.addThruPipe(ii);
        count += 1;
        // if (count > 1000) {
        //     break;
        // }
    }
    log.info("Read {} instances of training data", count);
    return instances;
}
From source file:io.druid.indexer.HadoopDruidDetermineConfigurationJob.java
@Override
public boolean run() {
    List<Jobby> jobs = Lists.newArrayList();
    JobHelper.ensurePaths(config);
    if (config.isDeterminingPartitions()) {
        jobs.add(config.getPartitionsSpec().getPartitionJob(config));
    } else {
        int shardsPerInterval = config.getPartitionsSpec().getNumShards();
        Map<DateTime, List<HadoopyShardSpec>> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance());
        int shardCount = 0;
        for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) {
            DateTime bucket = segmentGranularity.getStart();
            if (shardsPerInterval > 0) {
                List<HadoopyShardSpec> specs = Lists.newArrayListWithCapacity(shardsPerInterval);
                for (int i = 0; i < shardsPerInterval; i++) {
                    specs.add(new HadoopyShardSpec(new HashBasedNumberedShardSpec(i, shardsPerInterval,
                            HadoopDruidIndexerConfig.jsonMapper), shardCount++));
                }
                shardSpecs.put(bucket, specs);
                log.info("DateTime[%s], spec[%s]", bucket, specs);
            } else {
                final HadoopyShardSpec spec = new HadoopyShardSpec(new NoneShardSpec(), shardCount++);
                shardSpecs.put(bucket, Lists.newArrayList(spec));
                log.info("DateTime[%s], spec[%s]", bucket, spec);
            }
        }
        config.setShardSpecs(shardSpecs);
    }
    return JobHelper.runJobs(jobs, config);
}
From source file:org.gradle.model.internal.core.ModelGraph.java
public ModelSearchResult search(ModelPath path) {
    List<String> reached = Lists.newArrayListWithCapacity(path.getDepth());
    ModelNode node = null;
    ModelNode nextNode;
    for (String pathComponent : path) {
        if (node == null) {
            nextNode = entryNodes.get(pathComponent);
        } else {
            nextNode = node.getLinks().get(pathComponent);
        }
        if (nextNode == null) {
            if (reached.isEmpty()) {
                return new ModelSearchResult(null, path, null, null);
            } else {
                return new ModelSearchResult(null, path, node, new ModelPath(reached));
            }
        } else {
            node = nextNode;
        }
    }
    return new ModelSearchResult(node, path, node, path);
}