List of usage examples for com.google.common.collect Lists newArrayListWithExpectedSize
@GwtCompatible(serializable = true) public static <E> ArrayList<E> newArrayListWithExpectedSize(int estimatedSize)
From source file:com.yahoo.yqlplus.engine.internal.java.backends.java.KeyAccumulator.java
private int addKey(String name, List<KEY> keys) { Set<KEY> seen;//from w ww . j a v a 2 s . c om List<KEY> output; int idx; if (names.containsKey(name)) { idx = names.get(name); keyNames.add(name); seen = this.seen.get(idx); output = columns.get(idx); } else { idx = columns.size(); keyNames.add(name); names.put(name, idx); output = Lists.newArrayListWithExpectedSize(keys.size()); columns.add(output); seen = Sets.newHashSet(); this.seen.add(seen); } // preserve order but uniqueify for (KEY key : keys) { if (seen.add(key)) { output.add(key); } } return idx; }
From source file:com.mgmtp.jfunk.data.generator.field.DynamicYearField.java
/**
 * Creates a field whose values are the calendar years in the range
 * [currentYear + min, currentYear + max], where min and max come from the
 * element's {@code min}/{@code max} child texts (an optional leading '+'
 * sign is tolerated).
 */
public DynamicYearField(final MathRandom random, final Element element, final String characterSetId) {
    super(random, element, characterSetId);
    String minText = element.getChildText(XMLTags.MIN);
    if (minText.startsWith("+")) {
        // Integer.parseInt in older JDKs rejects a leading '+', so strip it.
        minText = minText.substring(1);
    }
    String maxText = element.getChildText(XMLTags.MAX);
    if (maxText.startsWith("+")) {
        maxText = maxText.substring(1);
    }
    final int min = Integer.parseInt(minText);
    final int max = Integer.parseInt(maxText);
    checkState(min <= max, "min (=" + min + ") must not exceed the max (=" + max + ") ");
    final int currentYear = Calendar.getInstance().get(Calendar.YEAR);
    final int minYear = currentYear + min;
    final int maxYear = currentYear + max;
    // One value per year in the inclusive range.
    values = Lists.newArrayListWithExpectedSize(maxYear - minYear + 1);
    for (int year = minYear; year <= maxYear; ++year) {
        values.add(String.valueOf(year));
    }
    range = new Range(0, values.size() - 1);
}
From source file:org.apache.shindig.gadgets.parse.caja.CajaCssParser.java
/**
 * Parses CSS text into its top-level rule sets. Whitespace-only input yields
 * an empty list; stylesheet children that are not rule sets are skipped.
 *
 * @param css raw CSS text
 * @return parsed rules, possibly empty, never null
 * @throws GadgetException with code CSS_PARSE_ERROR when the CSS is malformed
 */
public List<ParsedCssRule> parse(String css) throws GadgetException {
    if (css.matches("\\s*")) {
        return Lists.newArrayList();
    }
    CssParser parser = getParser(css);
    CssTree.StyleSheet stylesheet;
    try {
        stylesheet = parser.parseStyleSheet();
    } catch (ParseException e) {
        throw new GadgetException(GadgetException.Code.CSS_PARSE_ERROR, e);
    }
    ArrayList<ParsedCssRule> rules = Lists.newArrayListWithExpectedSize(stylesheet.children().size());
    for (CssTree child : stylesheet.children()) {
        if (child instanceof CssTree.RuleSet) {
            rules.add(new CajaParsedCssRule((CssTree.RuleSet) child));
        }
    }
    return rules;
}
From source file:org.nmdp.ngs.align.BiojavaPairwiseAlignment.java
/**
 * Computes a local (Smith-Waterman) alignment for every query/subject pair.
 *
 * @param queries query sequences, must not be null
 * @param subjects subject sequences, must not be null
 * @param gapPenalties gap penalties, must not be null
 * @return one alignment pair per (query, subject) combination in query-major
 *     order; empty when either input list is empty
 */
@Override
public Iterable<AlignmentPair> local(final List<Sequence> queries, final List<Sequence> subjects,
        final GapPenalties gapPenalties) {
    checkNotNull(queries);
    checkNotNull(subjects);
    checkNotNull(gapPenalties);
    if (queries.isEmpty() || subjects.isEmpty()) {
        return Collections.<AlignmentPair>emptyList();
    }
    SmithWaterman aligner = new SmithWaterman(gapPenalties.match(), gapPenalties.replace(),
            gapPenalties.insert(), gapPenalties.delete(), gapPenalties.extend(), getSubstitutionMatrix());
    List<AlignmentPair> pairs = Lists.newArrayListWithExpectedSize(queries.size() * subjects.size());
    for (Sequence query : queries) {
        for (Sequence subject : subjects) {
            pairs.add(aligner.pairwiseAlignment(query, subject));
        }
    }
    return pairs;
}
From source file:com.google.devtools.depan.resource_doc.eclipse.ui.widgets.ResourceContainerAdapter.java
/** * Build the list of descendants from both children and resources. *///from w w w. ja v a 2 s .c o m private Object[] buildDescendants(ResourceContainer rsrcCntr) { Collection<ResourceContainer> children = rsrcCntr.getChildren(); Collection<Object> resources = rsrcCntr.getResources(); int size = children.size() + resources.size(); List<Object> result = Lists.newArrayListWithExpectedSize(size); result.addAll(children); result.addAll(resources); return result.toArray(); }
From source file:org.coinj.dash.DashChainExtension.java
/**
 * Verifies the transactions of a newly added block when InstantX block
 * filtering is active. With a filtered-transaction view, only the hashes that
 * resolve to an actual transaction are checked; otherwise the block's full
 * transaction list is used. Does nothing when masternode logic or the
 * filtering spork is off.
 */
@Override
public void verifyBlockAddition(Block added, @Nullable List<Sha256Hash> filteredTxHashList,
        @Nullable Map<Sha256Hash, Transaction> filteredTxn) {
    if (!network.permitsMasternodesLogic()
            || !network.getSporkManager().isSporkActive(SporkManager.SPORK_3_INSTANTX_BLOCK_FILTERING)) {
        return;
    }
    if (filteredTxHashList != null && filteredTxn != null) {
        // Resolve filtered hashes to transactions; hashes without a match are skipped.
        List<Transaction> resolved = Lists.newArrayListWithExpectedSize(filteredTxn.size());
        for (final Sha256Hash txHash : filteredTxHashList) {
            final Transaction tx = filteredTxn.get(txHash);
            if (tx != null) {
                resolved.add(tx);
            }
        }
        if (!resolved.isEmpty()) {
            checkTxs(resolved, added.getHashAsString());
        }
    } else {
        final List<Transaction> transactions = added.getTransactions();
        if (transactions != null) {
            checkTxs(transactions, added.getHashAsString());
        }
    }
}
From source file:org.eclipse.xtext.xbase.scoping.batch.ImplicitlyImportedTypesAdapter.java
@Override protected Collection<String> computeLiteralClassNames() { List<Class<?>> staticImportClasses = implicitlyImportedTypes.getStaticImportClasses(); List<String> result = Lists.newArrayListWithExpectedSize(staticImportClasses.size()); for (Class<?> clazz : staticImportClasses) { result.add(clazz.getName());//from w w w. j a v a2s.c o m } return result; }
From source file:org.apache.kylin.tool.metrics.systemcube.CubeDescCreator.java
public static CubeDesc generateKylinCubeDescForMetricsQuery(KylinConfig config, SinkTool sinkTool) { String tableName = sinkTool.getTableNameForMetrics(config.getKylinMetricsSubjectQuery()); //Set for dimensions List<String> dimensions = ModelCreator.getDimensionsForMetricsQuery(); dimensions.remove(TimePropertyEnum.DAY_TIME.toString()); dimensions.remove(RecordEvent.RecordReserveKeyEnum.TIME.toString()); List<DimensionDesc> dimensionDescList = Lists.newArrayListWithExpectedSize(dimensions.size()); for (String dimensionName : dimensions) { dimensionDescList.add(getDimensionDesc(tableName, dimensionName)); }/*from ww w . j ava2s. c o m*/ //Set for measures List<String> measures = ModelCreator.getMeasuresForMetricsQuery(); measures.remove(QueryPropertyEnum.ID_CODE.toString()); List<MeasureDesc> measureDescList = Lists.newArrayListWithExpectedSize(measures.size() * 2 + 1 + 1); List<Pair<String, String>> measureTypeList = HiveTableCreator.getHiveColumnsForMetricsQuery(); Map<String, String> measureTypeMap = Maps.newHashMapWithExpectedSize(measureTypeList.size()); for (Pair<String, String> entry : measureTypeList) { measureTypeMap.put(entry.getKey(), entry.getValue()); } measureDescList.add(getMeasureCount()); measureDescList.add(getMeasureMin(QueryPropertyEnum.TIME_COST.toString(), measureTypeMap.get(QueryPropertyEnum.TIME_COST.toString()))); for (String measure : measures) { measureDescList.add(getMeasureSum(measure, measureTypeMap.get(measure))); measureDescList.add(getMeasureMax(measure, measureTypeMap.get(measure))); } measureDescList.add(getMeasureHLL(QueryPropertyEnum.ID_CODE.toString())); measureDescList.add(getMeasurePercentile(QueryPropertyEnum.TIME_COST.toString())); //Set for row key RowKeyColDesc[] rowKeyColDescs = new RowKeyColDesc[dimensionDescList.size()]; int idx = getTimeRowKeyColDesc(tableName, rowKeyColDescs); rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.USER.toString(), idx + 1); idx++; rowKeyColDescs[idx] = 
getRowKeyColDesc(tableName, QueryPropertyEnum.PROJECT.toString(), idx + 1); idx++; rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.REALIZATION.toString(), idx + 1); idx++; rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.REALIZATION_TYPE.toString(), idx + 1); idx++; rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.EXCEPTION.toString(), idx + 1); idx++; rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QueryPropertyEnum.TYPE.toString(), idx + 1); idx++; rowKeyColDescs[idx] = getRowKeyColDesc(tableName, RecordEvent.RecordReserveKeyEnum.HOST.toString(), idx + 1); idx++; RowKeyDesc rowKeyDesc = new RowKeyDesc(); rowKeyDesc.setRowkeyColumns(rowKeyColDescs); //Set for aggregation group String[][] hierarchy_dims = new String[2][]; hierarchy_dims[0] = getTimeHierarchy(); hierarchy_dims[1] = new String[2]; hierarchy_dims[1][0] = QueryPropertyEnum.REALIZATION_TYPE.toString(); hierarchy_dims[1][1] = QueryPropertyEnum.REALIZATION.toString(); for (int i = 0; i < hierarchy_dims.length; i++) { hierarchy_dims[i] = refineColumnWithTable(tableName, hierarchy_dims[i]); } SelectRule selectRule = new SelectRule(); selectRule.mandatoryDims = new String[0]; selectRule.hierarchyDims = hierarchy_dims; selectRule.jointDims = new String[0][0]; AggregationGroup aggGroup = new AggregationGroup(); aggGroup.setIncludes(refineColumnWithTable(tableName, dimensions)); aggGroup.setSelectRule(selectRule); //Set for hbase mapping HBaseMappingDesc hBaseMapping = new HBaseMappingDesc(); hBaseMapping.setColumnFamily(getHBaseColumnFamily(measureDescList)); return generateKylinCubeDesc(tableName, sinkTool.getStorageType(), dimensionDescList, measureDescList, rowKeyDesc, aggGroup, hBaseMapping, sinkTool.getCubeDescOverrideProperties()); }
From source file:org.nmdp.ngs.align.ParallelBiojavaPairwiseAlignment.java
@Override public Iterable<AlignmentPair> local(final List<Sequence> queries, final List<Sequence> subjects, final GapPenalties gapPenalties) { checkNotNull(queries);/*from ww w. j a v a 2 s. c om*/ checkNotNull(subjects); checkNotNull(gapPenalties); if (queries.isEmpty() || subjects.isEmpty()) { return Collections.<AlignmentPair>emptyList(); } List<SmithWatermanTask> tasks = Lists.newArrayListWithExpectedSize(queries.size() * subjects.size()); for (Sequence query : queries) { for (Sequence subject : subjects) { tasks.add(new SmithWatermanTask(query, subject, gapPenalties, getSubstitutionMatrix())); } } List<AlignmentPair> alignmentPairs = Lists.newArrayListWithExpectedSize(queries.size() * subjects.size()); try { List<Future<AlignmentPair>> futures = executorService.invokeAll(tasks); for (Future<AlignmentPair> future : futures) { alignmentPairs.add(future.get()); } } catch (ExecutionException | InterruptedException e) { // todo } return alignmentPairs; }
From source file:org.apache.druid.query.search.SearchBinaryFn.java
@Override public Result<SearchResultValue> apply(Result<SearchResultValue> arg1, Result<SearchResultValue> arg2) { if (arg1 == null) { return arg2; }//from w w w.ja v a 2 s. c o m if (arg2 == null) { return arg1; } final int limit = gran instanceof AllGranularity ? this.limit : -1; SearchResultValue arg1Vals = arg1.getValue(); SearchResultValue arg2Vals = arg2.getValue(); Iterable<SearchHit> merged = Iterables.mergeSorted(Arrays.asList(arg1Vals, arg2Vals), searchSortSpec.getComparator()); int maxSize = arg1Vals.getValue().size() + arg2Vals.getValue().size(); if (limit > 0) { maxSize = Math.min(limit, maxSize); } List<SearchHit> results = Lists.newArrayListWithExpectedSize(maxSize); SearchHit prev = null; for (SearchHit searchHit : merged) { if (prev == null) { prev = searchHit; continue; } if (prev.equals(searchHit)) { if (prev.getCount() != null && searchHit.getCount() != null) { prev = new SearchHit(prev.getDimension(), prev.getValue(), prev.getCount() + searchHit.getCount()); } else { prev = new SearchHit(prev.getDimension(), prev.getValue()); } } else { results.add(prev); prev = searchHit; if (limit > 0 && results.size() >= limit) { break; } } } if (prev != null && (limit < 0 || results.size() < limit)) { results.add(prev); } final DateTime timestamp = gran instanceof AllGranularity ? arg1.getTimestamp() : gran.bucketStart(arg1.getTimestamp()); return new Result<SearchResultValue>(timestamp, new SearchResultValue(results)); }