Example usage for com.google.common.collect Lists newArrayListWithCapacity

Introduction

This page shows example usage of com.google.common.collect.Lists#newArrayListWithCapacity.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithCapacity(int initialArraySize) 

Document

Creates an ArrayList instance backed by an array with the specified initial size; simply delegates to ArrayList#ArrayList(int).
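
A minimal, self-contained sketch of the basic call pattern before the real-world examples below. The class name, element values, and the expectedSize variable are illustrative only and do not come from any of the source files listed under Usage.

import com.google.common.collect.Lists;

import java.util.List;

public class NewArrayListWithCapacityExample {
    public static void main(String[] args) {
        // The element count is known up front, so size the backing array once
        // and avoid intermediate resizes while filling the list.
        int expectedSize = 3; // illustrative value
        List<String> names = Lists.newArrayListWithCapacity(expectedSize);
        names.add("alpha");
        names.add("beta");
        names.add("gamma");
        System.out.println(names); // prints [alpha, beta, gamma]
    }
}

The same pattern recurs in the examples that follow: the capacity argument is taken from a known collection size (or a fixed constant) so the list never has to grow its backing array.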

Usage

From source file:eu.project.ttc.models.GroovyTerm.java

public GroovyTerm(Term term, GroovyAdapter adapter) {
    this.term = term;

    this.compound = term.isCompound();
    this.isSingleWord = term.isSingleWord();
    this.neoclassical = term.isCompound()
            && term.firstWord().getWord().getCompoundType() == CompoundType.NEOCLASSICAL;
    this.pattern = term.getPattern();
    this.lemma = term.getGroupingKey();
    this.stem = term.firstWord().getWord().getStem();
    List<GroovyWord> aux = Lists.newArrayListWithCapacity(term.getWords().size());

    for (TermWord w : term.getWords())
        aux.add(adapter.asGroovyWord(w));
    this.words = ImmutableList.copyOf(aux);
}

From source file:com.android.build.gradle.integration.common.fixture.GetAndroidModelAction.java

@Override
public Map<String, AndroidProject> execute(BuildController buildController) {

    long t1 = System.currentTimeMillis();
    GradleBuild gradleBuild = buildController.getBuildModel();
    DomainObjectSet<? extends BasicGradleProject> projects = gradleBuild.getProjects();

    final int projectCount = projects.size();
    Map<String, AndroidProject> modelMap = Maps.newHashMapWithExpectedSize(projectCount);

    List<BasicGradleProject> projectList = Lists.newArrayList(projects);
    List<Thread> threads = Lists.newArrayListWithCapacity(CPU_COUNT);
    List<ModelQuery> queries = Lists.newArrayListWithCapacity(CPU_COUNT);

    for (int i = 0; i < CPU_COUNT; i++) {
        ModelQuery modelQuery = new ModelQuery(projectList, buildController);
        queries.add(modelQuery);
        Thread t = new Thread(modelQuery);
        threads.add(t);
        t.start();
    }

    for (int i = 0; i < CPU_COUNT; i++) {
        try {
            threads.get(i).join();
            ModelQuery modelQuery = queries.get(i);
            modelMap.putAll(modelQuery.getModels());
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    long t2 = System.currentTimeMillis();
    System.out.println("GetAndroidModelAction: " + (t2 - t1) + "ms");

    return modelMap;
}

From source file:goja.initialize.ctxbox.ClassSearcher.java

public ClassSearcher(Class... targets) {
    this.targets = Lists.newArrayListWithCapacity(targets.length);
    Collections.addAll(this.targets, targets);
}

From source file:org.glowroot.agent.model.ServiceCallCollector.java

public List<Aggregate.ServiceCall> toAggregateProto() {
    // " + serviceCalls.size()" is to cover the maximum number of limit exceeded buckets
    List<Aggregate.ServiceCall> allServiceCalls = Lists
            .newArrayListWithCapacity(Math.min(serviceCallCount, limit) + serviceCalls.size());
    for (Map.Entry<String, Map<String, MutableServiceCall>> outerEntry : serviceCalls.entrySet()) {
        for (Map.Entry<String, MutableServiceCall> innerEntry : outerEntry.getValue().entrySet()) {
            allServiceCalls
                    .add(innerEntry.getValue().toAggregateProto(outerEntry.getKey(), innerEntry.getKey()));
        }
    }
    if (allServiceCalls.size() <= limit) {
        // there could be limit exceeded buckets if hardLimitMultiplierWhileBuilding is 1
        for (Map.Entry<String, MutableServiceCall> entry : limitExceededBuckets.entrySet()) {
            allServiceCalls.add(entry.getValue().toAggregateProto(entry.getKey(), LIMIT_EXCEEDED_BUCKET));
        }
        sort(allServiceCalls);
        return allServiceCalls;
    }
    sort(allServiceCalls);
    List<Aggregate.ServiceCall> exceededServiceCalls = allServiceCalls.subList(limit, allServiceCalls.size());
    allServiceCalls = Lists.newArrayList(allServiceCalls.subList(0, limit));
    // do not modify original limit exceeded buckets since adding exceeded queries below
    Map<String, MutableServiceCall> limitExceededBuckets = copyLimitExceededBuckets();
    for (Aggregate.ServiceCall exceededServiceCall : exceededServiceCalls) {
        String queryType = exceededServiceCall.getType();
        MutableServiceCall limitExceededBucket = limitExceededBuckets.get(queryType);
        if (limitExceededBucket == null) {
            limitExceededBucket = new MutableServiceCall();
            limitExceededBuckets.put(queryType, limitExceededBucket);
        }
        limitExceededBucket.add(exceededServiceCall);
    }
    for (Map.Entry<String, MutableServiceCall> entry : limitExceededBuckets.entrySet()) {
        allServiceCalls.add(entry.getValue().toAggregateProto(entry.getKey(), LIMIT_EXCEEDED_BUCKET));
    }
    // need to re-sort now including limit exceeded bucket
    sort(allServiceCalls);
    return allServiceCalls;
}

From source file:org.grouplens.lenskit.eval.metrics.AbstractTestUserMetric.java

/**
 * Make a user result row. This expands it to the length of the user columns, inserting
 * {@code null}s as needed.
 * @return The result row, the same length as {@link #getUserColumnLabels()}.
 */
protected List<Object> userRow(Object... results) {
    int len = getUserColumnLabels().size();
    Preconditions.checkArgument(results.length <= len, "too many results");
    List<Object> row = Lists.newArrayListWithCapacity(len);
    Collections.addAll(row, results);
    while (row.size() < len) {
        row.add(null);
    }
    return row;
}

From source file:co.cask.cdap.metrics.query.MetricsRequestContext.java

private MetricsRequestContext(String typeId, MetricsRequestParser.PathType pathType,
        MetricsRequestParser.RequestType requestType, String requestId, String componentId, TagType tagType,
        String tag) {
    this.typeId = typeId;
    this.pathType = pathType;
    this.requestType = requestType;
    this.requestId = requestId;
    this.componentId = componentId;
    this.tagType = tagType;
    this.tag = tag;

    List<String> contextParts = Lists.newArrayListWithCapacity(4);
    if (typeId == null || typeId.isEmpty()) {
        this.contextPrefix = null;
    } else {
        contextParts.add(typeId);
        if (requestType != null) {
            if (!requestType.equals(MetricsRequestParser.RequestType.HANDLERS)) {
                contextParts.add(requestType.getCode());
            }
            if (requestId != null && !requestId.isEmpty()) {
                contextParts.add(requestId);
                if (componentId != null && !componentId.isEmpty()) {
                    contextParts.add(componentId);
                }
            }
        }
        this.contextPrefix = Joiner.on(".").join(contextParts);
    }
}

From source file:com.google.walkaround.wave.server.admin.FlagsHandler.java

@Override
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    List<FlagDisplayRecord> records = Lists.newArrayListWithCapacity(parsedFlags.size());
    for (Map.Entry<FlagDeclaration, Object> e : parsedFlags.entrySet()) {
        records.add(new FlagDisplayRecord(e.getKey().getName(), "" + e.getKey().getType().getName(),
                "" + e.getValue()));
    }
    Collections.sort(records, new Comparator<FlagDisplayRecord>() {
        @Override
        public int compare(FlagDisplayRecord a, FlagDisplayRecord b) {
            return a.getName().compareTo(b.getName());
        }
    });

    resp.setContentType("text/html");
    Admin.write(resp.getWriter(), new GxpContext(getLocale(req)), analyticsAccount,
            FlagsFragment.getGxpClosure(records, rawFlags));
}

From source file:com.opengamma.web.analytics.ViewportResultsJsonWriter.java

public String getJson(ViewportResults viewportResults) {
    List<ViewportResults.Cell> viewportCells = viewportResults.getResults();
    List<Object> results = Lists.newArrayListWithCapacity(viewportCells.size());
    for (ViewportResults.Cell cell : viewportCells) {
        Object formattedValue;
        Object cellValue = cell.getValue();
        ValueSpecification cellValueSpec = cell.getValueSpecification();
        formattedValue = _formatter.format(cellValue, cellValueSpec, viewportResults.getFormat());
        Collection<Object> history = cell.getHistory();
        Class<?> columnType = viewportResults.getColumnType(cell.getColumn());
        DataType columnFormat = _formatter.getDataType(columnType);
        Map<String, Object> valueMap = Maps.newHashMap();
        valueMap.put(VALUE, formattedValue);
        if (columnFormat == UNKNOWN) {
            // if the column type isn't known then send the type with the value
            valueMap.put(TYPE, _formatter.getDataTypeForValue(cellValue, cellValueSpec).name());
        }
        if (history != null) {
            valueMap.put(HISTORY, formatHistory(cellValueSpec, history));
        }
        if (cell.isError() || isError(formattedValue)) {
            valueMap.put(ERROR, true);
        }
        if (cell.getPositionId() != null) {
            valueMap.put(POSITION_ID, cell.getPositionId());
        }
        if (cell.getNodeId() != null) {
            valueMap.put(NODE_ID, cell.getNodeId());
        }
        // TODO add logging metadata to results
        results.add(valueMap);
    }
    String duration = _durationFormatter
            .format(new BigDecimal(viewportResults.getCalculationDuration().toMillisLong()));
    ImmutableMap<String, Object> resultsMap = ImmutableMap.of(VERSION, viewportResults.getVersion(),
            CALCULATION_DURATION, duration, DATA, results);
    return new JSONObject(resultsMap).toString();
}

From source file:com.nesscomputing.sequencer.HashSequencer.java

private HashSequencer(int size, Iterable<K> elements) {
    keyToInt = new TObjectIntHashMap<K>(size, LOAD_FACTOR, -1);
    intToKey = Lists.newArrayListWithCapacity(size);

    int i = 0;
    Iterator<K> iter = elements.iterator();
    while (iter.hasNext()) {
        K key = iter.next();
        this.intToKey.add(key);
        this.keyToInt.put(key, i++);
    }

    this.nextInt = intToKey.size();
}

From source file:com.google.gerrit.server.query.change.ConflictsPredicate.java

private static List<Predicate<ChangeData>> predicates(final Arguments args, String value, List<Change> changes)
        throws QueryParseException, OrmException {
    int indexTerms = 0;

    List<Predicate<ChangeData>> changePredicates = Lists.newArrayListWithCapacity(changes.size());
    final Provider<ReviewDb> db = args.db;
    for (final Change c : changes) {
        final ChangeDataCache changeDataCache = new ChangeDataCache(c, db, args.changeDataFactory,
                args.projectCache);
        List<String> files = listFiles(c, args, changeDataCache);
        indexTerms += 3 + files.size();
        if (indexTerms > args.indexConfig.maxTerms()) {
            // Short-circuit with a nice error message if we exceed the index
            // backend's term limit. This assumes that "conflicts:foo" is the entire
            // query; if there are more terms in the input, we might not
            // short-circuit here, which will result in a more generic error message
            // later on in the query parsing.
            throw new QueryParseException(TOO_MANY_FILES);
        }

        List<Predicate<ChangeData>> filePredicates = Lists.newArrayListWithCapacity(files.size());
        for (String file : files) {
            filePredicates.add(new EqualsPathPredicate(ChangeQueryBuilder.FIELD_PATH, file));
        }

        List<Predicate<ChangeData>> predicatesForOneChange = Lists.newArrayListWithCapacity(5);
        predicatesForOneChange.add(not(new LegacyChangeIdPredicate(c.getId())));
        predicatesForOneChange.add(new ProjectPredicate(c.getProject().get()));
        predicatesForOneChange.add(new RefPredicate(c.getDest().get()));

        predicatesForOneChange.add(or(or(filePredicates), new IsMergePredicate(args, value)));

        predicatesForOneChange.add(new ChangeOperatorPredicate(ChangeQueryBuilder.FIELD_CONFLICTS, value) {

            @Override
            public boolean match(ChangeData object) throws OrmException {
                Change otherChange = object.change();
                if (otherChange == null) {
                    return false;
                }
                if (!otherChange.getDest().equals(c.getDest())) {
                    return false;
                }
                SubmitTypeRecord str = object.submitTypeRecord();
                if (!str.isOk()) {
                    return false;
                }
                ObjectId other = ObjectId.fromString(object.currentPatchSet().getRevision().get());
                ConflictKey conflictsKey = new ConflictKey(changeDataCache.getTestAgainst(), other, str.type,
                        changeDataCache.getProjectState().isUseContentMerge());
                Boolean conflicts = args.conflictsCache.getIfPresent(conflictsKey);
                if (conflicts != null) {
                    return conflicts;
                }
                try (Repository repo = args.repoManager.openRepository(otherChange.getProject());
                        CodeReviewRevWalk rw = CodeReviewCommit.newRevWalk(repo)) {
                    conflicts = !args.submitDryRun.run(str.type, repo, rw, otherChange.getDest(),
                            changeDataCache.getTestAgainst(), other, getAlreadyAccepted(repo, rw));
                    args.conflictsCache.put(conflictsKey, conflicts);
                    return conflicts;
                } catch (IntegrationException | NoSuchProjectException | IOException e) {
                    throw new OrmException(e);
                }
            }

            @Override
            public int getCost() {
                return 5;
            }

            private Set<RevCommit> getAlreadyAccepted(Repository repo, RevWalk rw) throws IntegrationException {
                try {
                    Set<RevCommit> accepted = new HashSet<>();
                    SubmitDryRun.addCommits(changeDataCache.getAlreadyAccepted(repo), rw, accepted);
                    ObjectId tip = changeDataCache.getTestAgainst();
                    if (tip != null) {
                        accepted.add(rw.parseCommit(tip));
                    }
                    return accepted;
                } catch (OrmException | IOException e) {
                    throw new IntegrationException("Failed to determine already accepted commits.", e);
                }
            }
        });
        changePredicates.add(and(predicatesForOneChange));
    }
    return changePredicates;
}