Example usage for com.google.common.collect Lists newArrayListWithExpectedSize

Introduction

This page collects example usages of com.google.common.collect Lists newArrayListWithExpectedSize.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithExpectedSize(int estimatedSize) 

Document

Creates an ArrayList instance to hold estimatedSize elements, plus an unspecified amount of padding; you almost certainly mean to call newArrayListWithCapacity(int) (see that method's documentation for further advice on usage).
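
For reference, here is a minimal self-contained sketch of the pattern most of the examples below follow: size the list from the expected number of results, then fill it in a loop. The class name and variables are illustrative only and are not taken from any of the sources below.

import com.google.common.collect.Lists;

import java.util.List;

public class NewArrayListWithExpectedSizeExample {

    public static void main(String[] args) {
        // Hypothetical input; any collection whose size predicts the result size works.
        List<String> words = Lists.newArrayList("alpha", "beta", "gamma");

        // The argument is a sizing hint, not a limit: the list still grows past it if needed.
        List<Integer> lengths = Lists.newArrayListWithExpectedSize(words.size());
        for (String word : words) {
            lengths.add(word.length());
        }

        System.out.println(lengths); // prints [5, 4, 5]
    }
}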

Usage

From source file: ru.codeinside.gses.webui.data.ControlledTasksQuery.java

private TaskQuery createTaskQuery() {
    TaskQuery query = Flash.flash().getProcessEngine().getTaskService().createTaskQuery();
    ((TaskQueryImpl2) query).setIgnoreAssignee(false);
    List<String> resultGroups;
    if (superSupervisor) {
        if (orgGroups == null && empGroups != null) {
            resultGroups = empGroups;
        } else if (empGroups == null && orgGroups != null) {
            resultGroups = orgGroups;
        } else if (empGroups != null && orgGroups != null) {
            orgGroups.addAll(empGroups);
            resultGroups = orgGroups;
        } else {
            resultGroups = Lists.newArrayListWithExpectedSize(0);
        }
    } else {
        resultGroups = Lists.newArrayListWithExpectedSize(controlledGroups.size());
        resultGroups.addAll(controlledGroups);
        if (orgGroups == null && empGroups != null) {
            resultGroups.retainAll(empGroups);
        } else if (empGroups == null && orgGroups != null) {
            resultGroups.retainAll(orgGroups);
        } else if (empGroups != null && orgGroups != null) {
            orgGroups.addAll(empGroups);
            resultGroups.retainAll(orgGroups);
        }
    }
    if (!resultGroups.isEmpty()) {
        query.taskCandidateGroupIn(resultGroups);
    }
    if (processInstanceId != null) {
        query.processInstanceId(processInstanceId);
    }
    if (type != null) {
        query.processVariableValueEquals(VAR_PROCEDURE_TYPE_NAME, Integer.toString(type.ordinal()));
    }
    if (!StringUtils.isEmpty(serviceId)) {
        query.processVariableValueEquals(VAR_SERVICE_ID, serviceId);
    }
    if (taskKey != null && !taskKey.isEmpty()) {
        query.taskDefinitionKey(taskKey);
    }
    if (procedureId != null && !procedureId.isEmpty()) {
        query.processVariableValueEquals(VAR_PROCEDURE_ID, procedureId);
    }
    if (declarantTypeName != null && declarantTypeValue != null) {
        query.processVariableValueEquals(declarantTypeName, declarantTypeValue);
    }
    if (requester != null && !requester.isEmpty()) {
        query.processVariableValueEquals(VAR_REQUESTER_LOGIN, requester);
    }
    if (fromDate != null) {
        query.taskCreatedAfter(DateUtils.addSeconds(fromDate, -1));
    }
    if (toDate != null) {
        query.taskCreatedBefore(DateUtils.addSeconds(toDate, 1));
    }
    if (overdue) {
        ((TaskQueryImpl2) query).setOverdue(true);
    }
    return query;
}

From source file: org.n52.svalbard.decode.json.InsertSensorRequestDecoder.java

protected List<SwesFeatureRelationship> parseFeatureRelationships(JsonNode node) throws DecodingException {
    if (node.isArray()) {
        List<SwesFeatureRelationship> list = Lists.newArrayListWithExpectedSize(node.size());
        for (JsonNode n : node) {
            if (n.isObject()) {
                list.add(parseFeatureRelationship(n));
            }
        }
        return list;
    } else if (node.isObject()) {
        return Collections.singletonList(parseFeatureRelationship(node));
    } else {
        return null;
    }
}

From source file: com.google.gerrit.server.index.OnlineReindexer.java

public void activateIndex() {
    indexes.setSearchIndex(index);
    log.info("Using schema version {}", version(index));
    try {
        index.markReady(true);
    } catch (IOException e) {
        log.warn("Error activating new schema version {}", version(index));
    }

    List<I> toRemove = Lists.newArrayListWithExpectedSize(1);
    for (I i : indexes.getWriteIndexes()) {
        if (version(i) != version(index)) {
            toRemove.add(i);
        }
    }
    for (I i : toRemove) {
        try {
            i.markReady(false);
            indexes.removeWriteIndex(version(i));
        } catch (IOException e) {
            log.warn("Error deactivating old schema version {}", version(i));
        }
    }
}

From source file: org.apache.drill.exec.planner.logical.DirPathBuilder.java

private void initPathComponents() {
    int maxHierarchy = partitionDescriptor.getMaxHierarchyLevel();
    dirNameList = Lists.newArrayListWithExpectedSize(maxHierarchy);
    conjunctList = Lists.newArrayListWithExpectedSize(maxHierarchy);
    for (int i = 0; i < maxHierarchy; i++) {
        dirNameList.add(EMPTY_STRING);
        conjunctList.add(null);
    }
}

From source file: org.attribyte.api.http.impl.servlet.Bridge.java

@SuppressWarnings("unchecked")
/**
 * Creates a request from a servlet HTTP request.
 * <p>
 *   Sets an attribute, <code>remoteAddr</code> with the address reported
 *   by the servlet API.
 * </p>
 * @param request The servlet request.
 * @param maxBodyBytes The maximum number of bytes read. If < 1, the body is not read.
 */
public static final Request fromServletRequest(final HttpServletRequest request, final int maxBodyBytes)
        throws IOException {

    Map<String, Header> headers = Maps.newHashMapWithExpectedSize(8);
    List<String> valueList = Lists.newArrayListWithExpectedSize(2);
    Enumeration headerNames = request.getHeaderNames();
    while (headerNames.hasMoreElements()) {
        String name = (String) headerNames.nextElement();
        Enumeration headerValues = request.getHeaders(name);
        valueList.clear();
        while (headerValues.hasMoreElements()) {
            valueList.add((String) headerValues.nextElement());
        }

        if (valueList.size() == 1) {
            headers.put(name, new Header(name, valueList.get(0)));
        } else {
            headers.put(name, new Header(name, valueList.toArray(new String[valueList.size()])));
        }
    }

    final String queryString = request.getQueryString();

    final String requestURL = Strings.isNullOrEmpty(queryString) ? request.getRequestURL().toString()
            : request.getRequestURL().append('?').append(queryString).toString();

    final Map parameterMap = request.getParameterMap();

    Method method = Method.fromString(request.getMethod());
    switch (method) {
    case GET: {
        GetRequestBuilder grb = new GetRequestBuilder(requestURL, parameterMap);
        grb.addHeaders(headers);
        grb.addAttribute("remoteAddr", request.getRemoteAddr());
        return grb.create();
    }
    case HEAD: {
        HeadRequestBuilder hrb = new HeadRequestBuilder(requestURL, parameterMap);
        hrb.addHeaders(headers);
        hrb.addAttribute("remoteAddr", request.getRemoteAddr());
        return hrb.create();
    }
    case DELETE: {
        DeleteRequestBuilder drb = new DeleteRequestBuilder(requestURL, request.getParameterMap());
        drb.addHeaders(headers);
        drb.addAttribute("remoteAddr", request.getRemoteAddr());
        return drb.create();
    }
    }

    if (parameterMap != null && parameterMap.size() > 0) {
        FormPostRequestBuilder prb = new FormPostRequestBuilder(requestURL);
        prb.addHeaders(headers);
        prb.addParameters(request.getParameterMap());
        prb.addAttribute("remoteAddr", request.getRemoteAddr());
        return prb.create();
    } else {
        byte[] body = null;
        if (maxBodyBytes > 0) {
            InputStream is = request.getInputStream();
            try {
                body = Request.bodyFromInputStream(is, maxBodyBytes);
            } finally {
                is.close();
            }
        } else {
            ByteStreams.toByteArray(request.getInputStream()); //Read, but ignore the body...
        }

        if (method == Method.POST) {
            PostRequestBuilder prb = new PostRequestBuilder(requestURL, body);
            prb.addHeaders(headers);
            prb.addAttribute("remoteAddr", request.getRemoteAddr());
            return prb.create();
        } else {
            PutRequestBuilder prb = new PutRequestBuilder(requestURL, body);
            prb.addHeaders(headers);
            prb.addAttribute("remoteAddr", request.getRemoteAddr());
            return prb.create();
        }
    }
}

From source file: com.google.auto.factory.processor.Parameter.java

static ImmutableSet<Parameter> forParameterList(List<? extends VariableElement> variables) {
    List<TypeMirror> variableTypes = Lists.newArrayListWithExpectedSize(variables.size());
    for (VariableElement var : variables) {
        variableTypes.add(var.asType());
    }
    return forParameterList(variables, variableTypes);
}

From source file: org.apache.hadoop.hdfs.server.namenode.XAttrFeature.java

public ImmutableList<XAttr> getXAttrs() throws TransactionContextException, StorageException {
    Collection<StoredXAttr> extendedAttributes = EntityManager.findList(StoredXAttr.Finder.ByInodeId, inodeId);

    if (extendedAttributes == null)
        return EMPTY_ENTRY_LIST;

    List<XAttr> attrs = Lists.newArrayListWithExpectedSize(extendedAttributes.size());
    for (StoredXAttr attr : extendedAttributes) {
        attrs.add(convertStoredtoXAttr(attr));
    }
    return ImmutableList.copyOf(attrs);
}

From source file: org.apache.phoenix.hive.mapreduce.PhoenixInputSplit.java

@Override
public void readFields(DataInput in) throws IOException {
    super.readFields(in);

    int count = WritableUtils.readVInt(in);
    scans = Lists.newArrayListWithExpectedSize(count);
    for (int i = 0; i < count; i++) {
        byte[] protoScanBytes = new byte[WritableUtils.readVInt(in)];
        in.readFully(protoScanBytes);
        ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
        Scan scan = ProtobufUtil.toScan(protoScan);
        scans.add(scan);
    }
    init();

    query = WritableUtils.readString(in);
    regionSize = WritableUtils.readVLong(in);
}

From source file: com.palantir.atlasdb.keyvalue.cassandra.jmx.CassandraJmxCompactionManager.java

private boolean removeHintedHandoff(long timeoutInSeconds) throws InterruptedException, TimeoutException {
    List<HintedHandOffDeletionTask> hintedHandoffDeletionTasks = Lists
            .newArrayListWithExpectedSize(clients.size());
    for (CassandraJmxCompactionClient client : clients) {
        hintedHandoffDeletionTasks.add(new HintedHandOffDeletionTask(client));
    }

    return executeInParallel(exec, hintedHandoffDeletionTasks, timeoutInSeconds);
}

From source file: edu.cmu.cs.lti.ark.fn.identification.latentmodel.LatentFeatureExtractor.java

/**
 * Extract features for a (frame, target, hidden l.u.) tuple
 *
 * @param frameName the name of the candidate frame
 * @param targetTokenIdxs the token indexes (0-indexed) of the target
 * @param hiddenLexUnit the latent l.u.
 * @param allLemmaTags the sentence in AllLemmaTags format
 * @param parse the dependency parse for the sentence
 * @param parseHasLemmas whether or not allLemma already includes lemmas for each token
 * @return a map from feature name -> count
 */
public IntCounter<String> extractFeatures(String frameName, int[] targetTokenIdxs, String hiddenLexUnit,
        String[][] allLemmaTags, DependencyParse parse, boolean parseHasLemmas) {
    // Get lemmas and postags for prototype
    // hiddenLexUnit is in format: "form1_pos1 form2_pos2 ... formn_posn"
    final String[] hiddenTokenAndPos = hiddenLexUnit.split(" ");
    final List<String> hiddenTokenAndCpostags = Lists.newArrayListWithExpectedSize(hiddenTokenAndPos.length);
    final List<String> hiddenTokens = Lists.newArrayListWithExpectedSize(hiddenTokenAndPos.length);
    final List<String> hiddenCpostags = Lists.newArrayListWithExpectedSize(hiddenTokenAndPos.length);
    final List<String> hiddenLemmaAndCpostags = Lists.newArrayListWithExpectedSize(hiddenTokenAndPos.length);
    for (String hiddenTok : hiddenTokenAndPos) {
        final String[] arr = hiddenTok.split("_");
        final String form = arr[0];
        final String postag = arr[1].toUpperCase();
        final String cpostag = getCpostag(postag);
        final String lemma = lemmatizer.getLemma(form, postag);
        hiddenCpostags.add(cpostag);
        hiddenTokens.add(form);
        hiddenTokenAndCpostags.add(form + "_" + cpostag);
        hiddenLemmaAndCpostags.add(lemma + "_" + cpostag);
    }
    final String hiddenTokenAndCpostagsStr = UNDERSCORE.join(hiddenTokenAndCpostags);
    final String hiddenCpostagsStr = UNDERSCORE.join(hiddenCpostags);
    final String hiddenLemmaAndCpostagsStr = UNDERSCORE.join(hiddenLemmaAndCpostags);

    // Get lemmas and postags for target
    final List<String> actualTokenAndCpostags = Lists.newArrayListWithExpectedSize(targetTokenIdxs.length);
    final List<String> actualTokens = Lists.newArrayListWithExpectedSize(targetTokenIdxs.length);
    final List<String> actualCpostags = Lists.newArrayListWithExpectedSize(targetTokenIdxs.length);
    final List<String> actualLemmaAndCpostags = Lists.newArrayListWithExpectedSize(targetTokenIdxs.length);
    Arrays.sort(targetTokenIdxs);
    for (int tokenIdx : targetTokenIdxs) {
        final String form = allLemmaTags[PARSE_TOKEN_ROW][tokenIdx];
        final String postag = allLemmaTags[PARSE_POS_ROW][tokenIdx].toUpperCase();
        final String cpostag = getCpostag(postag);
        final String lemma = parseHasLemmas ? allLemmaTags[PARSE_LEMMA_ROW][tokenIdx]
                : lemmatizer.getLemma(form, postag);
        actualTokens.add(form);
        actualTokenAndCpostags.add(form + "_" + cpostag);
        actualCpostags.add(cpostag);
        actualLemmaAndCpostags.add(lemma + "_" + cpostag);
    }
    final String actualTokenAndCpostagsStr = UNDERSCORE.join(actualTokenAndCpostags);
    final String actualCpostagsStr = UNDERSCORE.join(actualCpostags);
    final String actualLemmaAndCpostagsStr = UNDERSCORE.join(actualLemmaAndCpostags);

    final Set<String> relations = wnRelations.getRelations(SPACE.join(actualTokens), SPACE.join(hiddenTokens));

    final IntCounter<String> featureMap = new IntCounter<String>();
    /*
     * base features
     * will be conjoined in various ways
     * (always conjoined with the frame name)
     */
    final String frameFtr = "f:" + frameName;
    final String actualCpostagsFtr = "aP:" + actualCpostagsStr;
    final String actualLemmaAndCpostagsFtr = "aLP:" + actualLemmaAndCpostagsStr;
    final String hiddenTokenAndCpostagsFtr = "hT:" + hiddenTokenAndCpostagsStr;
    final String hiddenCpostagsFtr = "hP:" + hiddenCpostagsStr;
    final String hiddenLemmaAndCpostagsFtr = "hLP:" + hiddenLemmaAndCpostagsStr;

    // add a feature for each word in the sentence
    for (int tokenIdx : xrange(allLemmaTags[0].length)) {
        final String form = allLemmaTags[PARSE_TOKEN_ROW][tokenIdx];
        final String postag = allLemmaTags[PARSE_POS_ROW][tokenIdx].toUpperCase();
        final String cpostag = getCpostag(postag);
        final String lemma = parseHasLemmas ? allLemmaTags[PARSE_LEMMA_ROW][tokenIdx]
                : lemmatizer.getLemma(form, postag);
        featureMap.increment(UNDERSCORE.join("sTP:" + form + "_" + cpostag, frameFtr));
        featureMap.increment(UNDERSCORE.join("sLP:" + lemma + "_" + cpostag, frameFtr));
    }

    featureMap.increment(UNDERSCORE.join(hiddenTokenAndCpostagsFtr, frameFtr));
    featureMap.increment(UNDERSCORE.join(hiddenLemmaAndCpostagsFtr, frameFtr));

    // extract features for each WordNet relation by which the target and prototype are connected
    for (String relation : relations) {
        if (relation.equals(WordNetRelations.NO_RELATION))
            continue;
        final String relationFeature = "tRLn:" + relation;
        featureMap.increment(UNDERSCORE.join(relationFeature, frameFtr));
        featureMap.increment(UNDERSCORE.join(relationFeature, hiddenTokenAndCpostagsStr, frameFtr));
        featureMap.increment(UNDERSCORE.join(relationFeature, hiddenTokenAndCpostagsStr, hiddenCpostagsFtr,
                actualCpostagsFtr, frameFtr));
    }

    if (hiddenTokenAndCpostagsStr.equals(actualTokenAndCpostagsStr)) {
        final String tokenMatchFtr = "sTs";
        featureMap.increment(UNDERSCORE.join(tokenMatchFtr, frameFtr));
        featureMap.increment(UNDERSCORE.join(tokenMatchFtr, hiddenTokenAndCpostagsFtr, frameFtr));
        featureMap.increment(UNDERSCORE.join(tokenMatchFtr, actualCpostagsFtr, hiddenCpostagsFtr, frameFtr));
        featureMap.increment(
                UNDERSCORE.join(tokenMatchFtr, actualLemmaAndCpostagsFtr, hiddenLemmaAndCpostagsFtr, frameFtr));
    }
    if (hiddenLemmaAndCpostagsStr.equals(actualLemmaAndCpostagsStr)) {
        final String lemmaMatchFtr = "sLs";
        featureMap.increment(UNDERSCORE.join(lemmaMatchFtr, frameFtr));
        featureMap.increment(UNDERSCORE.join(lemmaMatchFtr, hiddenTokenAndCpostagsFtr, frameFtr));
        featureMap.increment(UNDERSCORE.join(lemmaMatchFtr, actualCpostagsFtr, hiddenCpostagsFtr, frameFtr));
        featureMap.increment(
                UNDERSCORE.join(lemmaMatchFtr, actualLemmaAndCpostagsFtr, hiddenLemmaAndCpostagsFtr, frameFtr));
    }

    /*
     * syntactic features
     */
    final DependencyParse[] sortedNodes = parse.getIndexSortedListOfNodes();
    final DependencyParse head = DependencyParse.getHeuristicHead(sortedNodes, targetTokenIdxs);
    final String headCpostag = getCpostag(head.getPOS());

    final List<DependencyParse> children = head.getChildren();

    final SortedSet<String> depLabels = Sets.newTreeSet(); // unordered set of arc labels of children
    for (DependencyParse child : children) {
        depLabels.add(child.getLabelType().toUpperCase());
    }
    final String dependencyFtr = "d:" + UNDERSCORE.join(depLabels);
    featureMap.increment(UNDERSCORE.join(dependencyFtr, frameFtr));

    if (headCpostag.equals("V")) {
        final List<String> subcat = Lists.newArrayListWithExpectedSize(children.size()); // ordered arc labels of children
        for (DependencyParse child : children) {
            final String labelType = child.getLabelType().toUpperCase();
            if (!labelType.equals("SUB") && !labelType.equals("P") && !labelType.equals("CC")) {
                // TODO(smt): why exclude "sub"?
                subcat.add(labelType);
            }
        }
        final String subcatFtr = "sC:" + UNDERSCORE.join(subcat);
        featureMap.increment(UNDERSCORE.join(subcatFtr, frameFtr));
    }

    final DependencyParse parent = head.getParent();
    final String parentPosFtr = "pP:" + ((parent == null) ? "NULL" : parent.getPOS().toUpperCase());
    featureMap.increment(UNDERSCORE.join(parentPosFtr, frameFtr));
    final String parentLabelFtr = "pL:" + ((parent == null) ? "NULL" : parent.getLabelType().toUpperCase());
    featureMap.increment(UNDERSCORE.join(parentLabelFtr, frameFtr));

    return featureMap;
}