Example usage for com.google.common.collect Lists newArrayListWithCapacity

List of usage examples for com.google.common.collect Lists newArrayListWithCapacity

Introduction

In this page you can find the example usage for com.google.common.collect Lists newArrayListWithCapacity.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithCapacity(int initialArraySize) 

Source Link

Document

Creates an ArrayList instance backed by an array with the specified initial size; simply delegates to ArrayList#ArrayList(int).

Usage

From source file:org.gradle.api.internal.artifacts.ModuleComponentSelectorSerializer.java

/**
 * Reconstructs a {@link VersionConstraint} from its serialized form: three
 * version strings (required, preferred, strict) followed by a length-prefixed
 * list of rejected versions.
 *
 * @param decoder source of the serialized constraint
 * @return an immutable version constraint built from the decoded fields
 * @throws IOException if the decoder fails to read
 */
public VersionConstraint readVersionConstraint(Decoder decoder) throws IOException {
    final String required = decoder.readString();
    final String preferred = decoder.readString();
    final String strictly = decoder.readString();
    final int rejectCount = decoder.readSmallInt();
    final List<String> rejects = Lists.newArrayListWithCapacity(rejectCount);
    for (int index = 0; index < rejectCount; index++) {
        rejects.add(decoder.readString());
    }
    return new DefaultImmutableVersionConstraint(preferred, required, strictly, rejects);
}

From source file:org.apache.phoenix.pig.util.SqlQueryToColumnInfoFunction.java

@Override
public List<ColumnInfo> apply(String sqlQuery) {
    // Compiles the given SELECT query against Phoenix and returns one
    // ColumnInfo (name + SQL type) per projected column.
    Preconditions.checkNotNull(sqlQuery);
    Connection connection = null;
    List<ColumnInfo> columnInfos = null;
    try {
        connection = ConnectionUtil.getInputConnection(this.configuration);
        final Statement statement = connection.createStatement();
        final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
        final QueryPlan queryPlan = pstmt.compileQuery(sqlQuery);
        final List<? extends ColumnProjector> projectedColumns = queryPlan.getProjector().getColumnProjectors();
        // Fix: the presized list was previously a dead store, immediately
        // overwritten by a lazy Lists.transform view (which would only be
        // evaluated after the connection was closed). Materialize eagerly
        // into the presized list instead.
        columnInfos = Lists.newArrayListWithCapacity(projectedColumns.size());
        for (final ColumnProjector columnProjector : projectedColumns) {
            columnInfos.add(new ColumnInfo(columnProjector.getName(),
                    columnProjector.getExpression().getDataType().getSqlType()));
        }
    } catch (SQLException e) {
        LOG.error(String.format(" Error [%s] parsing SELECT query [%s] ", e.getMessage(), sqlQuery));
        throw new RuntimeException(e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException sqle) {
                LOG.error("Error closing connection!!");
                throw new RuntimeException(sqle);
            }
        }
    }
    return columnInfos;
}

From source file:kr.debop4j.timeperiod.timerange.YearRangeCollection.java

/** Returns one {@link YearRange} per year covered by this collection, in ascending order. */
public List<YearRange> getYears() {
    int start = getStartYear();
    // Hoisted out of the loop: getYearCount() is invariant but was previously
    // re-evaluated on every iteration (both in the condition and the capacity hint).
    int yearCount = getYearCount();

    List<YearRange> years = Lists.newArrayListWithCapacity(yearCount);
    for (int y = 0; y < yearCount; y++)
        years.add(new YearRange(start + y, getTimeCalendar()));

    return years;
}

From source file:org.apache.cassandra.dht.tokenallocator.TokenAllocation.java

/**
 * Returns a copy of {@code tokens} where any token already owned by another
 * endpoint (per {@code tokenMetadata}) is nudged via {@code increaseSlightly()}
 * until it is free. A clash with a node inside the allocation ring is a
 * configuration error rather than something to adjust around.
 *
 * @throws ConfigurationException if a token is already assigned to a node in the allocation ring
 */
private static Collection<Token> adjustForCrossDatacenterClashes(final TokenMetadata tokenMetadata,
        StrategyAdapter strategy, Collection<Token> tokens) {
    List<Token> filtered = Lists.newArrayListWithCapacity(tokens.size());

    for (Token t : tokens) {
        // Single lookup per iteration; the original called getEndpoint(t)
        // twice (once in the condition, once in the body).
        InetAddress other;
        while ((other = tokenMetadata.getEndpoint(t)) != null) {
            if (strategy.inAllocationRing(other))
                throw new ConfigurationException(String.format(
                        "Allocated token %s already assigned to node %s. Is another node also allocating tokens?",
                        t, other));
            t = t.increaseSlightly();
        }
        filtered.add(t);
    }
    return filtered;
}

From source file:com.opengamma.web.analytics.formatting.LocalDateLabelledMatrix1DFormatter.java

/**
 * Builds the expanded representation of a labelled matrix: a map holding the
 * [label, formatted value] rows under {@code DATA} and the two column titles
 * under {@code LABELS}.
 */
private Map<String, Object> formatExpanded(LocalDateLabelledMatrix1D value, ValueSpecification valueSpec) {
    int rowCount = value.getKeys().length;
    List<List<String>> rows = Lists.newArrayListWithCapacity(rowCount);
    for (int row = 0; row < rowCount; row++) {
        String label = value.getLabels()[row].toString();
        String cell = _doubleFormatter.formatCell(value.getValues()[row], valueSpec, null);
        rows.add(ImmutableList.of(label, cell));
    }
    Map<String, Object> resultsMap = Maps.newHashMap();
    resultsMap.put(DATA, rows);
    // Fall back to the generic titles when the matrix supplies none.
    String labelsTitle = (value.getLabelsTitle() == null) ? LABEL : value.getLabelsTitle();
    String valuesTitle = (value.getValuesTitle() == null) ? VALUE : value.getValuesTitle();
    resultsMap.put(LABELS, ImmutableList.of(labelsTitle, valuesTitle));
    return resultsMap;
}

From source file:com.netflix.atlas.client.interpreter.MultipleExprList.java

@Override
public ListValueExpression addFilter(Query query) {
    // Rebuild the expression list with the query filter applied to each entry.
    List<Object> filtered = Lists.newArrayListWithCapacity(expressions.size());
    for (Object expr : expressions) {
        // Entries are either ValueExpression or ListValueExpression; both
        // expose addFilter, so dispatch on the concrete type.
        Object withFilter = (expr instanceof ValueExpression)
                ? ((ValueExpression) expr).addFilter(query)
                : ((ListValueExpression) expr).addFilter(query);
        filtered.add(withFilter);
    }
    return new MultipleExprList(filtered);
}

From source file:org.apache.mahout.cf.taste.hadoop.item.VectorAndPrefsWritable.java

@Override
public void readFields(DataInput in) throws IOException {
    // Deserialize the vector portion first.
    VectorWritable vectorWritable = new VectorWritable();
    vectorWritable.readFields(in);
    vector = vectorWritable.get();
    // Then the parallel userID / preference-value lists, length-prefixed
    // with an unsigned varint.
    int count = Varint.readUnsignedVarInt(in);
    userIDs = Lists.newArrayListWithCapacity(count);
    values = Lists.newArrayListWithCapacity(count);
    for (int n = 0; n < count; n++) {
        userIDs.add(Varint.readSignedVarLong(in));
        values.add(in.readFloat());
    }
}

From source file:com.google.template.soy.basetree.MixinParentNode.java

/**
 * Copy constructor: deep-copies every child of {@code orig} and re-parents
 * each copy to {@code newMaster}.
 *
 * @param orig The node to copy.
 * @param newMaster The master node for the copy.
 * @param copyState State threaded through the recursive copy.
 */
public MixinParentNode(MixinParentNode<N> orig, ParentNode<N> newMaster, CopyState copyState) {
    this.master = newMaster;

    this.needsEnvFrameDuringInterp = orig.needsEnvFrameDuringInterp;

    this.children = Lists.newArrayListWithCapacity(orig.children.size());
    for (N origChild : orig.children) {
        // copy() returns the base node type; the cast back to N is safe because
        // a node always copies to its own type.
        @SuppressWarnings("unchecked")
        N copiedChild = (N) origChild.copy(copyState);
        copiedChild.setParent(this.master);
        this.children.add(copiedChild);
    }
}

From source file:org.sonar.duplications.internal.pmd.PmdBlockChunker.java

/**
 * Splits the given token lines into overlapping blocks of {@code blockSize}
 * consecutive fragments, each fingerprinted with a polynomial rolling hash.
 * Returns an empty list when there are fewer fragments than one block.
 */
public List<Block> chunk(String resourceId, List<TokensLine> fragments) {
    // Collapse each run of consecutive fragments with identical values down to
    // its first and last element, so long repeated sequences don't explode the
    // number of (identical) blocks produced below.
    List<TokensLine> filtered = Lists.newArrayList();
    int i = 0;
    while (i < fragments.size()) {
        TokensLine first = fragments.get(i);
        int j = i + 1;
        // Advance j past every fragment equal to the run's first value.
        while (j < fragments.size() && fragments.get(j).getValue().equals(first.getValue())) {
            j++;
        }
        filtered.add(fragments.get(i));
        if (i < j - 1) {
            // Run longer than one: also keep its last element.
            filtered.add(fragments.get(j - 1));
        }
        i = j;
    }
    fragments = filtered;

    if (fragments.size() < blockSize) {
        return Collections.emptyList();
    }
    TokensLine[] fragmentsArr = fragments.toArray(new TokensLine[fragments.size()]);
    // One block per window start position.
    List<Block> blocks = Lists.newArrayListWithCapacity(fragmentsArr.length - blockSize + 1);
    long hash = 0;
    int first = 0;
    int last = 0;
    // Seed the rolling hash with the first blockSize-1 fragments; each window's
    // final fragment is folded in at the top of the loop below.
    for (; last < blockSize - 1; last++) {
        hash = hash * PRIME_BASE + fragmentsArr[last].getHashCode();
    }
    Block.Builder blockBuilder = Block.builder().setResourceId(resourceId);
    // Slide the [first, last] window across the fragments, emitting one block
    // per position and updating the hash incrementally in O(1).
    for (; last < fragmentsArr.length; last++, first++) {
        TokensLine firstFragment = fragmentsArr[first];
        TokensLine lastFragment = fragmentsArr[last];
        // add last statement to hash
        hash = hash * PRIME_BASE + lastFragment.getHashCode();
        // create block
        Block block = blockBuilder.setBlockHash(new ByteArray(hash)).setIndexInFile(first)
                .setLines(firstFragment.getStartLine(), lastFragment.getEndLine())
                .setUnit(firstFragment.getStartUnit(), lastFragment.getEndUnit()).build();
        blocks.add(block);
        // remove first statement from hash (power == PRIME_BASE^(blockSize-1),
        // the coefficient of the outgoing fragment — assumed precomputed
        // elsewhere in the class; confirm against the field definition)
        hash -= power * firstFragment.getHashCode();
    }
    return blocks;
}

From source file:com.oculusinfo.annotation.io.AnnotationIOFactory.java

/**
 * Collects the non-null names of the given child factories.
 *
 * @param childFactories factories to inspect; may be {@code null} or empty
 * @return the non-null factory names, in iteration order (empty when input is null)
 */
private static List<String> getPyramidTypes(List<ConfigurableFactory<?>> childFactories) {
    // Bug fix: the original dereferenced childFactories.size() for the capacity
    // hint BEFORE the null check below, so a null argument threw NPE despite
    // the guard. Size defensively instead.
    List<String> annotationTypes = Lists
            .newArrayListWithCapacity(childFactories == null ? 0 : childFactories.size());

    //add any child factories
    if (childFactories != null) {
        for (ConfigurableFactory<?> factory : childFactories) {
            String factoryName = factory.getName();
            if (factoryName != null) {
                annotationTypes.add(factoryName);
            }
        }
    }

    return annotationTypes;
}