Example usage for com.google.common.collect Lists newArrayListWithExpectedSize

List of usage examples for com.google.common.collect Lists newArrayListWithExpectedSize

Introduction

On this page you can find example usage for com.google.common.collect Lists newArrayListWithExpectedSize.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithExpectedSize(int estimatedSize) 

Document

Creates an ArrayList instance to hold estimatedSize elements, plus an unspecified amount of padding; you almost certainly mean to call newArrayListWithCapacity (see that method for further advice on usage).
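
In other words, prefer newArrayListWithCapacity when you know the exact element count and newArrayListWithExpectedSize when the count is only an estimate. A minimal sketch of the difference (illustrative class and variable names; assumes only Guava on the classpath):

import com.google.common.collect.Lists;
import java.util.List;

public class ExpectedSizeExample {
    public static void main(String[] args) {
        // Backing array sized to exactly 10 elements; delegates to new ArrayList<>(10).
        List<String> exact = Lists.newArrayListWithCapacity(10);

        // Backing array sized to hold 10 elements plus an unspecified amount of padding,
        // for when 10 is only an estimate of the final size.
        List<String> estimated = Lists.newArrayListWithExpectedSize(10);

        exact.add("a");
        estimated.add("b");
        System.out.println(exact.size() + ", " + estimated.size()); // prints "1, 1"
    }
}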

Usage

From source file:org.attribyte.api.pubsub.impl.BroadcastNotifier.java

@Override
public void run() {
    final Timer.Context ctx = broadcastTimer.time();
    try {

        if (subscriptionCache != null) {
            List<Subscription> cachedSubscriptions = subscriptionCache
                    .getSubscriptions(notification.getTopic());
            if (cachedSubscriptions != null) {
                sendNotifications(cachedSubscriptions);
                return;
            }
        }

        if (!hub.getDatastore().hasActiveSubscriptions(notification.getTopic().getId())) {
            if (subscriptionCache != null) {
                subscriptionCache.cacheSubscriptions(notification.getTopic(), ImmutableList.<Subscription>of());
            }
            return;
        }

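        // Working list sized to the select batch size (1024); cleared after each batch is sent.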
        final List<Subscription> subscriptions = Lists.newArrayListWithExpectedSize(1024);
        final ImmutableList.Builder<Subscription> cachedSubscriptions = subscriptionCache != null
                ? ImmutableList.<Subscription>builder()
                : null;

        long nextSelectId = 0L;
        do {
            nextSelectId = hub.getDatastore().getActiveSubscriptions(notification.getTopic(), subscriptions,
                    nextSelectId, 1024);
            sendNotifications(subscriptions);
            if (subscriptionCache != null) {
                cachedSubscriptions.addAll(subscriptions);
            }
            subscriptions.clear();
        } while (nextSelectId != HubDatastore.LAST_ID);

        if (subscriptionCache != null) {
            subscriptionCache.cacheSubscriptions(notification.getTopic(), cachedSubscriptions.build());
        }

    } catch (DatastoreException de) {
        hub.getLogger().error("Problem selecting subscriptions for notification", de);
    } finally {
        ctx.stop();
    }
}

From source file:defrac.intellij.config.DefracConfig.java

@NotNull
public DefracPlatform[] getTargets() {
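    // At most one platform per configured target name; invalid entries are skipped below.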
    final ArrayList<DefracPlatform> platforms = Lists.newArrayListWithExpectedSize(targets.length);

    for (final String target : targets) {
        if (isNullOrEmpty(target)) {
            continue;
        }

        final DefracPlatform platform = DefracPlatform.byName(target);

        if (platform == null) {
            continue;
        }

        platforms.add(platform);
    }

    return platforms.toArray(new DefracPlatform[platforms.size()]);
}

From source file:edu.umich.robot.soar.ReceivedMessagesIL.java

@Override
public void update() {
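    // Ids of messages present in this update; the list is pre-sized to the current message count.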
    List<Long> seen = Lists.newArrayListWithExpectedSize(messages.size());
    for (Entry<Long, RadioMessage> e : messages.entrySet()) {
        MessageIL m = all.get(e.getKey());
        if (m == null)
            all.put(e.getKey(), new MessageIL(e.getValue()));
        seen.add(e.getKey());
    }

    for (Iterator<Entry<Long, MessageIL>> iter = all.entrySet().iterator(); iter.hasNext();) {
        Entry<Long, MessageIL> e = iter.next();
        if (!seen.contains(e.getKey())) {
            e.getValue().destroy();
            iter.remove();
        }
    }
}

From source file:org.graylog2.metrics.MongoDbMetricsReporter.java

@Override
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
        SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters,
        SortedMap<String, Timer> timers) {
    final Date timestamp = new Date(clock.getTime());

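    // One document per reported metric, so size the list to the total metric count.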
    List<DBObject> docs = Lists.newArrayListWithExpectedSize(
            gauges.size() + counters.size() + histograms.size() + meters.size() + timers.size());

    collectGaugeReports(docs, gauges, timestamp);
    collectCounterReports(docs, counters, timestamp);
    collectHistogramReports(docs, histograms, timestamp);
    collectMeterReports(docs, meters, timestamp);
    collectTimerReports(docs, timers, timestamp);

    try {
        final DBCollection collection = mongoConnection.getDatabase().getCollection("graylog2_metrics");
        // don't hang on to the data for too long.
        final BasicDBObject indexField = new BasicDBObject("timestamp", 1);
        final BasicDBObject indexOptions = new BasicDBObject("expireAfterSeconds", 5 * 60);
        collection.createIndex(indexField, indexOptions);

        collection.insert(docs, WriteConcern.UNACKNOWLEDGED);
    } catch (Exception e) {
        LOG.warn("Unable to write graylog2 metrics to mongodb. Ignoring this error.", e);
    }
}

From source file:org.dishevelled.bio.align.BiojavaPairwiseAlignment.java

@Override
public Iterable<AlignmentPair> global(final List<Sequence> queries, final List<Sequence> subjects,
        final GapPenalties gapPenalties) {
    checkNotNull(queries);
    checkNotNull(subjects);
    checkNotNull(gapPenalties);

    if (queries.isEmpty() || subjects.isEmpty()) {
        return Collections.<AlignmentPair>emptyList();
    }

    NeedlemanWunsch needlemanWunsch = new NeedlemanWunsch(gapPenalties.getMatch(), gapPenalties.getReplace(),
            gapPenalties.getInsert(), gapPenalties.getDelete(), gapPenalties.getExtend(),
            getSubstitutionMatrix());

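    // One alignment pair per (query, subject) combination.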
    List<AlignmentPair> alignmentPairs = Lists.newArrayListWithExpectedSize(queries.size() * subjects.size());
    for (Sequence query : queries) {
        for (Sequence subject : subjects) {
            try {
                AlignmentPair alignmentPair = needlemanWunsch.pairwiseAlignment(query, subject);
                alignmentPairs.add(alignmentPair);
            } catch (BioException e) {
                // todo
            }
        }
    }
    return alignmentPairs;
}

From source file:org.apache.kylin.metadata.expression.ExpressionCountDistributor.java

@Override
public TupleExpression visitCaseCall(CaseTupleExpression caseExpr) {
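    // Rebuild the WHEN list with each value expression rewritten; same size as the original.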
    List<Pair<TupleFilter, TupleExpression>> whenList = Lists
            .newArrayListWithExpectedSize(caseExpr.getWhenList().size());
    for (Pair<TupleFilter, TupleExpression> entry : caseExpr.getWhenList()) {
        TupleFilter filter = entry.getFirst();
        TupleExpression expression = visitIndependent(entry.getSecond());
        whenList.add(new Pair<>(filter, expression));
    }
    TupleExpression elseExpr = null;
    if (caseExpr.getElseExpr() != null) {
        elseExpr = visitIndependent(caseExpr.getElseExpr());
    }

    if (ifToCnt) {
        ifToCnt = ExpressionColCollector.collectMeasureColumns(caseExpr).isEmpty();
    }
    return new CaseTupleExpression(whenList, elseExpr);
}

From source file:org.eclipse.xtext.xtext.ecoreInference.ProjectAwareXtendXtext2EcorePostProcessor.java

protected ClassLoader createClassLoader(IJavaProject javaProject) throws CoreException {
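    // Roughly one URL per resolved classpath entry (project output folders are added as well).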
    List<URL> urls = Lists.newArrayListWithExpectedSize(javaProject.getResolvedClasspath(true).length);
    try {
        IWorkspaceRoot workspaceRoot = getWorkspace().getRoot();
        urls.addAll(getOutputFolders(javaProject));
        for (IClasspathEntry entry : javaProject.getResolvedClasspath(true)) {
            IPath path = null;
            URL url = null;
            switch (entry.getEntryKind()) {
            case IClasspathEntry.CPE_SOURCE:
                break;
            case IClasspathEntry.CPE_PROJECT:
                IResource project = workspaceRoot.findMember(entry.getPath());
                urls.addAll(getOutputFolders(JavaCore.create(project.getProject())));
                break;
            default:
                path = entry.getPath();
                url = path.toFile().toURI().toURL();
                break;
            }
            if (url != null) {
                urls.add(url);
            }
        }
    } catch (MalformedURLException e) {
        logger.error(
                "Error creating class loader for java project '" + javaProject.getProject().getName() + "'", e);
    }
    return new URLClassLoader(urls.toArray(new URL[urls.size()]), getClass().getClassLoader());
}

From source file:org.apache.pulsar.client.impl.PatternMultiTopicsConsumerImpl.java

@Override
public void run(Timeout timeout) throws Exception {
    if (timeout.isCancelled()) {
        return;
    }

    CompletableFuture<Void> recheckFuture = new CompletableFuture<>();
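    // Two futures: one for topics added since the last check, one for topics removed.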
    List<CompletableFuture<Void>> futures = Lists.newArrayListWithExpectedSize(2);

    client.getLookup().getTopicsUnderNamespace(namespaceName, subscriptionMode).thenAccept(topics -> {
        if (log.isDebugEnabled()) {
            log.debug("Get topics under namespace {}, topics.size: {}", namespaceName.toString(),
                    topics.size());
            topics.forEach(topicName -> log.debug("Get topics under namespace {}, topic: {}",
                    namespaceName.toString(), topicName));
        }

        List<String> newTopics = PulsarClientImpl.topicsPatternFilter(topics, topicsPattern);
        List<String> oldTopics = PatternMultiTopicsConsumerImpl.this.getTopics();

        futures.add(topicsChangeListener.onTopicsAdded(topicsListsMinus(newTopics, oldTopics)));
        futures.add(topicsChangeListener.onTopicsRemoved(topicsListsMinus(oldTopics, newTopics)));
        FutureUtil.waitForAll(futures).thenAccept(finalFuture -> recheckFuture.complete(null))
                .exceptionally(ex -> {
                    log.warn("[{}] Failed to recheck topics change: {}", topic, ex.getMessage());
                    recheckFuture.completeExceptionally(ex);
                    return null;
                });
    });

    // schedule the next re-check task
    recheckPatternTimeout = client.timer().newTimeout(PatternMultiTopicsConsumerImpl.this,
            Math.min(1, conf.getPatternAutoDiscoveryPeriod()), TimeUnit.MINUTES);
}

From source file:org.thingsboard.server.dao.timeseries.BaseTimeseriesService.java

@Override
public ListenableFuture<List<TsKvEntry>> findLatest(TenantId tenantId, EntityId entityId,
        Collection<String> keys) {
    validate(entityId);
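    // One future per requested key (used below when the entity is not an entity view).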
    List<ListenableFuture<TsKvEntry>> futures = Lists.newArrayListWithExpectedSize(keys.size());
    keys.forEach(key -> Validator.validateString(key, "Incorrect key " + key));
    if (entityId.getEntityType().equals(EntityType.ENTITY_VIEW)) {
        EntityView entityView = entityViewService.findEntityViewById(tenantId, (EntityViewId) entityId);
        List<String> filteredKeys = new ArrayList<>(keys);
        if (entityView.getKeys() != null && entityView.getKeys().getTimeseries() != null
                && !entityView.getKeys().getTimeseries().isEmpty()) {
            filteredKeys.retainAll(entityView.getKeys().getTimeseries());
        }
        List<ReadTsKvQuery> queries = filteredKeys.stream().map(key -> {
            long endTs = entityView.getEndTimeMs() != 0 ? entityView.getEndTimeMs() : Long.MAX_VALUE;
            return new BaseReadTsKvQuery(key, entityView.getStartTimeMs(), endTs, 1, "DESC");
        }).collect(Collectors.toList());

        if (queries.size() > 0) {
            return timeseriesDao.findAllAsync(tenantId, entityView.getEntityId(), queries);
        } else {
            return Futures.immediateFuture(new ArrayList<>());
        }
    }
    keys.forEach(key -> futures.add(timeseriesDao.findLatest(tenantId, entityId, key)));
    return Futures.allAsList(futures);
}

From source file:org.apache.kylin.engine.mr.steps.ExtractDictionaryFromGlobalMapper.java

@Override
protected void doSetup(Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    bindCurrentConfiguration(conf);
    config = AbstractHadoopJob.loadKylinPropsAndMetadata();

    cubeName = conf.get(BatchConstants.CFG_CUBE_NAME);
    cube = CubeManager.getInstance(config).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    cubeSeg = cube.getSegmentById(conf.get(BatchConstants.CFG_CUBE_SEGMENT_ID));
    flatTableInputFormat = MRUtil.getBatchCubingInputSide(cubeSeg).getFlatTableInputFormat();

    intermediateTableDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(cubeSeg),
            cubeDesc);

    globalColumns = cubeDesc.getAllGlobalDictColumns();
    globalColumnIndex = new int[globalColumns.size()];
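    // One value set per global dictionary column.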
    globalColumnValues = Lists.newArrayListWithExpectedSize(globalColumns.size());

    for (int i = 0; i < globalColumns.size(); i++) {
        TblColRef colRef = globalColumns.get(i);
        int columnIndexOnFlatTbl = intermediateTableDesc.getColumnIndex(colRef);
        globalColumnIndex[i] = columnIndexOnFlatTbl;
        globalColumnValues.add(Sets.<String>newHashSet());
    }

    splitKey = DictionaryGetterUtil.getInputSplitSignature(cubeSeg, context.getInputSplit());
}