Usage examples for com.google.common.collect.Lists.newArrayListWithCapacity
@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithCapacity(int initialArraySize)
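Creates a mutable, empty ArrayList backed by an array of the given initial size, equivalent to new ArrayList<>(initialArraySize); preallocating avoids intermediate array resizes when the element count is known up front. A minimal standalone sketch of the call follows; the class name and values are illustrative, not taken from the source files below:

import com.google.common.collect.Lists;
import java.util.ArrayList;

public class NewArrayListWithCapacityExample {
    public static void main(String[] args) {
        // The expected element count (3) is known in advance, so the backing
        // array is allocated once and never needs to grow.
        ArrayList<String> names = Lists.newArrayListWithCapacity(3);
        names.add("alpha");
        names.add("beta");
        names.add("gamma");
        System.out.println(names); // prints [alpha, beta, gamma]
    }
}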
From source file:co.cask.cdap.internal.io.ReflectionPutWriter.java
public ReflectionPutWriter(Schema schema) {
    super(schema);
    Preconditions.checkArgument(schema.getType() == Schema.Type.RECORD, "Schema must be a record.");
    List<Schema.Field> schemaFields = schema.getFields();
    int numFields = schemaFields.size();
    Preconditions.checkArgument(numFields > 0, "Record must contain at least one field.");
    this.fieldNames = Lists.newArrayListWithCapacity(numFields);
    for (Schema.Field schemaField : schemaFields) {
        this.fieldNames.add(schemaField.getName());
    }
    this.index = 0;
}
From source file:com.google.devtools.j2objc.translate.TypeSorter.java
public static void sortTypes(CompilationUnit unit) {
    List<AbstractTypeDeclaration> typeNodes = unit.getTypes();
    Map<String, AbstractTypeDeclaration> nodeMap = Maps.newHashMap();
    LinkedHashMap<String, ITypeBinding> bindingMap = Maps.newLinkedHashMap();
    for (AbstractTypeDeclaration node : typeNodes) {
        ITypeBinding typeBinding = node.getTypeBinding();
        String key = typeBinding.getKey();
        nodeMap.put(key, node);
        bindingMap.put(key, typeBinding);
    }
    Multimap<String, String> superTypes = findSuperTypes(bindingMap);
    ArrayList<String> rootTypes = Lists.newArrayListWithCapacity(typeNodes.size());
    for (String type : bindingMap.keySet()) {
        if (!superTypes.containsValue(type)) {
            rootTypes.add(type);
        }
    }
    typeNodes.clear();
    while (!rootTypes.isEmpty()) {
        String nextType = rootTypes.remove(rootTypes.size() - 1);
        typeNodes.add(0, nodeMap.get(nextType));
        for (String superType : superTypes.removeAll(nextType)) {
            if (!superTypes.containsValue(superType)) {
                rootTypes.add(superType);
            }
        }
    }
}
From source file:ch.ethz.bsse.quasirecomb.modelsampling.SingleModelSampling.java
@Override
public List<SampledRead> call() {
    List<SampledRead> list = Lists.newArrayListWithCapacity(this.amount);
    for (int i = 0; i < amount; i++) {
        int L = or.getL();
        int n = or.getn();
        int K = or.getK();
        double[][][] rho = or.getRho();
        double[] pi = or.getPi()[0];
        double[][][] mu = or.getMu();
        Frequency<Integer>[][] rhoArray = new Frequency[L - 1][K];
        Frequency<Byte>[][] muArray = new Frequency[L][K];
        int watsonStart = 0, watsonLength = 0, watsonEnd = 0, crickStart = 0, crickEnd = 0;
        watsonStart = startF.roll();
        Map<Integer, Double> watsonLengthMap = new HashMap<>();
        for (Map.Entry<Integer, Double> e : this.tauOmega.getTauWatsonMap().get(watsonStart).entrySet()) {
            watsonLengthMap.put(e.getKey(), e.getValue());
        }
        Frequency<Integer> watsonLengthF = new Frequency<>(watsonLengthMap);
        watsonLength = watsonLengthF.roll();
        watsonEnd = watsonStart + watsonLength;
        boolean localPaired = paired && this.tauOmega.getOmegaWatsonMap().get(watsonEnd) != null;
        if (localPaired) {
            Map<Integer, Double> watsonOmegaMap = new HashMap<>();
            for (Map.Entry<Integer, Double> e : this.tauOmega.getOmegaWatsonMap().get(watsonEnd).entrySet()) {
                watsonOmegaMap.put(e.getKey(), e.getValue());
            }
            Frequency<Integer> inLength = new Frequency<>(watsonOmegaMap);
            int insertLength = inLength.roll();
            crickStart = watsonEnd + insertLength;
            Map<Integer, Double> crickTauMap = new HashMap<>();
            if (this.tauOmega.getTauCrickMap().get(crickStart) == null) {
                System.err.println("");
            }
            for (Map.Entry<Integer, Double> e : this.tauOmega.getTauCrickMap().get(crickStart).entrySet()) {
                crickTauMap.put(e.getKey(), e.getValue());
            }
            Frequency<Integer> crickL = new Frequency<>(crickTauMap);
            int crickLength = crickL.roll();
            crickEnd = crickStart + crickLength;
        }
        Map<Integer, Double> piMap = new HashMap<>();
        for (int k = 0; k < K; k++) {
            piMap.put(k, pi[k]);
        }
        Frequency<Integer> piF = new Frequency<>(piMap);
        int k = piF.roll();
        List<Byte> read1 = new LinkedList<>();
        for (int j = watsonStart; j < watsonEnd; j++) {
            if (j > 0) {
                Map<Integer, Double> rhoMap = new HashMap<>();
                for (int l = 0; l < K; l++) {
                    rhoMap.put(l, rho[j - 1][k][l]);
                }
                Frequency<Integer> rhoF = new Frequency<>(rhoMap);
                rhoArray[j - 1][k] = rhoF;
                k = rhoArray[j - 1][k].roll();
            }
            if (muArray[j][k] == null) {
                Map<Byte, Double> muMap = new HashMap<>();
                for (byte v = 0; v < n; v++) {
                    muMap.put(v, mu[j][k][v]);
                }
                Frequency<Byte> muF = new Frequency<>(muMap);
                muArray[j][k] = muF;
            }
            read1.add(muArray[j][k].roll());
        }
        if (localPaired) {
            List<Byte> read2 = new LinkedList<>();
            for (int j = watsonEnd; j < crickStart; j++) {
                Map<Integer, Double> rhoMap = new HashMap<>();
                for (int l = 0; l < K; l++) {
                    rhoMap.put(l, rho[j - 1][k][l]);
                }
                Frequency<Integer> rhoF = new Frequency<>(rhoMap);
                rhoArray[j - 1][k] = rhoF;
                k = rhoArray[j - 1][k].roll();
            }
            for (int j = crickStart; j < crickEnd; j++) {
                Map<Integer, Double> rhoMap = new HashMap<>();
                for (int l = 0; l < K; l++) {
                    rhoMap.put(l, rho[j - 1][k][l]);
                }
                Frequency<Integer> rhoF = new Frequency<>(rhoMap);
                rhoArray[j - 1][k] = rhoF;
                k = rhoArray[j - 1][k].roll();
                if (muArray[j][k] == null) {
                    Map<Byte, Double> muMap = new HashMap<>();
                    for (byte v = 0; v < n; v++) {
                        muMap.put(v, mu[j][k][v]);
                    }
                    Frequency<Byte> muF = new Frequency<>(muMap);
                    muArray[j][k] = muF;
                }
                read2.add(muArray[j][k].roll());
            }
            list.add(new SampledRead(read1, read2, watsonStart, watsonEnd, crickStart, crickEnd));
        } else {
            list.add(new SampledRead(read1, watsonStart, watsonEnd));
        }
    }
    return list;
}
From source file:org.lenskit.data.output.CSVRatingWriter.java
@Override
public void writeRating(Rating r) throws IOException {
    List<Object> row = Lists.newArrayListWithCapacity(4);
    row.add(r.getUserId());
    row.add(r.getItemId());
    if (r.hasValue()) {
        row.add(r.getValue());
    } else {
        row.add(null);
    }
    if (includeTimestamps) {
        row.add(r.getTimestamp());
    }
    tableWriter.writeRow(row);
}
From source file:net.opentsdb.contrib.tsquare.web.view.GraphiteRawResponseWriter.java
@Override
public void write(final AnnotatedDataPoints annotatedPoints, final ResponseContext context)
        throws IOException {
    final DataPoints points = annotatedPoints.getDataPoints();

    long maxTimestamp = Long.MIN_VALUE;
    long minTimestamp = Long.MAX_VALUE;
    long lastTimestamp = 0;
    long diffSum = 0;
    long totalPoints = 0;

    // XXX: Maybe just iterate over DataPoints again instead of caching these?
    final List<Double> values = Lists.newArrayListWithCapacity(points.size());

    for (final DataPoint p : points) {
        // Graphite timestamps are always in seconds.
        final long currentTimestamp = TimeUnit.MILLISECONDS.toSeconds(p.timestamp());

        // Find the max/min timestamps; we need this for part of the raw response.
        maxTimestamp = Math.max(maxTimestamp, currentTimestamp);
        minTimestamp = Math.min(minTimestamp, currentTimestamp);

        // The Graphite raw format requires a series step value, which has no
        // counterpart in OpenTSDB, so we estimate it: sum the differences
        // between consecutive timestamps to compute the average separation.
        if (lastTimestamp > 0) { // ... skips the first iteration.
            long diff = currentTimestamp - lastTimestamp;
            if (diff >= 0) { // ... accounts for out-of-order results.
                diffSum += diff;
                totalPoints++;
            }
        }
        lastTimestamp = currentTimestamp;

        values.add(TsWebUtils.asDoubleObject(p));
    }

    final PrintWriter writer = context.getResponse().getWriter();
    writer.print(points.metricName());
    writer.print(',');
    writer.print(minTimestamp);
    writer.print(',');
    writer.print(maxTimestamp);
    writer.print(',');
    writer.print((diffSum / totalPoints));
    writer.print('|');
    writer.println(Joiner.on(',').join(values));
}
From source file:org.apache.mahout.clustering.streaming.cluster.DataUtils.java
/**
 * Samples numDatapoints vectors of numDimensions cardinality centered around the vertices of a
 * numDimensions order hypercube. The distribution of points around these vertices is
 * multinormal with a radius of distributionRadius.
 * A hypercube of numDimensions has 2^numDimensions vertices. Keep this in mind when clustering
 * the data.
 *
 * Note that it is almost always the case that you want to call RandomUtils.useTestSeed() before
 * generating test data. This means that you can't generate data in the declaration of a static
 * variable, because such initializations happen before any @BeforeClass or @Before setup methods
 * are called.
 *
 * @param numDimensions number of dimensions of the vectors to be generated.
 * @param numDatapoints number of data points to be generated.
 * @param distributionRadius radius of the distribution around the hypercube vertices.
 * @return a pair of lists, whose first element is the sampled points and whose second element
 *         is the list of hypercube vertices that are the means of each distribution.
 */
public static Pair<List<Centroid>, List<Centroid>> sampleMultiNormalHypercube(int numDimensions,
        int numDatapoints, double distributionRadius) {
    int pow2N = 1 << numDimensions;
    // Construct data samplers centered on the corners of a unit hypercube.
    // Additionally, keep the means of the distributions that will be generated so we can compare
    // these to the ideal cluster centers.
    List<Centroid> mean = Lists.newArrayListWithCapacity(pow2N);
    List<MultiNormal> rowSamplers = Lists.newArrayList();
    for (int i = 0; i < pow2N; i++) {
        Vector v = new DenseVector(numDimensions);
        // Set each coordinate of vertex i to 0 or 1 according to the corresponding bit of i.
        int pow2J = 1 << (numDimensions - 1);
        for (int j = 0; j < numDimensions; ++j) {
            v.set(j, 1.0 / pow2J * (i & pow2J));
            pow2J >>= 1;
        }
        mean.add(new Centroid(i, v, 1));
        rowSamplers.add(new MultiNormal(distributionRadius, v));
    }

    // Sample the requested number of data points, cycling through the vertex samplers.
    List<Centroid> data = Lists.newArrayListWithCapacity(numDatapoints);
    for (int i = 0; i < numDatapoints; ++i) {
        data.add(new Centroid(i, rowSamplers.get(i % pow2N).sample(), 1));
    }
    return new Pair<List<Centroid>, List<Centroid>>(data, mean);
}
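A short usage sketch of the method above, following the javadoc's advice to seed the RNG first; the dimension and point counts are illustrative, not taken from the Mahout source:

// Fix the random seed so generated test data is reproducible.
RandomUtils.useTestSeed();
// 3 dimensions -> 2^3 = 8 hypercube vertices; 80 points cycle through them.
Pair<List<Centroid>, List<Centroid>> sample =
        DataUtils.sampleMultiNormalHypercube(3, 80, 0.01);
List<Centroid> points = sample.getFirst();       // the 80 sampled points
List<Centroid> vertexMeans = sample.getSecond(); // the 8 ideal cluster centers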
From source file:com.inductiveautomation.opcua.sdk.server.api.MethodManager.java
/**
 * Invoke one or more methods belonging to this {@link MethodManager}.
 *
 * @param requests The {@link CallMethodRequest}s for the methods to invoke.
 * @param future   The future to complete with the {@link CallMethodResult}s.
 */
default void call(List<CallMethodRequest> requests, CompletableFuture<List<CallMethodResult>> future) {
    List<CompletableFuture<CallMethodResult>> results = Lists.newArrayListWithCapacity(requests.size());

    for (CallMethodRequest request : requests) {
        MethodInvocationHandler handler = getInvocationHandler(request.getMethodId())
                .orElse(new NodeIdUnknownHandler());

        CompletableFuture<CallMethodResult> resultFuture = new CompletableFuture<>();
        try {
            handler.invoke(request, resultFuture);
        } catch (Throwable t) {
            LoggerFactory.getLogger(getClass()).error(
                    "Uncaught Throwable invoking method handler for methodId={}.",
                    request.getMethodId(), t);

            resultFuture.complete(new CallMethodResult(new StatusCode(StatusCodes.Bad_InternalError),
                    new StatusCode[0], new DiagnosticInfo[0], new Variant[0]));
        }
        results.add(resultFuture);
    }

    sequence(results).thenAccept(future::complete);
}
From source file:co.cask.cdap.hive.stream.StreamRecordReader.java
StreamRecordReader(InputSplit split, JobConf conf) throws IOException {
    this.inputSplit = (StreamInputSplit) split;
    this.events = Lists.newArrayListWithCapacity(1);
    this.reader = createReader(FileSystem.get(conf), inputSplit);
    reader.initialize();
    readFilter = new TimeRangeReadFilter(inputSplit.getStartTime(), inputSplit.getEndTime());
}
From source file:com.cloudera.oryx.als.serving.MultiRescorerProvider.java
@Override
public Rescorer getRecommendRescorer(String[] userIDs, OryxRecommender recommender, String... args) {
    List<Rescorer> rescorers = Lists.newArrayListWithCapacity(providers.length);
    for (RescorerProvider provider : providers) {
        Rescorer rescorer = provider.getRecommendRescorer(userIDs, recommender, args);
        if (rescorer != null) {
            rescorers.add(rescorer);
        }
    }
    return buildRescorer(rescorers);
}
From source file:com.hengyi.japp.execution.Util.java
public static void queryCommand(CriteriaBuilder cb, CriteriaQuery<?> cq, Root<PayOrder> root,
        PayOrderQueryCommand command) {
    List<Predicate> ps = Lists.newArrayListWithCapacity(3);
    if (command.getCustomer() != null) {
        ps.add(cb.equal(root.get(PayOrder_.customer), command.getCustomer()));
    }
    if (command.getPayDateStart() != null) {
        // Lower bound of the pay-date range.
        ps.add(cb.greaterThanOrEqualTo(root.get(PayOrder_.payDate), command.getPayDateStart()));
    }
    if (command.getPayDateEnd() != null) {
        // Upper bound of the pay-date range (must compare against the end date,
        // not the start date, or this predicate never narrows the range).
        ps.add(cb.lessThanOrEqualTo(root.get(PayOrder_.payDate), command.getPayDateEnd()));
    }
    cq.where(ps.toArray(new Predicate[ps.size()]));
}
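A hypothetical call site for the helper above, using the standard JPA criteria API; the entityManager and command variables are illustrative, not part of the source:

CriteriaBuilder cb = entityManager.getCriteriaBuilder();
CriteriaQuery<PayOrder> cq = cb.createQuery(PayOrder.class);
Root<PayOrder> root = cq.from(PayOrder.class);
// Apply the optional customer and pay-date filters from the command object.
Util.queryCommand(cb, cq, root, command);
List<PayOrder> orders = entityManager.createQuery(cq).getResultList();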