Example usage for com.google.common.collect.Lists#newArrayListWithExpectedSize

Introduction

This page collects usage examples for com.google.common.collect.Lists#newArrayListWithExpectedSize.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayListWithExpectedSize(int estimatedSize) 

Documentation

Creates an ArrayList instance to hold estimatedSize elements, plus an unspecified amount of padding; you almost certainly mean to call newArrayListWithCapacity (see that method's documentation for further advice on usage).
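
For example (a minimal sketch, assuming Guava is on the classpath; the sizes here are illustrative):

import com.google.common.collect.Lists;
import java.util.List;

// Roughly 100 elements are expected; the returned list is padded beyond the
// estimate, so growing slightly past it stays cheap.
List<String> expected = Lists.newArrayListWithExpectedSize(100);

// When the element count is known exactly, newArrayListWithCapacity allocates
// precisely that initial capacity instead.
List<String> exact = Lists.newArrayListWithCapacity(100);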

Usage

From source file:com.spotify.cassandra.opstools.SSTableTimestampViewer.java

/**
 * @param args a list of sstables whose metadata we're interested in
 */
public static void main(String[] args) throws IOException {
    PrintStream out = System.out;
    if (args.length == 0) {
        out.println("Usage: spcassandra-sstable-timestamp <sstable filenames>");
        System.exit(1);
    }

    List<TimeMetadata> metadata = Lists.newArrayListWithExpectedSize(args.length);
    for (String fname : args) {
        Descriptor descriptor = Descriptor.fromFilename(fname);
        SSTableMetadata md = SSTableMetadata.serializer.deserialize(descriptor).left;
        metadata.add(new TimeMetadata(descriptor.toString(), md.minTimestamp, md.maxTimestamp,
                new java.io.File(descriptor.baseFilename() + "-Data.db").length()));
    }

    Collections.sort(metadata, new Comparator<TimeMetadata>() {
        public int compare(TimeMetadata o1, TimeMetadata o2) {
            return Long.compare(o1.minTimestamp, o2.minTimestamp);
        }
    });

    long[] timespanHistogram = new long[metadata.size() + 1];
    SortedSet<TimeMetadata> currentOverlaps = new TreeSet<>(new Comparator<TimeMetadata>() {
        public int compare(TimeMetadata o1, TimeMetadata o2) {
            return Long.compare(o1.maxTimestamp, o2.maxTimestamp);
        }
    });

    List<Interval<Long, Integer>> intervals = Lists.newArrayList();

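    // Sweep over the sstables in min-timestamp order, tracking how many of
    // them overlap at each moment and how long each overlap depth lasts.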
    long currentTime = 0;
    boolean wasMax = false;
    for (TimeMetadata md : metadata) {
        while (currentOverlaps.size() > 0 && currentOverlaps.first().maxTimestamp < md.minTimestamp) {
            intervals.add(new Interval<>(currentTime, !wasMax, currentOverlaps.first().maxTimestamp, true,
                    currentOverlaps.size()));
            timespanHistogram[currentOverlaps.size()] += currentOverlaps.first().maxTimestamp - currentTime;
            currentTime = currentOverlaps.first().maxTimestamp;
            wasMax = true;
            currentOverlaps.remove(currentOverlaps.first());
        }
        if (currentTime != 0) {
            intervals.add(new Interval<>(currentTime, !wasMax, md.minTimestamp, false, currentOverlaps.size()));
            timespanHistogram[currentOverlaps.size()] += md.minTimestamp - currentTime;
        }
        currentTime = md.minTimestamp;
        wasMax = false;
        currentOverlaps.add(md);
    }
    while (currentOverlaps.size() > 0) {
        intervals.add(new Interval<>(currentTime, !wasMax, currentOverlaps.first().maxTimestamp, true,
                currentOverlaps.size()));
        timespanHistogram[currentOverlaps.size()] += currentOverlaps.first().maxTimestamp - currentTime;
        currentTime = currentOverlaps.first().maxTimestamp;
        wasMax = true;
        currentOverlaps.remove(currentOverlaps.first());
    }

    for (TimeMetadata md : metadata)
        out.println(md);

    for (Interval<Long, Integer> interval : intervals)
        out.println(interval);
    out.println();

    for (int i = 0; i < timespanHistogram.length; i++)
        out.printf("Total time covered by %s sstables: %s (%.2f%%)%n", i, timespanHistogram[i],
                (double) timespanHistogram[i] / (currentTime - metadata.get(0).minTimestamp) * 100);
}

From source file:org.apache.mahout.knn.BruteSpeedCheck.java

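/**
 * Benchmarks brute-force nearest-neighbor search: generates random reference
 * and query vectors, then times searches across thread and block-size settings.
 */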
public static void main(String[] args) {
    Sampler<Vector> rand = new MultiNormal(new ConstantVector(1, VECTOR_DIMENSION));
    List<WeightedVector> referenceVectors = Lists.newArrayListWithExpectedSize(REFERENCE_SIZE);
    for (int i = 0; i < REFERENCE_SIZE; ++i) {
        referenceVectors.add(new WeightedVector(rand.sample(), 1, i));
    }
    System.out.printf("Generated reference matrix.\n");

    List<WeightedVector> queryVectors = Lists.newArrayListWithExpectedSize(QUERY_SIZE);
    for (int i = 0; i < QUERY_SIZE; ++i) {
        queryVectors.add(new WeightedVector(rand.sample(), 1, i));
    }
    System.out.printf("Generated query matrix.\n");

    for (int threads : new int[] { 1, 2, 3, 4, 5, 6, 10, 20, 50 }) {
        for (int block : new int[] { 1, 10, 50 }) {
            BruteSearch search = new BruteSearch(new EuclideanDistanceMeasure());
            search.addAll(referenceVectors);
            long t0 = System.nanoTime();
            search.search(queryVectors, block, threads);
            long t1 = System.nanoTime();
            System.out.printf("%d\t%d\t%.2f\n", threads, block, (t1 - t0) / 1e9);
        }
    }
}

From source file:com.twitter.distributedlog.basic.AtomicWriter.java

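/**
 * Writes the given messages to a stream as one atomic record set: each message
 * is buffered into a LogRecordSet and the whole set is transmitted in a single
 * write.
 */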
public static void main(String[] args) throws Exception {
    if (args.length < 3) {
        System.out.println(HELP);
        return;
    }

    String finagleNameStr = args[0];
    String streamName = args[1];
    String[] messages = new String[args.length - 2];
    System.arraycopy(args, 2, messages, 0, messages.length);

    DistributedLogClient client = DistributedLogClientBuilder.newBuilder()
            .clientId(ClientId.apply("atomic-writer")).name("atomic-writer").thriftmux(true)
            .finagleNameStr(finagleNameStr).build();

    final LogRecordSet.Writer recordSetWriter = LogRecordSet.newWriter(16 * 1024, Type.NONE);
    List<Future<DLSN>> writeFutures = Lists.newArrayListWithExpectedSize(messages.length);
    for (String msg : messages) {
        final String message = msg;
        ByteBuffer msgBuf = ByteBuffer.wrap(msg.getBytes(UTF_8));
        Promise<DLSN> writeFuture = new Promise<DLSN>();
        writeFuture.addEventListener(new FutureEventListener<DLSN>() {
            @Override
            public void onFailure(Throwable cause) {
                System.out.println("Encountered error on writing data");
                cause.printStackTrace(System.err);
                Runtime.getRuntime().exit(0);
            }

            @Override
            public void onSuccess(DLSN dlsn) {
                System.out.println("Write '" + message + "' as record " + dlsn);
            }
        });
        recordSetWriter.writeRecord(msgBuf, writeFuture);
        writeFutures.add(writeFuture);
    }
    FutureUtils.result(client.writeRecordSet(streamName, recordSetWriter)
            .addEventListener(new FutureEventListener<DLSN>() {
                @Override
                public void onFailure(Throwable cause) {
                    recordSetWriter.abortTransmit(cause);
                    System.out.println("Encountered error on writing data");
                    cause.printStackTrace(System.err);
                    Runtime.getRuntime().exit(0);
                }

                @Override
                public void onSuccess(DLSN dlsn) {
                    recordSetWriter.completeTransmit(dlsn.getLogSegmentSequenceNo(), dlsn.getEntryId(),
                            dlsn.getSlotId());
                }
            }));
    FutureUtils.result(Future.collect(writeFutures));
    client.close();
}

From source file:org.apache.distributedlog.basic.AtomicWriter.java

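/**
 * Writes the given messages to a stream as one atomic record set: each message
 * is buffered into a LogRecordSet and the whole set is transmitted in a single
 * write.
 */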
public static void main(String[] args) throws Exception {
    if (args.length < 3) {
        System.out.println(HELP);
        return;
    }

    String finagleNameStr = args[0];
    String streamName = args[1];
    String[] messages = new String[args.length - 2];
    System.arraycopy(args, 2, messages, 0, messages.length);

    DistributedLogClient client = DistributedLogClientBuilder.newBuilder()
            .clientId(ClientId$.MODULE$.apply("atomic-writer")).name("atomic-writer").thriftmux(true)
            .finagleNameStr(finagleNameStr).build();

    final LogRecordSet.Writer recordSetWriter = LogRecordSet.newWriter(16 * 1024, Type.NONE);
    List<Future<DLSN>> writeFutures = Lists.newArrayListWithExpectedSize(messages.length);
    for (String msg : messages) {
        final String message = msg;
        ByteBuffer msgBuf = ByteBuffer.wrap(msg.getBytes(UTF_8));
        Promise<DLSN> writeFuture = new Promise<DLSN>();
        writeFuture.addEventListener(new FutureEventListener<DLSN>() {
            @Override
            public void onFailure(Throwable cause) {
                System.out.println("Encountered error on writing data");
                cause.printStackTrace(System.err);
                Runtime.getRuntime().exit(0);
            }

            @Override
            public void onSuccess(DLSN dlsn) {
                System.out.println("Write '" + message + "' as record " + dlsn);
            }
        });
        recordSetWriter.writeRecord(msgBuf, writeFuture);
        writeFutures.add(writeFuture);
    }
    FutureUtils.result(client.writeRecordSet(streamName, recordSetWriter)
            .addEventListener(new FutureEventListener<DLSN>() {
                @Override
                public void onFailure(Throwable cause) {
                    recordSetWriter.abortTransmit(cause);
                    System.out.println("Encountered error on writing data");
                    cause.printStackTrace(System.err);
                    Runtime.getRuntime().exit(0);
                }

                @Override
                public void onSuccess(DLSN dlsn) {
                    recordSetWriter.completeTransmit(dlsn.getLogSegmentSequenceNo(), dlsn.getEntryId(),
                            dlsn.getSlotId());
                }
            }));
    FutureUtils.result(Future.collect(writeFutures));
    client.close();
}

From source file:org.hmahout.example.NetflixDatasetConverter.java

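/**
 * Converts the Netflix Prize dataset: writes training ratings to
 * trainingSet/ratings.tsv, reads the probes from qualifying.txt, and pairs
 * them with judging.txt ratings to produce probeSet/ratings.tsv.
 */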
public static void main(String[] args) throws IOException {

    if (args.length != 4) {
        System.err.println("Usage: NetflixDatasetConverter /path/to/training_set/ /path/to/qualifying.txt "
                + "/path/to/judging.txt /path/to/destination");
        return;
    }

    String trainingDataDir = args[0];
    String qualifyingTxt = args[1];
    String judgingTxt = args[2];
    Path outputPath = new Path(args[3]);

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(outputPath.toUri(), conf);

    log.info("Creating training set at {}/trainingSet/ratings.tsv ...", outputPath);
    BufferedWriter writer = null;
    try {
        FSDataOutputStream outputStream = fs.create(new Path(outputPath, "trainingSet/ratings.tsv"));
        writer = new BufferedWriter(new OutputStreamWriter(outputStream, Charsets.UTF_8));

        int ratingsProcessed = 0;
        for (File movieRatings : new File(trainingDataDir).listFiles()) {
            FileLineIterator lines = null;
            try {
                lines = new FileLineIterator(movieRatings);
                boolean firstLineRead = false;
                String movieID = null;
                while (lines.hasNext()) {
                    String line = lines.next();
                    if (firstLineRead) {
                        String[] tokens = SEPARATOR.split(line);
                        String userID = tokens[0];
                        String rating = tokens[1];
                        writer.write(userID + TAB + movieID + TAB + rating + NEWLINE);
                        ratingsProcessed++;
                        if (ratingsProcessed % 1000000 == 0) {
                            log.info("{} ratings processed...", ratingsProcessed);
                        }
                    } else {
                        movieID = line.replaceAll(MOVIE_DENOTER, "");
                        firstLineRead = true;
                    }
                }
            } finally {
                Closeables.close(lines, true);
            }
        }
        log.info("{} ratings processed. done.", ratingsProcessed);
    } finally {
        Closeables.close(writer, false);
    }

    log.info("Reading probes...");
    List<Preference> probes = Lists.newArrayListWithExpectedSize(2817131);
    long currentMovieID = -1;
    for (String line : new FileLineIterable(new File(qualifyingTxt))) {
        if (line.contains(MOVIE_DENOTER)) {
            currentMovieID = Long.parseLong(line.replaceAll(MOVIE_DENOTER, ""));
        } else {
            long userID = Long.parseLong(SEPARATOR.split(line)[0]);
            probes.add(new GenericPreference(userID, currentMovieID, 0));
        }
    }
    log.info("{} probes read...", probes.size());

    log.info("Reading ratings, creating probe set at {}/probeSet/ratings.tsv ...", outputPath);
    writer = null;
    try {
        FSDataOutputStream outputStream = fs.create(new Path(outputPath, "probeSet/ratings.tsv"));
        writer = new BufferedWriter(new OutputStreamWriter(outputStream, Charsets.UTF_8));

        int ratingsProcessed = 0;
        for (String line : new FileLineIterable(new File(judgingTxt))) {
            if (line.contains(MOVIE_DENOTER)) {
                currentMovieID = Long.parseLong(line.replaceAll(MOVIE_DENOTER, ""));
            } else {
                float rating = Float.parseFloat(SEPARATOR.split(line)[0]);
                Preference pref = probes.get(ratingsProcessed);
                Preconditions.checkState(pref.getItemID() == currentMovieID);
                ratingsProcessed++;
                writer.write(pref.getUserID() + TAB + pref.getItemID() + TAB + rating + NEWLINE);
                if (ratingsProcessed % 1000000 == 0) {
                    log.info("{} ratings processed...", ratingsProcessed);
                }
            }
        }
        log.info("{} ratings processed. done.", ratingsProcessed);
    } finally {
        Closeables.close(writer, false);
    }
}

From source file:com.attribyte.essem.StoredGraphParser.java

/**
 * Parses a stored graph response.
 * @param esObject The ES response object.
 * @return The list of graphs.
 */
public static List<StoredGraph> parseGraphs(ObjectNode esObject) throws IOException {
    List<StoredGraph> graphList = Lists.newArrayListWithExpectedSize(16);
    JsonNode hitsObj = esObject.get("hits");
    if (hitsObj != null) {
        JsonNode hitsArr = hitsObj.get("hits");
        if (hitsArr != null) {
            for (JsonNode hitObj : hitsArr) {
                JsonNode fieldsObj = hitObj.get("fields");
                if (fieldsObj != null) {
                    graphList.add(StoredGraph.fromJSON(fieldsObj));
                }
            }
        }
    }
    return graphList;
}

From source file:org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.builder.MessageBuilderHelper.java

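/**
 * Renders every dependency (or constraint) path leading to the given edge as a
 * human-readable string, optionally including the target module itself.
 */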
static Collection<String> pathTo(EdgeState edge, boolean includeLast) {
    List<List<EdgeState>> acc = Lists.newArrayListWithExpectedSize(1);
    pathTo(edge, Lists.<EdgeState>newArrayList(), acc, Sets.<NodeState>newHashSet());
    List<String> result = Lists.newArrayListWithCapacity(acc.size());
    for (List<EdgeState> path : acc) {
        EdgeState target = Iterators.getLast(path.iterator());
        StringBuilder sb = new StringBuilder();
        if (target.getSelector().getDependencyMetadata().isConstraint()) {
            sb.append("Constraint path ");
        } else {
            sb.append("Dependency path ");
        }
        boolean first = true;
        for (EdgeState e : path) {
            if (!first) {
                sb.append(" --> ");
            }
            first = false;
            ModuleVersionIdentifier id = e.getFrom().getResolvedConfigurationId().getId();
            sb.append('\'').append(id).append('\'');
        }
        if (includeLast) {
            sb.append(" --> ");
            ModuleIdentifier moduleId = edge.getSelector().getTargetModule().getId();
            sb.append('\'').append(moduleId.getGroup()).append(':').append(moduleId.getName()).append('\'');
        }
        result.add(sb.toString());
    }
    return result;
}

From source file:com.attribyte.essem.model.DisplayTZ.java

/**
 * Parse a file of the format [id] = [name].
 * @param file The file to parse.
 * @return The list of display zones.
 * @throws IOException on parse error.
 */
public static final List<DisplayTZ> parse(final File file) throws IOException {
    List<String> lines = CharStreams.readLines(new FileReader(file));
    List<DisplayTZ> tzList = Lists.newArrayListWithExpectedSize(lines.size());
    for (String line : lines) {
        line = line.trim();
        if (line.length() > 0 && !line.startsWith("#")) {
            Iterator<String> iter = eqSplitter.split(line).iterator();
            if (iter.hasNext()) {
                String id = iter.next();
                if (iter.hasNext()) {
                    String display = iter.next();
                    tzList.add(new DisplayTZ(id, display));
                }
            }
        }
    }
    return tzList;
}

From source file:com.google.visualization.datasource.query.parser.GenericsHelper.java

/**
 * Transforms, in an unsafe way, a typed List from a raw ArrayList.
 *
 * @param list The ArrayList to transform.
 *
 * @return The new List<T> containing all the elements in list.
 */
/* package */ static <T> List<T> makeTypedList(ArrayList<? extends T> list) {
    List<T> result = Lists.newArrayListWithExpectedSize(list.size());
    for (T obj : list) {
        result.add(obj);
    }
    return result;
}

From source file:org.apache.bigtop.bigpetstore.datagenerator.datamodels.Pair.java

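/**
 * Converts a map into a list of key/value pairs, one per entry, in the map's
 * iteration order.
 */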
public static <A, B> List<Pair<A, B>> create(Map<A, B> map) {
    List<Pair<A, B>> list = Lists.newArrayListWithExpectedSize(map.size());
    for (Map.Entry<A, B> entry : map.entrySet())
        list.add(Pair.create(entry.getKey(), entry.getValue()));
    return list;
}