Example usage for com.google.common.collect Lists newArrayList

Introduction

On this page you can find example usage of com.google.common.collect.Lists#newArrayList.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayList() 

Document

Creates a mutable, empty ArrayList instance (for Java 6 and earlier).
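
A minimal, self-contained sketch of the no-arg factory in use. On Java 7 and later, new ArrayList<>() is equivalent (and, per Guava's own documentation, preferred), since the diamond operator provides the same type inference the factory was written for:

import com.google.common.collect.Lists;

import java.util.ArrayList;
import java.util.List;

public class NewArrayListDemo {
    public static void main(String[] args) {
        // Guava factory: the element type E is inferred from the target type,
        // which mattered before Java 7 introduced the diamond operator.
        List<String> names = Lists.newArrayList();
        names.add("alice");
        names.add("bob");

        // On Java 7 and later, the plain constructor is equivalent.
        List<String> same = new ArrayList<>(names);

        System.out.println(names); // [alice, bob]
        System.out.println(same);  // [alice, bob]
    }
}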

Usage

From source file:poc.streaming.rddtostream.JavaQueueStream_src.java

public static void main(String[] args) throws Exception {
    //StreamingExamples.setStreamingLogLevels();
    SparkConf sparkConf = new SparkConf().setAppName("JavaQueueStream");
    sparkConf.setMaster("local[2]");
    // Create the context
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));

    // Create the queue through which RDDs can be pushed to a QueueInputDStream
    Queue<JavaRDD<Integer>> rddQueue = new LinkedList<>();
    // Create and push some RDDs into the queue
    List<Integer> list = Lists.newArrayList();
    for (int i = 0; i < 1000; i++) {
        list.add(i);
    }
    for (int i = 0; i < 30; i++) {
        rddQueue.add(ssc.sparkContext().parallelize(list));
    }

    // Create the QueueInputDStream and use it to do some processing
    JavaDStream<Integer> inputStream = ssc.queueStream(rddQueue);
    JavaPairDStream<Integer, Integer> mappedStream = inputStream
            .mapToPair((PairFunction<Integer, Integer, Integer>) line -> new Tuple2<>(line, 1));
    JavaPairDStream<Integer, Integer> reducedStream = mappedStream
            .reduceByKey((Function2<Integer, Integer, Integer>) (i1, i2) -> i1 + i2);
    reducedStream.print();
    ssc.start();
    ssc.awaitTermination();
}
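
Unlike the examples below, which key by i % 10, this variant keys each element by its own value; with queueStream's default one-RDD-per-batch dequeueing, every key therefore appears with a count of 1 in each batch.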

From source file:com.andado.spark.examples.streaming.JavaQueueStream.java

public static void main(String[] args) throws Exception {

    //StreamingExamples.setStreamingLogLevels();
    SparkConf sparkConf = new SparkConf().setAppName("JavaQueueStream");

    // Create the context
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));

    // Create the queue through which RDDs can be pushed to
    // a QueueInputDStream
    Queue<JavaRDD<Integer>> rddQueue = new LinkedList<>();

    // Create and push some RDDs into the queue
    List<Integer> list = Lists.newArrayList();
    for (int i = 0; i < 1000; i++) {
        list.add(i);
    }

    for (int i = 0; i < 30; i++) {
        rddQueue.add(ssc.sparkContext().parallelize(list));
    }

    // Create the QueueInputDStream and use it to do some processing
    JavaDStream<Integer> inputStream = ssc.queueStream(rddQueue);
    JavaPairDStream<Integer, Integer> mappedStream = inputStream
            .mapToPair(new PairFunction<Integer, Integer, Integer>() {
                @Override
                public Tuple2<Integer, Integer> call(Integer i) {
                    return new Tuple2<>(i % 10, 1);
                }
            });
    JavaPairDStream<Integer, Integer> reducedStream = mappedStream
            .reduceByKey(new Function2<Integer, Integer, Integer>() {
                @Override
                public Integer call(Integer i1, Integer i2) {
                    return i1 + i2;
                }
            });

    reducedStream.print();
    ssc.start();
    ssc.awaitTermination();
}

From source file:cn.com.bsfit.frms.spark.streaming.QueueStream.java

public static void main(String[] args) throws Exception {

    // StreamingExamples.setStreamingLogLevels();
    SparkConf sparkConf = new SparkConf().setAppName("JavaQueueStream").setMaster("local");

    // Create the context
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));

    // Create the queue through which RDDs can be pushed to
    // a QueueInputDStream
    Queue<JavaRDD<Integer>> rddQueue = new LinkedList<>();

    // Create and push some RDDs into the queue
    List<Integer> list = Lists.newArrayList();
    for (int i = 0; i < 1000; i++) {
        list.add(i);
    }

    for (int i = 0; i < 30; i++) {
        rddQueue.add(ssc.sparkContext().parallelize(list));
    }

    // Create the QueueInputDStream and use it to do some processing
    JavaDStream<Integer> inputStream = ssc.queueStream(rddQueue);
    JavaPairDStream<Integer, Integer> mappedStream = inputStream
            .mapToPair(new PairFunction<Integer, Integer, Integer>() {
                private static final long serialVersionUID = 1L;

                @Override
                public Tuple2<Integer, Integer> call(Integer i) {
                    return new Tuple2<>(i % 10, 1);
                }
            });
    JavaPairDStream<Integer, Integer> reducedStream = mappedStream
            .reduceByKey(new Function2<Integer, Integer, Integer>() {
                private static final long serialVersionUID = 1L;

                @Override
                public Integer call(Integer i1, Integer i2) {
                    return i1 + i2;
                }
            });

    reducedStream.print();
    ssc.start();
    ssc.awaitTermination();
    ssc.close();
}

From source file:org.waveprotocol.pst.PstMain.java

public static void main(String[] args) {
    PstCommandLine cl = null;
    try {
        cl = new PstCommandLine(args);
    } catch (ParseException e) {
        System.err.println("Error parsing command line arguments: " + e.getMessage());
        PstCommandLine.printHelp();
        System.exit(1);
    }

    if (cl.hasHelp()) {
        PstCommandLine.printHelp();
        System.exit(0);
    }

    FileDescriptor fd = PstFileDescriptor.load(cl.getProtoFile().getPath(),
            cl.shouldSaveJava() ? cl.getOutputDir() : Files.createTempDir(), cl.getProtoPath());
    if (fd == null) {
        System.err.println("Error: cannot find file descriptor for " + cl.getProtoFile());
        System.exit(1);
    }

    boolean failed = false;

    List<File> templates = Lists.newArrayList();
    for (File maybeTemplate : cl.getTemplateFiles()) {
        if (maybeTemplate.exists()) {
            templates.add(maybeTemplate);
        } else {
            System.err.println("ERROR: template " + maybeTemplate.getPath() + " does not exist.");
            failed = true;
        }
    }

    Pst pst = new Pst(cl.getOutputDir(), fd, cl.getStyler(), templates, cl.shouldSavePreStyled(),
            cl.shouldUseInt52());
    try {
        pst.run();
    } catch (PstException e) {
        System.err.printf("ERROR: generation failed for %d/%d templates:\n", e.getTemplateExceptions().size(),
                templates.size());
        for (PstException.TemplateException te : e.getTemplateExceptions()) {
            System.err.println('\n' + te.getTemplateName() + " failed:");
            te.printStackTrace(System.err);
        }
        failed = true;
    }

    if (failed) {
        System.exit(1);
    }
}

From source file:com.sparkexample.JavaQueueStream.java

public static void main(String[] args) throws Exception {

    // StreamingExamples.setStreamingLogLevels();
    SparkConf sparkConf = new SparkConf().setAppName("JavaQueueStream").setSparkHome("/root/spark/");
    sparkConf.setMaster("local[4]");

    // Create the context
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));

    // Create the queue through which RDDs can be pushed to
    // a QueueInputDStream
    Queue<JavaRDD<Integer>> rddQueue = new LinkedList<>();

    // Create and push some RDDs into the queue
    List<Integer> list = Lists.newArrayList();
    for (int i = 0; i < 1000; i++) {
        list.add(i);
    }

    for (int i = 0; i < 30; i++) {
        JavaRDD<Integer> parallelizedList = ssc.sparkContext().parallelize(list);
        rddQueue.add(parallelizedList);
    }
    // Create the QueueInputDStream and use it to do some processing
    JavaDStream<Integer> inputStream = ssc.queueStream(rddQueue);
    JavaPairDStream<Integer, Integer> mappedStream = inputStream
            .mapToPair(new PairFunction<Integer, Integer, Integer>() {
                @Override
                public Tuple2<Integer, Integer> call(Integer i) {
                    return new Tuple2<>(i % 10, 1);
                }
            });
    JavaPairDStream<Integer, Integer> reducedStream = mappedStream
            .reduceByKey(new Function2<Integer, Integer, Integer>() {
                @Override
                public Integer call(Integer i1, Integer i2) {
                    return i1 + i2;
                }
            });

    reducedStream.print();
    ssc.start();
    ssc.awaitTermination();
}

From source file:com.wrmsr.nativity.x86.App.java

public static void main(String[] args) throws Exception {
    logger.info("hi");

    Document doc;
    try (InputStream is = App.class.getClassLoader().getResourceAsStream("x86reference.xml")) {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        dbFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false);
        dbFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        doc = dBuilder.parse(is);
    }

    //optional, but recommended
    //read this - http://stackoverflow.com/questions/13786607/normalization-in-dom-parsing-with-java-how-does-it-work
    doc.getDocumentElement().normalize();

    List<Ref.Entry> entries = Lists.newArrayList();
    Ref.Parsing.parseRoot(doc, entries);
    ByteTrie<Ref.Entry> trie = DisImpl.buildTrie(entries);

    System.out.println(trie.toDetailedString());
    System.out.println();
    System.out.println();

    // Dis.run(trie);

    Ordering<Pair<Ref.Operand.Type, Ref.Operand.Address>> ord = Ordering.from((o1, o2) -> {
        int c = ObjectUtils.compare(o1.getLeft(), o2.getLeft());
        if (c == 0) {
            c = ObjectUtils.compare(o1.getRight(), o2.getRight());
        }
        return c;
    });

    Set<Pair<Ref.Operand.Type, Ref.Operand.Address>> set = Sets.newHashSet();
    for (Ref.Entry entry : entries) {
        for (Ref.Syntax syntax : entry.getSyntaxes()) {
            for (Ref.Operand operand : syntax.getOperands()) {
                set.add(new ImmutablePair<>(operand.type, operand.address));
            }
        }
    }
    for (Pair<Ref.Operand.Type, Ref.Operand.Address> pair : ord.sortedCopy(set)) {
        System.out.println(pair);
    }
    System.out.println("\n");

    DisImpl.run(trie);
}

From source file:com.naltel.spark.JavaQueueStream.java

public static void main(String[] args) throws Exception {

    StreamingExamples.setStreamingLogLevels();
    SparkConf sparkConf = new SparkConf().setAppName("JavaQueueStream");

    // Create the context
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));

    // Create the queue through which RDDs can be pushed to
    // a QueueInputDStream
    Queue<JavaRDD<Integer>> rddQueue = new LinkedList<JavaRDD<Integer>>();

    // Create and push some RDDs into the queue
    List<Integer> list = Lists.newArrayList();
    for (int i = 0; i < 1000; i++) {
        list.add(i);
    }

    for (int i = 0; i < 30; i++) {
        rddQueue.add(ssc.sparkContext().parallelize(list));
    }

    // Create the QueueInputDStream and use it to do some processing
    JavaDStream<Integer> inputStream = ssc.queueStream(rddQueue);
    JavaPairDStream<Integer, Integer> mappedStream = inputStream
            .mapToPair(new PairFunction<Integer, Integer, Integer>() {
                @Override
                public Tuple2<Integer, Integer> call(Integer i) {
                    return new Tuple2<Integer, Integer>(i % 10, 1);
                }
            });
    JavaPairDStream<Integer, Integer> reducedStream = mappedStream
            .reduceByKey(new Function2<Integer, Integer, Integer>() {
                @Override
                public Integer call(Integer i1, Integer i2) {
                    return i1 + i2;
                }
            });

    reducedStream.print();
    ssc.start();
    ssc.awaitTermination();
}

From source file:org.spongepowered.despector.Main.java

public static void main(String[] args) throws IOException {
    if (args.length < 2) {
        System.out.println("Usage: java -jar Despector.jar [sources...] [destination]");
        return;
    }
    List<String> sources = Lists.newArrayList();
    for (int i = 0; i < args.length - 1; i++) {
        if (args[i].startsWith("-")) {
            // TODO parse flags
        } else {
            sources.add(args[i]);
        }
    }
    String destination = args[args.length - 1];
    Path output = Paths.get(destination).toAbsolutePath();
    if (!Files.exists(output)) {
        Files.createDirectories(output);
    }

    SourceSet source = new SourceSet();
    for (String s : sources) {
        Path path = Paths.get(s);
        if (!Files.exists(path)) {
            System.err.println("Unknown source: " + path.toAbsolutePath().toString());
        } else if (s.endsWith(".jar")) {
            JarWalker walker = new JarWalker(path);
            walker.walk(source);
        } else if (Files.isDirectory(path)) {
            DirectoryWalker walker = new DirectoryWalker(path);
            try {
                walker.walk(source);
            } catch (IOException e) {
                System.err.println("Error while walking directory: " + path.toAbsolutePath().toString());
                e.printStackTrace();
            }
        } else if (s.endsWith(".class")) {
            SingularClassLoader.instance.load(path, source);
        } else {
            System.err.println(
                    "Unknown source type: " + path.toAbsolutePath().toString() + " must be jar or directory");
        }
    }

    if (source.getAllClasses().isEmpty()) {
        System.err.println("No sources found.");
        return;
    }

    for (TypeEntry type : source.getAllClasses()) {
        Path out = output.resolve(type.getName() + ".java");
        if (!Files.exists(out.getParent())) {
            Files.createDirectories(out.getParent());
        }
        try (FileWriter writer = new FileWriter(out.toFile())) {
            SourceEmitter emitter = new SourceEmitter(writer);
            emitter.emitType(type);
        }
    }

}

From source file:com.infinities.keystone4j.policy.reducer.WrapCheckReducer.java

public static void main(String args[]) {
    List<String> list1 = Lists.newArrayList();

    list1.add("111");
    list1.add("222");
    list1.add("333");
    list1.add("444");
    list1.add("555");

    List<String> list2 = Lists.newArrayList();
    list2.add("222");
    list2.add("333");
    list2.add("444");

    List<String> list3 = Lists.newArrayList();
    list3.add("222");
    list3.add("444");
    list3.add("333");

    System.out.println(list1.containsAll(list2));
    System.out.println(list1.containsAll(list3));
}
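
Both calls print true: List#containsAll checks membership only, not order, so reordering list2's elements in list3 makes no difference.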

From source file:org.apache.spark.streaming.examples.JavaQueueStream.java

public static void main(String[] args) throws Exception {
    if (args.length < 1) {
        System.err.println("Usage: JavaQueueStream <master>");
        System.exit(1);
    }

    StreamingExamples.setStreamingLogLevels();

    // Create the context
    JavaStreamingContext ssc = new JavaStreamingContext(args[0], "QueueStream", new Duration(1000),
            System.getenv("SPARK_HOME"), JavaStreamingContext.jarOfClass(JavaQueueStream.class));

    // Create the queue through which RDDs can be pushed to
    // a QueueInputDStream
    Queue<JavaRDD<Integer>> rddQueue = new LinkedList<JavaRDD<Integer>>();

    // Create and push some RDDs into the queue
    List<Integer> list = Lists.newArrayList();
    for (int i = 0; i < 1000; i++) {
        list.add(i);
    }

    for (int i = 0; i < 30; i++) {
        rddQueue.add(ssc.sparkContext().parallelize(list));
    }

    // Create the QueueInputDStream and use it to do some processing
    JavaDStream<Integer> inputStream = ssc.queueStream(rddQueue);
    JavaPairDStream<Integer, Integer> mappedStream = inputStream
            .mapToPair(new PairFunction<Integer, Integer, Integer>() {
                @Override
                public Tuple2<Integer, Integer> call(Integer i) {
                    return new Tuple2<Integer, Integer>(i % 10, 1);
                }
            });
    JavaPairDStream<Integer, Integer> reducedStream = mappedStream
            .reduceByKey(new Function2<Integer, Integer, Integer>() {
                @Override
                public Integer call(Integer i1, Integer i2) {
                    return i1 + i2;
                }
            });

    reducedStream.print();
    ssc.start();
    ssc.awaitTermination();
}