List of usage examples for com.google.common.collect Lists newArrayList
@GwtCompatible(serializable = true) public static <E> ArrayList<E> newArrayList(Iterator<? extends E> elements)
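This overload drains the given iterator into a new mutable ArrayList. A minimal sketch of the call itself (the class and variable names here are illustrative, not taken from the examples below):

import com.google.common.collect.Lists;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;

public class NewArrayListDemo {
    public static void main(String[] args) {
        Iterator<String> source = Arrays.asList("a", "b", "c").iterator();
        // Copies the iterator's remaining elements into a new mutable list.
        ArrayList<String> copy = Lists.newArrayList(source);
        copy.add("d"); // unlike Arrays.asList, the result supports add/remove
        System.out.println(copy); // prints [a, b, c, d]
    }
}

The sibling overloads newArrayList(E...) and newArrayList(Iterable&lt;? extends E&gt;) behave the same way for arrays and collections; most of the examples below use one of those.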
From source file:com.android.icu4j.srcgen.CaptureDeprecatedElements.java
/**
 * Usage:
 * java com.android.icu4j.srcgen.CaptureDeprecatedMethods {one or more source directories}
 */
public static void main(String[] args) throws Exception {
    CaptureDeprecatedMethodsRules rules = new CaptureDeprecatedMethodsRules(args);
    new Main(DEBUG).execute(rules);

    List<String> deprecatedElements = rules.getCaptureRule().getDeprecatedElements();
    // ASCII order for easier maintenance of the source this goes into.
    List<String> sortedDeprecatedElements = Lists.newArrayList(deprecatedElements);
    Collections.sort(sortedDeprecatedElements);
    for (String entry : sortedDeprecatedElements) {
        String entryInAndroid = entry.replace(ORIGINAL_ICU_PREFIX, ANDROID_ICU_PREFIX);
        System.out.println(" \"" + entryInAndroid + "\",");
    }
}
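Note the idiom here: Lists.newArrayList(deprecatedElements) makes a mutable copy, so Collections.sort reorders the copy while the original deprecatedElements list is left untouched.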
From source file:org.apache.spark.examples.streaming.JavaNetworkWordCount.java
public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    StreamingExamples.setStreamingLogLevels();
    SparkConf sparkConf = new SparkConf().setAppName("JavaNetworkWordCount");

    // Create the context with a 1 second batch size
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));

    // Create a JavaReceiverInputDStream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]));
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
            return new Tuple2<String, Integer>(s, 1);
        }
    }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    wordCounts.print();
    ssc.start();
    ssc.awaitTermination();
}
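In this and the other Spark word-count examples below, SPACE is presumably a java.util.regex.Pattern (declared as Pattern.compile(" ") in the original sources, though the declaration is not shown here), so SPACE.split(x) yields a String[] and the call resolves to the varargs overload Lists.newArrayList(E...).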
From source file:com.google.api.services.samples.youtube.cmdline.live.ListLiveChatMessages.java
/**
 * Lists live chat messages and SuperChat details from a live broadcast.
 *
 * @param args videoId (optional). If the videoId is given, live chat messages will be retrieved
 * from the chat associated with this video. If the videoId is not specified, the signed in
 * user's current live broadcast will be used instead.
 */
public static void main(String[] args) {
    // This OAuth 2.0 access scope allows for read-only access to the
    // authenticated user's account, but not other types of account access.
    List<String> scopes = Lists.newArrayList(YouTubeScopes.YOUTUBE_READONLY);

    try {
        // Authorize the request.
        Credential credential = Auth.authorize(scopes, "listlivechatmessages");

        // This object is used to make YouTube Data API requests.
        youtube = new YouTube.Builder(Auth.HTTP_TRANSPORT, Auth.JSON_FACTORY, credential)
                .setApplicationName("youtube-cmdline-listchatmessages-sample").build();

        // Get the liveChatId
        String liveChatId = args.length == 1 ? GetLiveChatId.getLiveChatId(youtube, args[0])
                : GetLiveChatId.getLiveChatId(youtube);
        if (liveChatId != null) {
            System.out.println("Live chat id: " + liveChatId);
        } else {
            System.err.println("Unable to find a live chat id");
            System.exit(1);
        }

        // Get live chat messages
        listChatMessages(liveChatId, null, 0);
    } catch (GoogleJsonResponseException e) {
        System.err.println("GoogleJsonResponseException code: " + e.getDetails().getCode() + " : "
                + e.getDetails().getMessage());
        e.printStackTrace();
    } catch (IOException e) {
        System.err.println("IOException: " + e.getMessage());
        e.printStackTrace();
    } catch (Throwable t) {
        System.err.println("Throwable: " + t.getMessage());
        t.printStackTrace();
    }
}
From source file:org.apache.spark.streaming.examples.JavaNetworkWordCount.java
public static void main(String[] args) {
    if (args.length < 3) {
        System.err.println("Usage: JavaNetworkWordCount <master> <hostname> <port>\n"
                + "In local mode, <master> should be 'local[n]' with n > 1");
        System.exit(1);
    }

    StreamingExamples.setStreamingLogLevels();

    // Create the context with a 1 second batch size
    JavaStreamingContext ssc = new JavaStreamingContext(args[0], "JavaNetworkWordCount", new Duration(1000),
            System.getenv("SPARK_HOME"), JavaStreamingContext.jarOfClass(JavaNetworkWordCount.class));

    // Create a NetworkInputDStream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    JavaDStream<String> lines = ssc.socketTextStream(args[1], Integer.parseInt(args[2]));
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
            return new Tuple2<String, Integer>(s, 1);
        }
    }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    wordCounts.print();
    ssc.start();
    ssc.awaitTermination();
}
From source file:org.apache.streams.example.graph.TwitterFollowGraph.java
public static void main(String[] args) {
    LOGGER.info(StreamsConfigurator.config.toString());

    StreamsConfiguration streams = StreamsConfigurator.detectConfiguration();

    TwitterFollowingGraphConfiguration configuration = new ComponentConfigurator<>(
            TwitterFollowingGraphConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig());

    TwitterFollowingConfiguration twitterFollowingConfiguration = configuration.getTwitter();
    TwitterFollowingProvider followingProvider = new TwitterFollowingProvider(twitterFollowingConfiguration);
    TypeConverterProcessor converter = new TypeConverterProcessor(String.class);

    ActivityConverterProcessorConfiguration activityConverterProcessorConfiguration =
            new ActivityConverterProcessorConfiguration()
                    .withClassifiers(Lists.newArrayList((DocumentClassifier) new TwitterDocumentClassifier()))
                    .withConverters(Lists.newArrayList((ActivityConverter) new TwitterFollowActivityConverter()));
    ActivityConverterProcessor activity = new ActivityConverterProcessor(
            activityConverterProcessorConfiguration);

    GraphHttpConfiguration graphWriterConfiguration = configuration.getGraph();
    GraphHttpPersistWriter graphPersistWriter = new GraphHttpPersistWriter(graphWriterConfiguration);

    StreamBuilder builder = new LocalStreamBuilder();
    builder.newPerpetualStream(TwitterFollowingProvider.STREAMS_ID, followingProvider);
    builder.addStreamsProcessor("converter", converter, 1, TwitterFollowingProvider.STREAMS_ID);
    builder.addStreamsProcessor("activity", activity, 1, "converter");
    builder.addStreamsPersistWriter("graph", graphPersistWriter, 1, "activity");
    builder.start();
}
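The casts to DocumentClassifier and ActivityConverter are what make these varargs calls infer List&lt;DocumentClassifier&gt; and List&lt;ActivityConverter&gt; rather than lists of the concrete Twitter types, which presumably is what the withClassifiers and withConverters setters expect.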
From source file:cn.com.warlock.streaming.JavaKafkaWordCount.java
public static void main(String[] args) {
    if (args.length < 4) {
        System.err.println("Usage: JavaKafkaWordCount <zkQuorum> <group> <topics> <numThreads>");
        System.exit(1);
    }

    // Parse the arguments
    String zkAddres = args[0];
    String group = args[1];
    int numThreads = Integer.parseInt(args[3]);
    Map<String, Integer> topicMap = new HashMap<String, Integer>();
    String[] topics = args[2].split(",");
    for (String topic : topics) {
        topicMap.put(topic, numThreads);
    }

    SparkConf sparkConf = new SparkConf().setAppName("JavaKafkaWordCount");
    // Create the StreamingContext with a 2 second batch interval
    JavaStreamingContext jssc = new JavaStreamingContext(sparkConf, new Duration(2000));

    // Create a Kafka stream from the zookeeper quorum and consumer group
    JavaPairReceiverInputDStream<String, String> messages = KafkaUtils.createStream(jssc, zkAddres, group,
            topicMap);

    // Extract the message payload
    JavaDStream<String> lines = messages.map(new Function<Tuple2<String, String>, String>() {
        @Override
        public String call(Tuple2<String, String> tuple2) {
            return tuple2._2();
        }
    });

    // Split each line into words
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });

    // Count each word
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
            return new Tuple2<String, Integer>(s, 1);
        }
    }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    wordCounts.print();
    jssc.start();
    jssc.awaitTermination();
}
From source file:com.weibangong.spark.streaming.JavaNetworkWordCount.java
public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("JavaNetworkWordCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));

    // Create a JavaReceiverInputDStream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    // Note that no duplication in storage level only for running locally.
    // Replication necessary in distributed scenario for fault tolerance.
    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]),
            StorageLevels.MEMORY_AND_DISK_SER);
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
            return new Tuple2<String, Integer>(s, 1);
        }
    }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    wordCounts.print();
    ssc.start();
    ssc.awaitTermination();
}
From source file:spark.java.JavaNetworkWordCount.java
public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    //StreamingExamples.setStreamingLogLevels();

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("JavaNetworkWordCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));

    // Create a JavaReceiverInputDStream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    // Note that no duplication in storage level only for running locally.
    // Replication necessary in distributed scenario for fault tolerance.
    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]),
            StorageLevels.MEMORY_AND_DISK_SER);
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(new PairFunction<String, String, Integer>() {
        public Tuple2<String, Integer> call(String s) {
            return new Tuple2<String, Integer>(s, 1);
        }
    }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    wordCounts.print();
    ssc.start();
    ssc.awaitTermination();
}
From source file:org.apache.giraph.debugger.CommandLine.java
/**
 * Main function of the CommandLine.
 * @param args command line arguments.
 */
public static void main(final String[] args) {
    // Validate. (Check args.length before reading args[0], otherwise an empty
    // argument list would throw ArrayIndexOutOfBoundsException.)
    if (args.length == 0) {
        printHelp();
    }
    String mode = args[0];
    if (!mode.equalsIgnoreCase("list") && !mode.equalsIgnoreCase("dump") && !mode.equalsIgnoreCase("mktest")
            && !mode.equalsIgnoreCase("dump-master") && !mode.equalsIgnoreCase("mktest-master")) {
        printHelp();
    }
    if (args.length <= 1) {
        printHelp();
    }
    String jobId = args[1];

    if (mode.equalsIgnoreCase("list")) {
        try {
            List<Long> superstepsDebuggedMaster = ServerUtils.getSuperstepsMasterDebugged(jobId);
            Set<Long> superstepsDebugged = Sets.newHashSet(ServerUtils.getSuperstepsDebugged(jobId));
            superstepsDebugged.addAll(superstepsDebuggedMaster);
            List<Long> allSupersteps = Lists.newArrayList(superstepsDebugged);
            Collections.sort(allSupersteps);
            for (Long superstepNo : allSupersteps) {
                if (superstepsDebuggedMaster.contains(superstepNo)) {
                    LOG.info(String.format("%-15s %s %4d ", "dump-master", jobId, superstepNo));
                    LOG.info(String.format("%-15s %s %4d TestMaster_%s_S%d", "mktest-master", jobId,
                            superstepNo, jobId, superstepNo));
                }
                List<DebugTrace> debugTraces = Arrays.asList(DebugTrace.INTEGRITY_MESSAGE_SINGLE_VERTEX,
                        DebugTrace.INTEGRITY_VERTEX, DebugTrace.VERTEX_EXCEPTION, DebugTrace.VERTEX_REGULAR);
                for (DebugTrace debugTrace : debugTraces) {
                    for (String vertexId : ServerUtils.getVerticesDebugged(jobId, superstepNo, debugTrace)) {
                        LOG.info(String.format("%-15s %s %4d %8s # %s", "dump", jobId, superstepNo, vertexId,
                                debugTrace.getLabel() == null ? "" : "captured " + debugTrace.getLabel()));
                        LOG.info(String.format("%-15s %s %4d %8s Test_%s_S%d_V%s", "mktest", jobId,
                                superstepNo, vertexId, jobId, superstepNo, vertexId));
                    }
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
        if (args.length <= 2) {
            printHelp();
        }
        Long superstepNo = Long.parseLong(args[2]);
        try {
            if (mode.equalsIgnoreCase("dump") || mode.equalsIgnoreCase("mktest")) {
                if (args.length <= 3) {
                    printHelp();
                }
                String vertexId = args[3];
                // Read scenario.
                // TODO: rename ServerUtils to Utils
                @SuppressWarnings("rawtypes")
                GiraphVertexScenarioWrapper scenarioWrapper = ServerUtils.readScenarioFromTrace(jobId,
                        superstepNo, vertexId, DebugTrace.VERTEX_ALL);
                if (scenarioWrapper == null) {
                    LOG.error("The trace file does not exist.");
                    System.exit(2);
                }
                if (mode.equalsIgnoreCase("dump")) {
                    LOG.info(scenarioWrapper);
                } else if (mode.equalsIgnoreCase("mktest")) {
                    // Read output prefix and test class.
                    if (args.length <= 4) {
                        printHelp();
                    }
                    String outputPrefix = args[4].trim();
                    String testClassName = new File(outputPrefix).getName();
                    // Generate test case.
                    String generatedTestCase = new ComputationComputeTestGenerator()
                            .generateTest(scenarioWrapper, null, testClassName);
                    outputTestCase(outputPrefix, generatedTestCase);
                }
            } else if (mode.equalsIgnoreCase("dump-master") || mode.equalsIgnoreCase("mktest-master")) {
                GiraphMasterScenarioWrapper scenarioWrapper = ServerUtils.readMasterScenarioFromTrace(jobId,
                        superstepNo, DebugTrace.MASTER_ALL);
                if (scenarioWrapper == null) {
                    LOG.error("The trace file does not exist.");
                    System.exit(2);
                }
                if (mode.equalsIgnoreCase("dump-master")) {
                    LOG.info(scenarioWrapper);
                } else if (mode.equalsIgnoreCase("mktest-master")) {
                    if (args.length <= 3) {
                        printHelp();
                    }
                    String outputPrefix = args[3].trim();
                    String testClassName = new File(outputPrefix).getName();
                    String generatedTestCase = new MasterComputeTestGenerator().generateTest(scenarioWrapper,
                            null, testClassName);
                    outputTestCase(outputPrefix, generatedTestCase);
                }
            } else {
                printHelp();
            }
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | IOException e) {
            e.printStackTrace();
        }
    }
}
From source file:com.sdw.dream.spark.examples.streaming.JavaNetworkWordCount.java
public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    StreamingExamples.setStreamingLogLevels();

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("JavaNetworkWordCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));

    // Create a JavaReceiverInputDStream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    // Note that no duplication in storage level only for running locally.
    // Replication necessary in distributed scenario for fault tolerance.
    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]),
            StorageLevels.MEMORY_AND_DISK_SER);
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });
    JavaPairDStream<String, Integer> wordCounts = words.mapToPair(new PairFunction<String, String, Integer>() {
        @Override
        public Tuple2<String, Integer> call(String s) {
            return new Tuple2<String, Integer>(s, 1);
        }
    }).reduceByKey(new Function2<Integer, Integer, Integer>() {
        @Override
        public Integer call(Integer i1, Integer i2) {
            return i1 + i2;
        }
    });

    wordCounts.print();
    ssc.start();
    ssc.awaitTermination();
}