Java tutorial
/*
 * Copyright (C) 2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.google.cloud.training.dataanalyst.sandiego;

import java.util.ArrayList;
import java.util.List;

import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.io.PubsubIO;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.windowing.SlidingWindows;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.values.PCollection;
import org.joda.time.Duration;

import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;

/**
 * A Dataflow pipeline that pulls traffic sensor messages from Pub/Sub and writes them to BigQuery.
 *
 * @author vlakshmanan
 */
public class CurrentConditions {

  // Custom pipeline options could be declared here; see the options sketch after this listing.
  public static interface MyOptions extends DataflowPipelineOptions {
  }

  @SuppressWarnings("serial")
  public static void main(String[] args) {
    MyOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().as(MyOptions.class);
    options.setStreaming(true); // Pub/Sub is an unbounded source, so run in streaming mode
    Pipeline p = Pipeline.create(options);

    String topic = "projects/" + options.getProject() + "/topics/sandiego";
    String currConditionsTable = options.getProject() + ":demos.current_conditions";

    // Build the table schema for the output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("timestamp").setType("TIMESTAMP"));
    fields.add(new TableFieldSchema().setName("latitude").setType("FLOAT"));
    fields.add(new TableFieldSchema().setName("longitude").setType("FLOAT"));
    fields.add(new TableFieldSchema().setName("highway").setType("STRING"));
    fields.add(new TableFieldSchema().setName("direction").setType("STRING"));
    fields.add(new TableFieldSchema().setName("lane").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("speed").setType("FLOAT"));
    fields.add(new TableFieldSchema().setName("sensorId").setType("STRING"));
    TableSchema schema = new TableSchema().setFields(fields);

    // Read messages from the topic, apply a 5-minute sliding window that advances every minute,
    // and parse each message into a LaneInfo object (see the LaneInfo sketch after this listing).
    PCollection<LaneInfo> laneInfo = p //
        .apply("GetMessages", PubsubIO.<String>read().topic(topic).withCoder(StringUtf8Coder.of())) //
        .apply("TimeWindow", Window.into(SlidingWindows //
            .of(Duration.standardSeconds(300)) //
            .every(Duration.standardSeconds(60)))) //
        .apply("ExtractData", ParDo.of(new DoFn<String, LaneInfo>() {
          @ProcessElement
          public void processElement(ProcessContext c) throws Exception {
            String line = c.element();
            c.output(LaneInfo.newLaneInfo(line));
          }
        }));

    // Convert each LaneInfo into a BigQuery TableRow and append it to the current-conditions table,
    // creating the table with the schema above if it does not exist yet.
    laneInfo.apply("ToBQRow", ParDo.of(new DoFn<LaneInfo, TableRow>() {
      @ProcessElement
      public void processElement(ProcessContext c) throws Exception {
        TableRow row = new TableRow();
        LaneInfo info = c.element();
        row.set("timestamp", info.getTimestamp());
        row.set("latitude", info.getLatitude());
        row.set("longitude", info.getLongitude());
        row.set("highway", info.getHighway());
        row.set("direction", info.getDirection());
        row.set("lane", info.getLane());
        row.set("speed", info.getSpeed());
        row.set("sensorId", info.getSensorKey());
        c.output(row);
      }
    })) //
        .apply(BigQueryIO.Write.to(currConditionsTable) //
            .withSchema(schema) //
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));

    p.run();
  }
}
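
The MyOptions interface in the listing is left empty, so the pipeline only relies on the standard Dataflow options such as the project and runner. If the pipeline needed its own command-line flags, they could be declared on that interface and PipelineOptionsFactory would parse and validate them automatically. The sketch below is only an illustration of that pattern; the option names outputTable and averagingInterval are assumptions and are not part of the original lab.

    // Illustrative sketch only: hypothetical custom options for this pipeline.
    import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
    import org.apache.beam.sdk.options.Default;
    import org.apache.beam.sdk.options.Description;

    public interface MyOptions extends DataflowPipelineOptions {
      @Description("BigQuery <dataset>.<table> to write current conditions to (assumed option)")
      @Default.String("demos.current_conditions")
      String getOutputTable();
      void setOutputTable(String s);

      @Description("Sliding window length in seconds (assumed option)")
      @Default.Integer(300)
      Integer getAveragingInterval();
      void setAveragingInterval(Integer i);
    }

With options declared this way, flags such as --outputTable=demos.current_conditions and --averagingInterval=300 would be accepted on the command line and read back in main() via options.getOutputTable() and options.getAveragingInterval().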
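
The listing also references a LaneInfo helper class that is not shown in this tutorial. The sketch below is an assumption about its shape, inferred only from the calls made above: newLaneInfo() parses one message, and the getters mirror the BigQuery schema (timestamp, latitude, longitude, highway, direction, lane, speed, sensorId). The message format, field order, and getSensorKey() are guesses, not the lab's actual implementation.

    package com.google.cloud.training.dataanalyst.sandiego;

    import java.io.Serializable;

    // Hypothetical sketch of LaneInfo. Implementing Serializable lets Beam fall back to
    // SerializableCoder for PCollection<LaneInfo>; the real class may use a different coder.
    @SuppressWarnings("serial")
    public class LaneInfo implements Serializable {
      private String[] fields;

      private LaneInfo() {
      }

      public static LaneInfo newLaneInfo(String line) {
        // Assumption: each Pub/Sub message is a comma-separated line of
        // timestamp,latitude,longitude,highway,direction,lane,speed,sensorId
        LaneInfo info = new LaneInfo();
        info.fields = line.split(",");
        return info;
      }

      public String getTimestamp() { return fields[0]; }
      public double getLatitude()  { return Double.parseDouble(fields[1]); }
      public double getLongitude() { return Double.parseDouble(fields[2]); }
      public String getHighway()   { return fields[3]; }
      public String getDirection() { return fields[4]; }
      public int    getLane()      { return Integer.parseInt(fields[5]); }
      public double getSpeed()     { return Double.parseDouble(fields[6]); }

      // Assumption: the sensor identifier is the last field of the message.
      public String getSensorKey() { return fields[7]; }
    }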