Download beam-runners-spark-0.2.0.jar file

Introduction

You can download the beam-runners-spark-0.2.0.jar file on this page.

License

Apache License

Type List

beam-runners-spark-0.2.0.jar file has the following types.

META-INF/DEPENDENCIES
META-INF/LICENSE
META-INF/MANIFEST.MF
META-INF/NOTICE
META-INF/maven/org.apache.beam/beam-runners-spark/pom.properties
META-INF/maven/org.apache.beam/beam-runners-spark/pom.xml
META-INF/services/org.apache.beam.sdk.options.PipelineOptionsRegistrar
META-INF/services/org.apache.beam.sdk.runners.PipelineRunnerRegistrar
org.apache.beam.runners.spark.EvaluationResult.class
org.apache.beam.runners.spark.SparkPipelineOptions.class
org.apache.beam.runners.spark.SparkRunner.class
org.apache.beam.runners.spark.SparkRunnerRegistrar.class
org.apache.beam.runners.spark.SparkStreamingPipelineOptions.class
org.apache.beam.runners.spark.TestSparkRunner.class
org.apache.beam.runners.spark.aggregators.AggAccumParam.class
org.apache.beam.runners.spark.aggregators.NamedAggregators.class
org.apache.beam.runners.spark.coders.CoderHelpers.class
org.apache.beam.runners.spark.coders.NullWritableCoder.class
org.apache.beam.runners.spark.coders.WritableCoder.class
org.apache.beam.runners.spark.examples.WordCount.class
org.apache.beam.runners.spark.io.ConsoleIO.class
org.apache.beam.runners.spark.io.CreateStream.class
org.apache.beam.runners.spark.io.KafkaIO.class
org.apache.beam.runners.spark.io.hadoop.HadoopIO.class
org.apache.beam.runners.spark.io.hadoop.ShardNameBuilder.class
org.apache.beam.runners.spark.io.hadoop.ShardNameTemplateAware.class
org.apache.beam.runners.spark.io.hadoop.ShardNameTemplateHelper.class
org.apache.beam.runners.spark.io.hadoop.TemplatedAvroKeyOutputFormat.class
org.apache.beam.runners.spark.io.hadoop.TemplatedSequenceFileOutputFormat.class
org.apache.beam.runners.spark.io.hadoop.TemplatedTextOutputFormat.class
org.apache.beam.runners.spark.translation.DoFnFunction.class
org.apache.beam.runners.spark.translation.EvaluationContext.class
org.apache.beam.runners.spark.translation.MultiDoFnFunction.class
org.apache.beam.runners.spark.translation.SparkContextFactory.class
org.apache.beam.runners.spark.translation.SparkPipelineEvaluator.class
org.apache.beam.runners.spark.translation.SparkPipelineTranslator.class
org.apache.beam.runners.spark.translation.SparkProcessContext.class
org.apache.beam.runners.spark.translation.SparkRuntimeContext.class
org.apache.beam.runners.spark.translation.TransformEvaluator.class
org.apache.beam.runners.spark.translation.TransformTranslator.class
org.apache.beam.runners.spark.translation.WindowingHelpers.class
org.apache.beam.runners.spark.translation.streaming.StreamingEvaluationContext.class
org.apache.beam.runners.spark.translation.streaming.StreamingTransformTranslator.class
org.apache.beam.runners.spark.translation.streaming.StreamingWindowPipelineDetector.class
org.apache.beam.runners.spark.util.BroadcastHelper.class
org.apache.beam.runners.spark.util.ByteArray.class
org.apache.beam.runners.spark.util.SinglePrimitiveOutputPTransform.class

Pom

beam-runners-spark-0.2.0.pom file content.

<?xml version="1.0" encoding="UTF-8"?>
<!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
    this work for additional information regarding copyright ownership.
    The ASF licenses this file to You under the Apache License, Version 2.0
    (the "License"); you may not use this file except in compliance with
    the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>org.apache.beam</groupId>
    <artifactId>beam-runners-parent</artifactId>
    <version>0.2.0-incubating</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <artifactId>beam-runners-spark</artifactId>

  <name>Apache Beam :: Runners :: Spark</name>
  <packaging>jar</packaging>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
    <spark.version>1.6.2</spark.version>
    <hadoop.version>2.2.0</hadoop.version>
    <kafka.version>0.8.2.1</kafka.version>
  </properties>

  <profiles>
    <profile>
      <id>jacoco</id>
      <build>
        <plugins>
          <plugin>
            <groupId>org.jacoco</groupId>
            <artifactId>jacoco-maven-plugin</artifactId>
          </plugin>
        </plugins>
      </build>
    </profile>

    <profile>
      <!-- This profile adds execution of RunnableOnService integration tests 
           against a local Spark endpoint. -->
      <id>runnable-on-service-tests</id>
      <activation><activeByDefault>false</activeByDefault></activation>
      <build>
        <pluginManagement>
          <plugins>
            <plugin>
              <groupId>org.apache.maven.plugins</groupId>
              <artifactId>maven-surefire-plugin</artifactId>
              <executions>
                <execution>
                  <id>runnable-on-service-tests</id>
                  <configuration>
                    <groups>org.apache.beam.sdk.testing.RunnableOnService</groups>
                    <parallel>none</parallel>
                    <failIfNoTests>true</failIfNoTests>
                    <dependenciesToScan>
                      <dependency>org.apache.beam:java-sdk-all</dependency>
                    </dependenciesToScan>
                    <excludes>
                      <exclude>org.apache.beam.sdk.io.BoundedReadFromUnboundedSourceTest</exclude>
                    </excludes>
                    <systemPropertyVariables>
                      <beamTestPipelineOptions>
                        [
                          "--runner=TestSparkRunner",
                          "--streaming=false"
                        ]
                      </beamTestPipelineOptions>
                      <beam.spark.test.reuseSparkContext>true</beam.spark.test.reuseSparkContext>
                      <spark.ui.enabled>false</spark.ui.enabled>
                    </systemPropertyVariables>
                  </configuration>
                </execution>
              </executions>
            </plugin>
          </plugins>
        </pluginManagement>
      </build>
    </profile>
  </profiles>

  <dependencies>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_2.10</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_2.10</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming-kafka_2.10</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-network-common_2.10</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka_2.10</artifactId>
      <version>${kafka.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>${kafka.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>${hadoop.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
    </dependency>
    <dependency>
      <groupId>com.google.auto.service</groupId>
      <artifactId>auto-service</artifactId>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>com.google.http-client</groupId>
      <artifactId>google-http-client</artifactId>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-core</artifactId>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-annotations</artifactId>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro</artifactId>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
    </dependency>
    <dependency>
      <groupId>joda-time</groupId>
      <artifactId>joda-time</artifactId>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <version>2.4</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
      <version>3.4.6</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>2.10.5</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.beam</groupId>
      <artifactId>beam-sdks-java-core</artifactId>
      <exclusions>
        <!-- Use Hadoop/Spark's backend logger instead of jdk14 for tests -->
        <exclusion>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-jdk14</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.beam</groupId>
      <artifactId>beam-runners-core-java</artifactId>
      <exclusions>
        <!-- Use Hadoop/Spark's backend logger instead of jdk14 for tests -->
        <exclusion>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-jdk14</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro-mapred</artifactId>
      <version>${avro.version}</version>
      <classifier>hadoop2</classifier>
      <exclusions>
        <!-- exclude old Jetty version of servlet API -->
        <exclusion>
          <groupId>org.mortbay.jetty</groupId>
          <artifactId>servlet-api</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <!-- test dependencies -->
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.hamcrest</groupId>
      <artifactId>hamcrest-all</artifactId>
      <scope>test</scope>
    </dependency>

    <!-- Depend on test jar to scan for RunnableOnService tests -->
    <dependency>
      <groupId>org.apache.beam</groupId>
      <artifactId>beam-sdks-java-core</artifactId>
      <classifier>tests</classifier>
      <scope>test</scope>
      <exclusions>
        <exclusion>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-jdk14</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-all</artifactId>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <pluginManagement>
      <plugins>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-surefire-plugin</artifactId>
          <!-- Spark runner requires this surefire version for the fork "control" -->
          <version>2.19.1</version>
          <configuration>
            <forkCount>1</forkCount>
            <reuseForks>false</reuseForks>
            <systemPropertyVariables>
              <beam.spark.test.reuseSparkContext>true</beam.spark.test.reuseSparkContext>
              <spark.ui.enabled>false</spark.ui.enabled>
            </systemPropertyVariables>
          </configuration>
        </plugin>
        <plugin>
          <groupId>org.codehaus.mojo</groupId>
          <artifactId>exec-maven-plugin</artifactId>
          <version>1.4.0</version>
          <configuration>
            <executable>java</executable>
            <classpathScope>test</classpathScope> <!-- include provided deps -->
            <arguments>
              <argument>-classpath</argument>
              <classpath />
              <argument>${mainClass}</argument>
              <argument>--inputFile=${input}</argument>
              <argument>--output=${output}</argument>
              <argument>--runner=${runner}</argument>
              <argument>--sparkMaster=${sparkMaster}</argument>
            </arguments>
          </configuration>
        </plugin>
        <plugin>
          <groupId>org.jacoco</groupId>
          <artifactId>jacoco-maven-plugin</artifactId>
          <version>0.7.5.201505241946</version>
          <executions>
            <execution>
              <goals>
                <goal>prepare-agent</goal>
              </goals>
            </execution>
            <execution>
              <id>report</id>
              <phase>test</phase>
              <goals>
                <goal>report</goal>
              </goals>
            </execution>
          </executions>
        </plugin>
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-shade-plugin</artifactId>
          <executions>
            <execution>
              <phase>package</phase>
              <goals>
                <goal>shade</goal>
              </goals>
              <configuration>
                <relocations>
                  <!-- relocate Guava used by Dataflow (v18) since it conflicts with 
                    version used by Hadoop (v11) -->
                  <relocation>
                    <pattern>com.google.common</pattern>
                    <shadedPattern>org.apache.beam.spark.relocated.com.google.common</shadedPattern>
                  </relocation>
                  <relocation>
                    <pattern>com.google.thirdparty</pattern>
                    <shadedPattern>org.apache.beam.spark.relocated.com.google.thirdparty</shadedPattern>
                  </relocation>
                </relocations>
                <shadedArtifactAttached>true</shadedArtifactAttached>
                <shadedClassifierName>spark-app</shadedClassifierName>
                <transformers>
                  <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
                </transformers>
              </configuration>
            </execution>
          </executions>
        </plugin>
      </plugins>
    </pluginManagement>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-checkstyle-plugin</artifactId>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-source-plugin</artifactId>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-javadoc-plugin</artifactId>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
      </plugin>
    </plugins>
  </build>

</project>

POM Entry

<dependency>
   <groupId>org.apache.beam</groupId>
   <artifactId>beam-runners-spark</artifactId>
   <version>0.2.0-incubating</version>
</dependency>

Download

If you think the following beam-runners-spark-0.2.0.jar downloaded from the Maven central repository is inappropriate — for example, if it contains malicious code/tools or violates a copyright — please email the site administrator. Thanks.



Download beam-runners-spark-0.2.0.jar file




Previous | Next

Related