beam-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From echauc...@apache.org
Subject [beam] 01/01: [TO UPGRADE WITH THE 2 SPARK RUNNERS BEFORE MERGE] Change the wordcount build to test on new spark runner
Date Fri, 01 Mar 2019 10:26:44 GMT
This is an automated email from the ASF dual-hosted git repository.

echauchot pushed a commit to branch spark-runner_structured-streaming
in repository https://gitbox.apache.org/repos/asf/beam.git

commit 46914af5de53254bca46cc9b2da5db28649ac397
Author: Etienne Chauchot <echauchot@apache.org>
AuthorDate: Thu Feb 28 14:35:05 2019 +0100

    [TO UPGRADE WITH THE 2 SPARK RUNNERS BEFORE MERGE] Change the wordcount build to test on
new spark runner
---
 examples/java/build.gradle | 35 ++++++++++++++++++++---------------
 1 file changed, 20 insertions(+), 15 deletions(-)

diff --git a/examples/java/build.gradle b/examples/java/build.gradle
index 7b7fe3e..69b4a9d 100644
--- a/examples/java/build.gradle
+++ b/examples/java/build.gradle
@@ -37,18 +37,23 @@ def preCommitRunners = ["directRunner", "flinkRunner", "sparkRunner"]
 for (String runner : preCommitRunners) {
   configurations.create(runner + "PreCommit")
 }
-configurations.sparkRunnerPreCommit {
-  // Ban certain dependencies to prevent a StackOverflow within Spark
-  // because JUL -> SLF4J -> JUL, and similarly JDK14 -> SLF4J -> JDK14
-  exclude group: "org.slf4j", module: "jul-to-slf4j"
-  exclude group: "org.slf4j", module: "slf4j-jdk14"
-}
-
 dependencies {
   compile library.java.guava
   shadow project(path: ":beam-sdks-java-core", configuration: "shadow")
   shadow project(path: ":beam-sdks-java-extensions-google-cloud-platform-core", configuration:
"shadow")
   shadow project(path: ":beam-sdks-java-io-google-cloud-platform", configuration: "shadow")
+  shadow project(path: ":beam-runners-spark-structured-streaming", configuration: "shadow")
+  shadow library.java.guava
+  shadow library.java.slf4j_api
+  shadow library.java.joda_time
+  provided library.java.spark_sql
+  provided library.java.commons_compress
+  provided library.java.commons_lang3
+  provided library.java.commons_io_2x
+  provided library.java.hamcrest_core
+  provided library.java.hamcrest_library
+  shadow "com.fasterxml.jackson.module:jackson-module-scala_2.11:2.9.8"
+
   shadow library.java.google_api_client
   shadow library.java.google_api_services_bigquery
   shadow library.java.google_http_client
@@ -79,13 +84,7 @@ dependencies {
   // apexRunnerPreCommit project(path: ":beam-runners-apex", configuration: "shadow")
   directRunnerPreCommit project(path: ":beam-runners-direct-java", configuration: "shadow")
   flinkRunnerPreCommit project(path: ":beam-runners-flink_2.11", configuration: "shadow")
-  // TODO: Make the netty version used configurable, we add netty-all 4.1.17.Final so it
appears on the classpath
-  // before 4.1.8.Final defined by Apache Beam
-  sparkRunnerPreCommit "io.netty:netty-all:4.1.17.Final"
-  sparkRunnerPreCommit project(path: ":beam-runners-spark", configuration: "shadow")
-  sparkRunnerPreCommit project(path: ":beam-sdks-java-io-hadoop-file-system", configuration:
"shadow")
-  sparkRunnerPreCommit library.java.spark_streaming
-  sparkRunnerPreCommit library.java.spark_core
+  sparkRunnerPreCommit project(path: ":beam-runners-spark-structured-streaming", configuration:
"shadow")
 }
 
 /*
@@ -96,7 +95,7 @@ def preCommitRunnerClass = [
   apexRunner: "org.apache.beam.runners.apex.TestApexRunner",
   directRunner: "org.apache.beam.runners.direct.DirectRunner",
   flinkRunner: "org.apache.beam.runners.flink.TestFlinkRunner",
-  sparkRunner: "org.apache.beam.runners.spark.TestSparkRunner",
+  sparkRunner: "org.apache.beam.runners.spark.structuredstreaming.SparkRunner",
 ]
 def gcpProject = project.findProperty('gcpProject') ?: 'apache-beam-testing'
 def gcsTempRoot = project.findProperty('gcsTempRoot') ?: 'gs://temp-storage-for-end-to-end-tests/'
@@ -124,5 +123,11 @@ task preCommit() {
   for (String runner : preCommitRunners) {
     dependsOn runner + "PreCommit"
   }
+
+  configurations.all {
+    // Ban certain dependencies to prevent a StackOverflow within Spark
+    // because JUL -> SLF4J -> JUL, and similarly JDK14 -> SLF4J -> JDK14
+    exclude group: "org.slf4j", module: "slf4j-jdk14"
+  }
 }
 


Mime
View raw message