oozie-commits mailing list archives

From gezap...@apache.org
Subject oozie git commit: OOZIE-3036 Spark 2.2.0 support: tell Spark not to get any delegation tokens (andras.piros via gezapeti)
Date Tue, 15 Aug 2017 09:46:19 GMT
Repository: oozie
Updated Branches:
  refs/heads/master 630171297 -> 04a6d0541


OOZIE-3036 Spark 2.2.0 support: tell Spark not to get any delegation tokens (andras.piros via gezapeti)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/04a6d054
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/04a6d054
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/04a6d054

Branch: refs/heads/master
Commit: 04a6d0541e83ab457db7c79e86986bb69a8eb432
Parents: 6301712
Author: Gezapeti Cseh <gezapeti@gmail.com>
Authored: Tue Aug 15 11:45:39 2017 +0200
Committer: Gezapeti Cseh <gezapeti@gmail.com>
Committed: Tue Aug 15 11:46:14 2017 +0200

----------------------------------------------------------------------
 release-log.txt                                 |  1 +
 .../oozie/action/hadoop/SparkArgsExtractor.java | 68 ++++++++++++++++----
 .../action/hadoop/TestSparkArgsExtractor.java   | 20 ++++++
 3 files changed, 77 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
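In summary: SparkArgsExtractor now covers both naming schemes of Spark's YARN security properties. It tracks whether the action's spark-opts already set any of the spark.yarn.security.tokens.{hadoopfs,hive,hbase}.enabled properties or their spark.yarn.security.credentials.{hadoopfs,hive,hbase}.enabled counterparts recognized by Spark 2.2, and for any that are missing it appends --conf <property>=false so Spark does not try to obtain delegation tokens on its own. Below is a minimal standalone sketch of that disable-unless-overridden pattern, not the production class; the property names come from the diff, while the class, method and variable names are illustrative.

import java.util.ArrayList;
import java.util.List;

// Standalone sketch of the disable-unless-overridden pattern from the change above.
// Property names are taken from the diff; class, method and variable names are illustrative.
public final class DelegationTokenOptsSketch {

    private static final String[] SECURITY_PROPERTIES = {
            "spark.yarn.security.tokens.hadoopfs.enabled",
            "spark.yarn.security.tokens.hive.enabled",
            "spark.yarn.security.tokens.hbase.enabled",
            "spark.yarn.security.credentials.hadoopfs.enabled",
            "spark.yarn.security.credentials.hive.enabled",
            "spark.yarn.security.credentials.hbase.enabled"
    };

    // Returns the user's options plus "--conf <property>=false" for every
    // security property the user did not already set.
    static List<String> disableDelegationTokens(final List<String> userOpts) {
        final List<String> sparkArgs = new ArrayList<>(userOpts);
        for (final String property : SECURITY_PROPERTIES) {
            boolean alreadySet = false;
            for (final String opt : userOpts) {
                if (opt.startsWith(property)) {
                    alreadySet = true;
                    break;
                }
            }
            if (!alreadySet) {
                sparkArgs.add("--conf");
                sparkArgs.add(property + "=false");
            }
        }
        return sparkArgs;
    }

    public static void main(final String[] args) {
        // The user already set the Hive credentials property, so only the
        // remaining five properties get an explicit "=false".
        final List<String> userOpts = new ArrayList<>();
        userOpts.add("--conf");
        userOpts.add("spark.yarn.security.credentials.hive.enabled=true");
        System.out.println(disableDelegationTokens(userOpts));
    }
}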


http://git-wip-us.apache.org/repos/asf/oozie/blob/04a6d054/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 0b509bf..f3a1b46 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 5.0.0 release (trunk - unreleased)
 
+OOZIE-3036 Spark 2.2.0 support: tell Spark not to get any delegation tokens (andras.piros via gezapeti)
 OOZIE-3028 Oozie Pig Action fails with no python dependencies (dbist13 via rohini)
 OOZIE-2670 Upgrade Hbase to 1.2 (gezapeti via asasvari)
 OOZIE-2608 Comma in oozie.service.JPAService.jdbc.password value results in authentication error (gezapeti via asasvari)

http://git-wip-us.apache.org/repos/asf/oozie/blob/04a6d054/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
index ffc95f9..052950f 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
@@ -41,8 +41,12 @@ class SparkArgsExtractor {
     private static final String FILES_OPTION = "--files";
     private static final String ARCHIVES_OPTION = "--archives";
     private static final String LOG4J_CONFIGURATION_JAVA_OPTION = "-Dlog4j.configuration=";
-    private static final String HIVE_SECURITY_TOKEN = "spark.yarn.security.tokens.hive.enabled";
-    private static final String HBASE_SECURITY_TOKEN = "spark.yarn.security.tokens.hbase.enabled";
+    private static final String SECURITY_TOKENS_HADOOPFS = "spark.yarn.security.tokens.hadoopfs.enabled";
+    private static final String SECURITY_TOKENS_HIVE = "spark.yarn.security.tokens.hive.enabled";
+    private static final String SECURITY_TOKENS_HBASE = "spark.yarn.security.tokens.hbase.enabled";
+    private static final String SECURITY_CREDENTIALS_HADOOPFS = "spark.yarn.security.credentials.hadoopfs.enabled";
+    private static final String SECURITY_CREDENTIALS_HIVE = "spark.yarn.security.credentials.hive.enabled";
+    private static final String SECURITY_CREDENTIALS_HBASE = "spark.yarn.security.credentials.hbase.enabled";
     private static final String PWD = "$PWD" + File.separator + "*";
     private static final String MASTER_OPTION = "--master";
     private static final String MODE_OPTION = "--deploy-mode";
@@ -115,8 +119,15 @@ class SparkArgsExtractor {
         if (jarPath != null && jarPath.endsWith(".py")) {
             pySpark = true;
         }
-        boolean addedHiveSecurityToken = false;
-        boolean addedHBaseSecurityToken = false;
+
+        boolean addedSecurityTokensHadoopFS = false;
+        boolean addedSecurityTokensHive = false;
+        boolean addedSecurityTokensHBase = false;
+
+        boolean addedSecurityCredentialsHadoopFS = false;
+        boolean addedSecurityCredentialsHive = false;
+        boolean addedSecurityCredentialsHBase = false;
+
         boolean addedLog4jDriverSettings = false;
         boolean addedLog4jExecutorSettings = false;
         final StringBuilder driverClassPath = new StringBuilder();
@@ -146,12 +157,27 @@ class SparkArgsExtractor {
                         addToSparkArgs = false;
                     }
                 }
-                if (opt.startsWith(HIVE_SECURITY_TOKEN)) {
-                    addedHiveSecurityToken = true;
+
+                if (opt.startsWith(SECURITY_TOKENS_HADOOPFS)) {
+                    addedSecurityTokensHadoopFS = true;
+                }
+                if (opt.startsWith(SECURITY_TOKENS_HIVE)) {
+                    addedSecurityTokensHive = true;
                 }
-                if (opt.startsWith(HBASE_SECURITY_TOKEN)) {
-                    addedHBaseSecurityToken = true;
+                if (opt.startsWith(SECURITY_TOKENS_HBASE)) {
+                    addedSecurityTokensHBase = true;
                 }
+
+                if (opt.startsWith(SECURITY_CREDENTIALS_HADOOPFS)) {
+                    addedSecurityCredentialsHadoopFS = true;
+                }
+                if (opt.startsWith(SECURITY_CREDENTIALS_HIVE)) {
+                    addedSecurityCredentialsHive = true;
+                }
+                if (opt.startsWith(SECURITY_CREDENTIALS_HBASE)) {
+                    addedSecurityCredentialsHBase = true;
+                }
+
                 if (opt.startsWith(EXECUTOR_EXTRA_JAVA_OPTIONS) || opt.startsWith(DRIVER_EXTRA_JAVA_OPTIONS)) {
                     if (!opt.contains(LOG4J_CONFIGURATION_JAVA_OPTION)) {
                         opt += " " + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS;
@@ -223,14 +249,32 @@ class SparkArgsExtractor {
             sparkArgs.add(SPARK_YARN_TAGS + OPT_SEPARATOR + actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS));
         }
 
-        if (!addedHiveSecurityToken) {
+        if (!addedSecurityTokensHadoopFS) {
             sparkArgs.add(CONF_OPTION);
-            sparkArgs.add(HIVE_SECURITY_TOKEN + OPT_SEPARATOR + Boolean.toString(false));
+            sparkArgs.add(SECURITY_TOKENS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false));
         }
-        if (!addedHBaseSecurityToken) {
+        if (!addedSecurityTokensHive) {
             sparkArgs.add(CONF_OPTION);
-            sparkArgs.add(HBASE_SECURITY_TOKEN + OPT_SEPARATOR + Boolean.toString(false));
+            sparkArgs.add(SECURITY_TOKENS_HIVE + OPT_SEPARATOR + Boolean.toString(false));
         }
+        if (!addedSecurityTokensHBase) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_TOKENS_HBASE + OPT_SEPARATOR + Boolean.toString(false));
+        }
+
+        if (!addedSecurityCredentialsHadoopFS) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_CREDENTIALS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false));
+        }
+        if (!addedSecurityCredentialsHive) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_CREDENTIALS_HIVE + OPT_SEPARATOR + Boolean.toString(false));
+        }
+        if (!addedSecurityCredentialsHBase) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_CREDENTIALS_HBASE + OPT_SEPARATOR + Boolean.toString(false));
+        }
+
         if (!addedLog4jExecutorSettings) {
             sparkArgs.add(CONF_OPTION);
             sparkArgs.add(EXECUTOR_EXTRA_JAVA_OPTIONS + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS);

http://git-wip-us.apache.org/repos/asf/oozie/blob/04a6d054/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
index d0541ca..574bf24 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
@@ -73,8 +73,12 @@ public class TestSparkArgsExtractor {
                         "--conf", "user.property.after.spark.driver.extraClassPath=ddd",
                         "--conf", "spark.executor.extraJavaOptions=-XX:+HeapDumpOnOutOfMemoryError
" +
                                 "-XX:HeapDumpPath=/tmp -Dlog4j.configuration=spark-log4j.properties",
+                        "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hive.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hbase.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hive.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hbase.enabled=false",
                         "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                         "--verbose",
                         "/lib/test.jar",
@@ -113,8 +117,12 @@ public class TestSparkArgsExtractor {
                                 "-XX:HeapDumpPath=/tmp -Dlog4j.configuration=spark-log4j.properties",
                         "--conf", "spark.executor.extraClassPath=aaa:$PWD/*",
                         "--conf", "spark.driver.extraClassPath=ccc:$PWD/*",
+                        "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hive.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hbase.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hive.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hbase.enabled=false",
                         "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                         "--files", "spark-log4j.properties,hive-site.xml",
                         "--conf", "spark.yarn.jar=null",
@@ -148,8 +156,12 @@ public class TestSparkArgsExtractor {
                         "--class", "org.apache.oozie.example.SparkFileCopy",
                         "--conf", "spark.executor.extraClassPath=$PWD/*",
                         "--conf", "spark.driver.extraClassPath=$PWD/*",
+                        "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hive.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hbase.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hive.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hbase.enabled=false",
                         "--conf", "spark.executor.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                         "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                         "--files", "spark-log4j.properties,hive-site.xml,aaa,ccc",
@@ -182,8 +194,12 @@ public class TestSparkArgsExtractor {
                         "--class", "org.apache.oozie.example.SparkFileCopy",
                         "--conf", "spark.executor.extraClassPath=$PWD/*",
                         "--conf", "spark.driver.extraClassPath=aaa:$PWD/*",
+                        "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hive.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hbase.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hive.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hbase.enabled=false",
                         "--conf", "spark.executor.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                         "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                         "--files", "spark-log4j.properties,hive-site.xml",
@@ -221,8 +237,12 @@ public class TestSparkArgsExtractor {
                                 "-Dlog4j.configuration=spark-log4j.properties",
                         "--conf", "spark.executor.extraClassPath=$PWD/*",
                         "--conf", "spark.driver.extraClassPath=$PWD/*",
+                        "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hive.enabled=false",
                         "--conf", "spark.yarn.security.tokens.hbase.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hive.enabled=false",
+                        "--conf", "spark.yarn.security.credentials.hbase.enabled=false",
                         "--files", "spark-log4j.properties,hive-site.xml",
                         "--conf", "spark.yarn.jar=null",
                         "--verbose",

