From: gezapeti@apache.org
To: commits@oozie.apache.org
Subject: oozie git commit: OOZIE-3036 Spark 2.2.0 support: tell Spark not to get any delegation tokens (andras.piros via gezapeti)
Date: Tue, 15 Aug 2017 09:46:19 +0000 (UTC)

Repository: oozie
Updated Branches:
  refs/heads/master 630171297 -> 04a6d0541


OOZIE-3036 Spark 2.2.0 support: tell Spark not to get any delegation tokens (andras.piros via gezapeti)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/04a6d054
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/04a6d054
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/04a6d054

Branch: refs/heads/master
Commit: 04a6d0541e83ab457db7c79e86986bb69a8eb432
Parents: 6301712
Author: Gezapeti Cseh
Authored: Tue Aug 15 11:45:39 2017 +0200
Committer: Gezapeti Cseh
Committed: Tue Aug 15 11:46:14 2017 +0200

----------------------------------------------------------------------
 release-log.txt                                 |  1 +
 .../oozie/action/hadoop/SparkArgsExtractor.java | 68 ++++++++++++++++----
 .../action/hadoop/TestSparkArgsExtractor.java   | 20 ++++++
 3 files changed, 77 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
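The heart of the change: SparkArgsExtractor now switches off every delegation-token provider Spark knows about, covering both the pre-2.2 spark.yarn.security.tokens.* keys and their Spark 2.2.0 spark.yarn.security.credentials.* replacements, and it only appends a "--conf <key>=false" default for keys the user has not already set in spark-opts. Below is a minimal, self-contained sketch of that pattern, assuming nothing beyond the diff itself; the class and method names (DelegationTokenOptsSketch, appendTokenDisabling) are illustrative and not part of the patch.

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only: a stripped-down version of the pattern used in
// SparkArgsExtractor. Oozie obtains the delegation tokens itself, so every
// known token/credential provider is switched off unless the user has
// already configured that key via spark-opts.
public class DelegationTokenOptsSketch {

    // Spark 2.1.x keys plus their Spark 2.2.0 "credentials" replacements.
    private static final String[] TOKEN_KEYS = {
            "spark.yarn.security.tokens.hadoopfs.enabled",
            "spark.yarn.security.tokens.hive.enabled",
            "spark.yarn.security.tokens.hbase.enabled",
            "spark.yarn.security.credentials.hadoopfs.enabled",
            "spark.yarn.security.credentials.hive.enabled",
            "spark.yarn.security.credentials.hbase.enabled",
    };

    static List<String> appendTokenDisabling(final List<String> userOpts) {
        final List<String> sparkArgs = new ArrayList<>(userOpts);
        for (final String key : TOKEN_KEYS) {
            boolean userProvided = false;
            for (final String opt : userOpts) {
                if (opt.startsWith(key)) {      // user already set it: leave it alone
                    userProvided = true;
                    break;
                }
            }
            if (!userProvided) {                // otherwise force it to false
                sparkArgs.add("--conf");
                sparkArgs.add(key + "=" + Boolean.toString(false));
            }
        }
        return sparkArgs;
    }
}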
http://git-wip-us.apache.org/repos/asf/oozie/blob/04a6d054/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index 0b509bf..f3a1b46 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 5.0.0 release (trunk - unreleased)
 
+OOZIE-3036 Spark 2.2.0 support: tell Spark not to get any delegation tokens (andras.piros via gezapeti)
 OOZIE-3028 Oozie Pig Action fails with no python dependencies (dbist13 via rohini)
 OOZIE-2670 Upgrade Hbase to 1.2 (gezapeti via asasvari)
 OOZIE-2608 Comma in oozie.service.JPAService.jdbc.password value results in authentication error (gezapeti via asasvari)

http://git-wip-us.apache.org/repos/asf/oozie/blob/04a6d054/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
index ffc95f9..052950f 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
@@ -41,8 +41,12 @@ class SparkArgsExtractor {
     private static final String FILES_OPTION = "--files";
     private static final String ARCHIVES_OPTION = "--archives";
     private static final String LOG4J_CONFIGURATION_JAVA_OPTION = "-Dlog4j.configuration=";
-    private static final String HIVE_SECURITY_TOKEN = "spark.yarn.security.tokens.hive.enabled";
-    private static final String HBASE_SECURITY_TOKEN = "spark.yarn.security.tokens.hbase.enabled";
+    private static final String SECURITY_TOKENS_HADOOPFS = "spark.yarn.security.tokens.hadoopfs.enabled";
+    private static final String SECURITY_TOKENS_HIVE = "spark.yarn.security.tokens.hive.enabled";
+    private static final String SECURITY_TOKENS_HBASE = "spark.yarn.security.tokens.hbase.enabled";
+    private static final String SECURITY_CREDENTIALS_HADOOPFS = "spark.yarn.security.credentials.hadoopfs.enabled";
+    private static final String SECURITY_CREDENTIALS_HIVE = "spark.yarn.security.credentials.hive.enabled";
+    private static final String SECURITY_CREDENTIALS_HBASE = "spark.yarn.security.credentials.hbase.enabled";
     private static final String PWD = "$PWD" + File.separator + "*";
     private static final String MASTER_OPTION = "--master";
     private static final String MODE_OPTION = "--deploy-mode";
@@ -115,8 +119,15 @@ class SparkArgsExtractor {
         if (jarPath != null && jarPath.endsWith(".py")) {
             pySpark = true;
         }
-        boolean addedHiveSecurityToken = false;
-        boolean addedHBaseSecurityToken = false;
+
+        boolean addedSecurityTokensHadoopFS = false;
+        boolean addedSecurityTokensHive = false;
+        boolean addedSecurityTokensHBase = false;
+
+        boolean addedSecurityCredentialsHadoopFS = false;
+        boolean addedSecurityCredentialsHive = false;
+        boolean addedSecurityCredentialsHBase = false;
+
         boolean addedLog4jDriverSettings = false;
         boolean addedLog4jExecutorSettings = false;
         final StringBuilder driverClassPath = new StringBuilder();
@@ -146,12 +157,27 @@ class SparkArgsExtractor {
                     addToSparkArgs = false;
                 }
             }
-            if (opt.startsWith(HIVE_SECURITY_TOKEN)) {
-                addedHiveSecurityToken = true;
+
+            if (opt.startsWith(SECURITY_TOKENS_HADOOPFS)) {
+                addedSecurityTokensHadoopFS = true;
+            }
+            if (opt.startsWith(SECURITY_TOKENS_HIVE)) {
+                addedSecurityTokensHive = true;
             }
-            if (opt.startsWith(HBASE_SECURITY_TOKEN)) {
-                addedHBaseSecurityToken = true;
+            if (opt.startsWith(SECURITY_TOKENS_HBASE)) {
+                addedSecurityTokensHBase = true;
             }
+
+            if (opt.startsWith(SECURITY_CREDENTIALS_HADOOPFS)) {
+                addedSecurityCredentialsHadoopFS = true;
+            }
+            if (opt.startsWith(SECURITY_CREDENTIALS_HIVE)) {
+                addedSecurityCredentialsHive = true;
+            }
+            if (opt.startsWith(SECURITY_CREDENTIALS_HBASE)) {
+                addedSecurityCredentialsHBase = true;
+            }
+
             if (opt.startsWith(EXECUTOR_EXTRA_JAVA_OPTIONS) || opt.startsWith(DRIVER_EXTRA_JAVA_OPTIONS)) {
                 if (!opt.contains(LOG4J_CONFIGURATION_JAVA_OPTION)) {
                     opt += " " + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS;
@@ -223,14 +249,32 @@ class SparkArgsExtractor {
             sparkArgs.add(SPARK_YARN_TAGS + OPT_SEPARATOR + actionConf.get(LauncherMain.MAPREDUCE_JOB_TAGS));
         }
 
-        if (!addedHiveSecurityToken) {
+        if (!addedSecurityTokensHadoopFS) {
             sparkArgs.add(CONF_OPTION);
-            sparkArgs.add(HIVE_SECURITY_TOKEN + OPT_SEPARATOR + Boolean.toString(false));
+            sparkArgs.add(SECURITY_TOKENS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false));
         }
-        if (!addedHBaseSecurityToken) {
+        if (!addedSecurityTokensHive) {
             sparkArgs.add(CONF_OPTION);
-            sparkArgs.add(HBASE_SECURITY_TOKEN + OPT_SEPARATOR + Boolean.toString(false));
+            sparkArgs.add(SECURITY_TOKENS_HIVE + OPT_SEPARATOR + Boolean.toString(false));
         }
+        if (!addedSecurityTokensHBase) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_TOKENS_HBASE + OPT_SEPARATOR + Boolean.toString(false));
+        }
+
+        if (!addedSecurityCredentialsHadoopFS) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_CREDENTIALS_HADOOPFS + OPT_SEPARATOR + Boolean.toString(false));
+        }
+        if (!addedSecurityCredentialsHive) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_CREDENTIALS_HIVE + OPT_SEPARATOR + Boolean.toString(false));
+        }
+        if (!addedSecurityCredentialsHBase) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_CREDENTIALS_HBASE + OPT_SEPARATOR + Boolean.toString(false));
+        }
+
         if (!addedLog4jExecutorSettings) {
             sparkArgs.add(CONF_OPTION);
             sparkArgs.add(EXECUTOR_EXTRA_JAVA_OPTIONS + LOG4J_CONFIGURATION_JAVA_OPTION + SparkMain.SPARK_LOG4J_PROPS);
http://git-wip-us.apache.org/repos/asf/oozie/blob/04a6d054/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
index d0541ca..574bf24 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
@@ -73,8 +73,12 @@ public class TestSparkArgsExtractor {
                 "--conf", "user.property.after.spark.driver.extraClassPath=ddd",
                 "--conf", "spark.executor.extraJavaOptions=-XX:+HeapDumpOnOutOfMemoryError " +
                         "-XX:HeapDumpPath=/tmp -Dlog4j.configuration=spark-log4j.properties",
+                "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false",
                 "--conf", "spark.yarn.security.tokens.hive.enabled=false",
                 "--conf", "spark.yarn.security.tokens.hbase.enabled=false",
+                "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false",
+                "--conf", "spark.yarn.security.credentials.hive.enabled=false",
+                "--conf", "spark.yarn.security.credentials.hbase.enabled=false",
                 "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                 "--verbose",
                 "/lib/test.jar",
@@ -113,8 +117,12 @@ public class TestSparkArgsExtractor {
                         "-XX:HeapDumpPath=/tmp -Dlog4j.configuration=spark-log4j.properties",
                 "--conf", "spark.executor.extraClassPath=aaa:$PWD/*",
                 "--conf", "spark.driver.extraClassPath=ccc:$PWD/*",
+                "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false",
                 "--conf", "spark.yarn.security.tokens.hive.enabled=false",
                 "--conf", "spark.yarn.security.tokens.hbase.enabled=false",
+                "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false",
+                "--conf", "spark.yarn.security.credentials.hive.enabled=false",
+                "--conf", "spark.yarn.security.credentials.hbase.enabled=false",
                 "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                 "--files", "spark-log4j.properties,hive-site.xml",
                 "--conf", "spark.yarn.jar=null",
@@ -148,8 +156,12 @@ public class TestSparkArgsExtractor {
                 "--class", "org.apache.oozie.example.SparkFileCopy",
"--conf", "spark.executor.extraClassPath=$PWD/*", "--conf", "spark.driver.extraClassPath=$PWD/*", + "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false", "--conf", "spark.yarn.security.tokens.hive.enabled=false", "--conf", "spark.yarn.security.tokens.hbase.enabled=false", + "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false", + "--conf", "spark.yarn.security.credentials.hive.enabled=false", + "--conf", "spark.yarn.security.credentials.hbase.enabled=false", "--conf", "spark.executor.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties", "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties", "--files", "spark-log4j.properties,hive-site.xml,aaa,ccc", @@ -182,8 +194,12 @@ public class TestSparkArgsExtractor { "--class", "org.apache.oozie.example.SparkFileCopy", "--conf", "spark.executor.extraClassPath=$PWD/*", "--conf", "spark.driver.extraClassPath=aaa:$PWD/*", + "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false", "--conf", "spark.yarn.security.tokens.hive.enabled=false", "--conf", "spark.yarn.security.tokens.hbase.enabled=false", + "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false", + "--conf", "spark.yarn.security.credentials.hive.enabled=false", + "--conf", "spark.yarn.security.credentials.hbase.enabled=false", "--conf", "spark.executor.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties", "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties", "--files", "spark-log4j.properties,hive-site.xml", @@ -221,8 +237,12 @@ public class TestSparkArgsExtractor { "-Dlog4j.configuration=spark-log4j.properties", "--conf", "spark.executor.extraClassPath=$PWD/*", "--conf", "spark.driver.extraClassPath=$PWD/*", + "--conf", "spark.yarn.security.tokens.hadoopfs.enabled=false", "--conf", "spark.yarn.security.tokens.hive.enabled=false", "--conf", "spark.yarn.security.tokens.hbase.enabled=false", + "--conf", "spark.yarn.security.credentials.hadoopfs.enabled=false", + "--conf", "spark.yarn.security.credentials.hive.enabled=false", + "--conf", "spark.yarn.security.credentials.hbase.enabled=false", "--files", "spark-log4j.properties,hive-site.xml", "--conf", "spark.yarn.jar=null", "--verbose",