knox-commits mailing list archives

From pzamp...@apache.org
Subject [knox] branch master updated: KNOX-1880 - Support doAs for Cloudera Manager service discovery API interactions
Date Thu, 20 Jun 2019 19:50:51 GMT
This is an automated email from the ASF dual-hosted git repository.

pzampino pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/knox.git


The following commit(s) were added to refs/heads/master by this push:
     new 9f3e1a3  KNOX-1880 - Support doAs for Cloudera Manager service discovery API interactions
9f3e1a3 is described below

commit 9f3e1a3c354646c17b4d79dce3e5557889afe78c
Author: pzampino <pzampino@cloudera.com>
AuthorDate: Tue Jun 18 16:53:00 2019 -0400

    KNOX-1880 - Support doAs for Cloudera Manager service discovery API interactions
---
 gateway-discovery-cm/pom.xml                       |   8 +-
 .../cm/ClouderaManagerServiceDiscovery.java        | 247 +++-----------------
 .../ClouderaManagerServiceDiscoveryMessages.java   |  29 +--
 .../topology/discovery/cm/DiscoveryApiClient.java  |  47 +++-
 .../topology/discovery/cm/auth/AuthUtils.java      |  69 ++++++
 .../discovery/cm/auth/ConfigurationFactory.java    |  64 ++++++
 .../discovery/cm/auth/JAASClientConfig.java        |  51 ++++
 .../discovery/cm/auth/SpnegoAuthInterceptor.java   | 248 ++++++++++++++++++++
 .../cm/ClouderaManagerServiceDiscoveryTest.java    | 256 ++++++++++++++++-----
 9 files changed, 729 insertions(+), 290 deletions(-)
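
In short, this commit replaces the per-call Subject.doAs wrapping inside
ClouderaManagerServiceDiscovery with two pieces in DiscoveryApiClient: a SPNEGO
interceptor registered on the OkHttp client when Kerberos is enabled, and a
buildUrl override that appends a doAs=<discovery user> query parameter to every
Cloudera Manager API request. A minimal sketch of how the pieces fit together
(the surrounding client setup and the "admin" user are assumed for illustration;
class and method names are taken from the diff that follows):

    // When Kerberos is enabled and a subject can be obtained from the JAAS
    // login configuration, attach the SPNEGO interceptor to the OkHttp client.
    Subject subject = AuthUtils.getKerberosSubject();
    if (subject != null) {
      getHttpClient().interceptors().add(new SpnegoAuthInterceptor(subject));
    }

    // Every request built by the client then carries the configured discovery
    // user as a doAs query parameter, so a services read would look roughly like:
    //   GET /api/v32/clusters/test-cluster-1/services?view=summary&doAs=admin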

diff --git a/gateway-discovery-cm/pom.xml b/gateway-discovery-cm/pom.xml
index 46371f1..90a6102 100644
--- a/gateway-discovery-cm/pom.xml
+++ b/gateway-discovery-cm/pom.xml
@@ -47,7 +47,6 @@
             <artifactId>gateway-util-configinjector</artifactId>
             <scope>compile</scope>
         </dependency>
-
         <dependency>
             <groupId>com.cloudera.api.swagger</groupId>
             <artifactId>cloudera-manager-api-swagger</artifactId>
@@ -56,7 +55,12 @@
         <dependency>
             <groupId>com.squareup.okhttp</groupId>
             <artifactId>okhttp</artifactId>
-            <scope>test</scope>
+            <version>2.7.5</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+            <scope>compile</scope>
         </dependency>
     </dependencies>
 
diff --git a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscovery.java b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscovery.java
index 948f9d3..6b53095 100644
--- a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscovery.java
+++ b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscovery.java
@@ -30,21 +30,11 @@ import com.cloudera.api.swagger.model.ApiServiceConfig;
 import com.cloudera.api.swagger.model.ApiServiceList;
 import org.apache.knox.gateway.config.GatewayConfig;
 import org.apache.knox.gateway.i18n.messages.MessagesFactory;
-import org.apache.knox.gateway.security.SubjectUtils;
 import org.apache.knox.gateway.services.security.AliasService;
 import org.apache.knox.gateway.topology.discovery.GatewayService;
 import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
 import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
 
-import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.LoginContext;
-import java.io.File;
-import java.lang.reflect.Constructor;
-import java.net.URI;
-import java.net.URL;
-import java.security.PrivilegedAction;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -64,8 +54,6 @@ public class ClouderaManagerServiceDiscovery implements ServiceDiscovery {
   private static final ClouderaManagerServiceDiscoveryMessages log =
                                         MessagesFactory.get(ClouderaManagerServiceDiscoveryMessages.class);
 
-  private static final String JGSS_LOGIN_MODULE = "com.sun.security.jgss.initiate";
-
   static final String API_PATH = "api/v32";
 
   private static final String CLUSTER_TYPE_ANY = "any";
@@ -152,34 +140,17 @@ public class ClouderaManagerServiceDiscovery implements ServiceDiscovery {
   private static List<ApiCluster> getClusters(DiscoveryApiClient client) {
     List<ApiCluster> clusters = new ArrayList<>();
     try {
-      ApiClusterList clusterList = null;
-
       ClustersResourceApi clustersResourceApi = new ClustersResourceApi(client);
-      if (client.isKerberos()) {
-        clusterList =
-            Subject.doAs(getSubject(), (PrivilegedAction<ApiClusterList>) () -> {
-              try {
-                return clustersResourceApi.readClusters(CLUSTER_TYPE_ANY, VIEW_SUMMARY);
-              } catch (Exception e) {
-                log.clusterDiscoveryError(CLUSTER_TYPE_ANY, e);
-              }
-              return null;
-            });
-      } else {
-          clusterList = clustersResourceApi.readClusters(CLUSTER_TYPE_ANY, VIEW_SUMMARY);
-      }
-
+      ApiClusterList clusterList = clustersResourceApi.readClusters(CLUSTER_TYPE_ANY, VIEW_SUMMARY);
       if (clusterList != null) {
         clusters.addAll(clusterList.getItems());
       }
     } catch (Exception e) {
-      log.clusterDiscoveryError(CLUSTER_TYPE_ANY, e); // TODO: PJZ: Better error message here?
+      log.clusterDiscoveryError(CLUSTER_TYPE_ANY, e);
     }
-
     return clusters;
   }
 
-
   private static Cluster discoverCluster(DiscoveryApiClient client, String clusterName) throws ApiException {
     ClouderaManagerCluster cluster = null;
 
@@ -193,20 +164,20 @@ public class ClouderaManagerServiceDiscovery implements ServiceDiscovery {
     Set<ServiceModel> serviceModels = new HashSet<>();
     ServiceLoader<ServiceModelGenerator> loader = ServiceLoader.load(ServiceModelGenerator.class);
 
-    ApiServiceList serviceList = getClusterServices(servicesResourceApi, clusterName, client.isKerberos());
+    ApiServiceList serviceList = getClusterServices(servicesResourceApi, clusterName);
     if (serviceList != null) {
       for (ApiService service : serviceList.getItems()) {
         String serviceName = service.getName();
         log.discoveredService(serviceName, service.getType());
         ApiServiceConfig serviceConfig =
-            getServiceConfig(servicesResourceApi, clusterName, serviceName, client.isKerberos());
-        ApiRoleList roleList = getRoles(rolesResourceApi, clusterName, serviceName, client.isKerberos());
+            getServiceConfig(servicesResourceApi, clusterName, serviceName);
+        ApiRoleList roleList = getRoles(rolesResourceApi, clusterName, serviceName);
         if (roleList != null) {
           for (ApiRole role : roleList.getItems()) {
             String roleName = role.getName();
             log.discoveredServiceRole(roleName, role.getType());
             ApiConfigList roleConfig =
-                getRoleConfig(rolesResourceApi, clusterName, serviceName, roleName, client.isKerberos());
+                getRoleConfig(rolesResourceApi, clusterName, serviceName, roleName);
 
             for (ServiceModelGenerator serviceModelGenerator : loader) {
               if (serviceModelGenerator.handles(service, serviceConfig, role, roleConfig)) {
@@ -226,205 +197,51 @@ public class ClouderaManagerServiceDiscovery implements ServiceDiscovery {
   }
 
   private static ApiServiceList getClusterServices(final ServicesResourceApi servicesResourceApi,
-                                                   final String              clusterName,
-                                                   final boolean             isKerberos) {
-    ApiServiceList serviceList = null;
-    if (isKerberos) {
-      serviceList =
-          Subject.doAs(getSubject(), (PrivilegedAction<ApiServiceList>) () -> {
-            try {
-              return servicesResourceApi.readServices(clusterName, VIEW_SUMMARY);
-            } catch (Exception e) {
-              log.failedToAccessServiceConfigs(clusterName, e);
-            }
-            return null;
-          });
-    } else {
-      try {
-        serviceList = servicesResourceApi.readServices(clusterName, VIEW_SUMMARY);
-      } catch (ApiException e) {
-        log.failedToAccessServiceConfigs(clusterName, e);
-      }
+                                                   final String              clusterName) {
+    ApiServiceList services = null;
+    try {
+      services = servicesResourceApi.readServices(clusterName, VIEW_SUMMARY);
+    } catch (ApiException e) {
+      log.failedToAccessServiceConfigs(clusterName, e);
     }
-    return serviceList;
+    return services;
   }
 
   private static ApiServiceConfig getServiceConfig(final ServicesResourceApi servicesResourceApi,
                                                    final String clusterName,
-                                                   final String serviceName,
-                                                   final boolean isKerberos) {
+                                                   final String serviceName) {
     ApiServiceConfig serviceConfig = null;
-    if (isKerberos) {
-      serviceConfig =
-          Subject.doAs(getSubject(), (PrivilegedAction<ApiServiceConfig>) () -> {
-            try {
-              return servicesResourceApi.readServiceConfig(clusterName, serviceName, VIEW_FULL);
-            } catch (Exception e) {
-              log.failedToAccessServiceConfigs(clusterName, e);
-            }
-            return null;
-          });
-    } else {
-      try {
-        serviceConfig = servicesResourceApi.readServiceConfig(clusterName, serviceName, VIEW_FULL);
-      } catch (Exception e) {
-        log.failedToAccessServiceConfigs(clusterName, e);
-      }
+    try {
+      serviceConfig = servicesResourceApi.readServiceConfig(clusterName, serviceName, VIEW_FULL);
+    } catch (Exception e) {
+      log.failedToAccessServiceConfigs(clusterName, e);
     }
     return serviceConfig;
   }
 
   private static ApiRoleList getRoles(RolesResourceApi rolesResourceApi,
                                       String clusterName,
-                                      String serviceName,
-                                      boolean isKerberos) {
-    ApiRoleList roleList = null;
-
-    if (isKerberos) {
-      roleList =
-          Subject.doAs(getSubject(), (PrivilegedAction<ApiRoleList>) () -> {
-            try {
-              return rolesResourceApi.readRoles(clusterName, serviceName, "", VIEW_SUMMARY);
-            } catch (Exception e) {
-              log.failedToAccessServiceRoleConfigs(clusterName, e);
-            }
-            return null;
-          });
-    } else {
-      try {
-        roleList = rolesResourceApi.readRoles(clusterName, serviceName, "", VIEW_SUMMARY);
-      } catch (ApiException e) {
-        log.failedToAccessServiceRoleConfigs(clusterName, e);
-      }
+                                      String serviceName) {
+    ApiRoleList roles = null;
+    try {
+      roles = rolesResourceApi.readRoles(clusterName, serviceName, "", VIEW_SUMMARY);
+    } catch (Exception e) {
+      log.failedToAccessServiceRoleConfigs(clusterName, e);
     }
-
-    return roleList;
+    return roles;
   }
 
   private static ApiConfigList getRoleConfig(RolesResourceApi rolesResourceApi,
                                              String           clusterName,
                                              String           serviceName,
-                                             String           roleName,
-                                             boolean          isKerberos) {
-    ApiConfigList roleConfig = null;
-    if (isKerberos) {
-      roleConfig =
-          Subject.doAs(getSubject(), (PrivilegedAction<ApiConfigList>) () -> {
-            try {
-              return rolesResourceApi.readRoleConfig(clusterName, roleName, serviceName, VIEW_FULL);
-            } catch (Exception e) {
-              log.failedToAccessServiceRoleConfigs(clusterName, e);
-            }
-            return null;
-          });
-    } else {
-      try {
-        roleConfig = rolesResourceApi.readRoleConfig(clusterName, roleName, serviceName, VIEW_FULL);
-      } catch (ApiException e) {
-        log.failedToAccessServiceRoleConfigs(clusterName, e);
-      }
-    }
-    return roleConfig;
-  }
-
-  private static Subject getSubject() {
-    Subject subject = SubjectUtils.getCurrentSubject();
-    if (subject == null) {
-      subject = login();
-    }
-    return subject;
-  }
-
-  private static Subject login() {
-    Subject subject = null;
-    String kerberosLoginConfig = getKerberosLoginConfig();
-    if (kerberosLoginConfig != null) {
-      try {
-        Configuration jaasConf = new JAASClientConfig((new File(kerberosLoginConfig)).toURI().toURL());
-        LoginContext lc = new LoginContext(JGSS_LOGIN_MODULE,
-                                           null,
-                                           null,
-                                           jaasConf);
-        lc.login();
-        subject = lc.getSubject();
-      } catch (Exception e) {
-        log.failedKerberosLogin(kerberosLoginConfig, JGSS_LOGIN_MODULE, e);
-      }
-    }
-
-    return subject;
-  }
-
-  private static final class JAASClientConfig extends Configuration {
-
-    private static final Configuration baseConfig = Configuration.getConfiguration();
-
-    private Configuration configFile;
-
-    JAASClientConfig(URL configFileURL) throws Exception {
-      if (configFileURL != null) {
-        this.configFile = ConfigurationFactory.create(configFileURL.toURI());
-      }
-    }
-
-    @Override
-    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
-      AppConfigurationEntry[] result = null;
-
-      // Try the config file if it exists
-      if (configFile != null) {
-        result = configFile.getAppConfigurationEntry(name);
-      }
-
-      // If the entry isn't there, delegate to the base configuration
-      if (result == null) {
-        result = baseConfig.getAppConfigurationEntry(name);
-      }
-
-      return result;
-    }
-  }
-
-  @SuppressWarnings("PMD.AvoidAccessibilityAlteration")
-  private static class ConfigurationFactory {
-
-    private static final Class implClazz;
-    static {
-      // Oracle and OpenJDK use the Sun implementation
-      String implName = System.getProperty("java.vendor").contains("IBM") ?
-          "com.ibm.security.auth.login.ConfigFile" : "com.sun.security.auth.login.ConfigFile";
-
-      log.usingJAASConfigurationFileImplementation(implName);
-      Class clazz = null;
-      try {
-        clazz = Class.forName(implName, false, Thread.currentThread().getContextClassLoader());
-      } catch (ClassNotFoundException e) {
-        log.failedToLoadJAASConfigurationFileImplementation(implName, e);
-      }
-
-      implClazz = clazz;
-    }
-
-    static Configuration create(URI uri) {
-      Configuration config = null;
-
-      if (implClazz != null) {
-        try {
-          Constructor ctor = implClazz.getDeclaredConstructor(URI.class);
-          config = (Configuration) ctor.newInstance(uri);
-        } catch (Exception e) {
-          log.failedToInstantiateJAASConfigurationFileImplementation(implClazz.getCanonicalName(), e);
-        }
-      } else {
-        log.noJAASConfigurationFileImplementation();
-      }
-
-      return config;
+                                             String           roleName) {
+    ApiConfigList configList = null;
+    try {
+      configList = rolesResourceApi.readRoleConfig(clusterName, roleName, serviceName, VIEW_FULL);
+    } catch (Exception e) {
+      log.failedToAccessServiceRoleConfigs(clusterName, e);
     }
-  }
-
-  private static String getKerberosLoginConfig() {
-    return System.getProperty(GatewayConfig.KRB5_LOGIN_CONFIG, "");
+    return configList;
   }
 
 }
diff --git a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryMessages.java b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryMessages.java
index 21e2049..8bc81d1 100644
--- a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryMessages.java
+++ b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryMessages.java
@@ -36,8 +36,11 @@ public interface ClouderaManagerServiceDiscoveryMessages {
   @Message(level = MessageLevel.INFO, text = "Discovered service role: {0} ({1})")
   void discoveredServiceRole(String roleName, String roleType);
 
+  @Message(level = MessageLevel.INFO, text = "Attempting to authenticate Knox using {0} ...")
+  void attemptingKerberosLogin(String loginConfigPath);
+
   @Message(level = MessageLevel.ERROR,
-      text = "Failed Kerberos login {0} ({1}): {2}")
+           text = "Failed Kerberos login {0} ({1}): {2}")
   void failedKerberosLogin(String jaasLoginConfig,
                            String entryName,
                            @StackTrace(level = MessageLevel.DEBUG) Exception e);
@@ -46,52 +49,52 @@ public interface ClouderaManagerServiceDiscoveryMessages {
   void usingJAASConfigurationFileImplementation(String implementation);
 
   @Message(level = MessageLevel.ERROR,
-      text = "Failed to load JAAS configuration file implementation {0}: {1}")
+           text = "Failed to load JAAS configuration file implementation {0}: {1}")
   void failedToLoadJAASConfigurationFileImplementation(String implementationName,
                                                        @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
   @Message(level = MessageLevel.ERROR,
-      text = "Failed to instantiate JAAS configuration file implementation {0}: {1}")
+           text = "Failed to instantiate JAAS configuration file implementation {0}: {1}")
   void failedToInstantiateJAASConfigurationFileImplementation(String implementationName,
-                                                       @StackTrace(level = MessageLevel.DEBUG) Exception e);
+                                                              @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
   @Message(level = MessageLevel.ERROR, text = "No JAAS configuration file implementation found.")
   void noJAASConfigurationFileImplementation();
 
   @Message(level = MessageLevel.ERROR,
-      text = "Encountered an error during cluster ({0}) discovery: {1}")
+           text = "Encountered an error during cluster ({0}) discovery: {1}")
   void clusterDiscoveryError(String clusterName, @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
   @Message(level = MessageLevel.ERROR,
-      text = "Failed to access the service configurations for cluster ({0}) discovery")
+           text = "Failed to access the service configurations for cluster ({0}) discovery: {1}")
   void failedToAccessServiceConfigs(String clusterName, @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
   @Message(level = MessageLevel.ERROR,
-      text = "Failed to access the service role configurations for cluster ({0}) discovery")
+           text = "Failed to access the service role configurations for cluster ({0}) discovery: {1}")
   void failedToAccessServiceRoleConfigs(String clusterName, @StackTrace(level = MessageLevel.DEBUG) Exception e);
 
   @Message(level = MessageLevel.ERROR,
-      text = "No address for Cloudera Manager service discovery has been configured.")
+           text = "No address for Cloudera Manager service discovery has been configured.")
   void missingDiscoveryAddress();
 
   @Message(level = MessageLevel.ERROR,
-      text = "No cluster for Cloudera Manager service discovery has been configured.")
+           text = "No cluster for Cloudera Manager service discovery has been configured.")
   void missingDiscoveryCluster();
 
   @Message(level = MessageLevel.ERROR,
-      text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+           text = "Encountered an error attempting to determine the user for alias {0} : {1}")
   void aliasServiceUserError(String alias, String error);
 
   @Message(level = MessageLevel.ERROR,
-      text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+           text = "Encountered an error attempting to determine the password for alias {0} : {1}")
   void aliasServicePasswordError(String alias, String error);
 
   @Message(level = MessageLevel.ERROR,
-      text = "No user configured for Cloudera Manager service discovery.")
+           text = "No user configured for Cloudera Manager service discovery.")
   void aliasServiceUserNotFound();
 
   @Message(level = MessageLevel.ERROR,
-      text = "No password configured for Cloudera Manager service discovery.")
+           text = "No password configured for Cloudera Manager service discovery.")
   void aliasServicePasswordNotFound();
 
 }
diff --git a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/DiscoveryApiClient.java b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/DiscoveryApiClient.java
index 901586c..1f1c854 100644
--- a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/DiscoveryApiClient.java
+++ b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/DiscoveryApiClient.java
@@ -17,18 +17,28 @@
 package org.apache.knox.gateway.topology.discovery.cm;
 
 import com.cloudera.api.swagger.client.ApiClient;
+import com.cloudera.api.swagger.client.Pair;
+import com.cloudera.api.swagger.client.auth.Authentication;
+import com.cloudera.api.swagger.client.auth.HttpBasicAuth;
 import org.apache.knox.gateway.config.ConfigurationException;
 import org.apache.knox.gateway.config.GatewayConfig;
 import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 import org.apache.knox.gateway.services.security.AliasService;
 import org.apache.knox.gateway.services.security.AliasServiceException;
 import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.knox.gateway.topology.discovery.cm.auth.AuthUtils;
+import org.apache.knox.gateway.topology.discovery.cm.auth.SpnegoAuthInterceptor;
+
+import javax.security.auth.Subject;
+import java.util.List;
 
 import static org.apache.knox.gateway.topology.discovery.cm.ClouderaManagerServiceDiscovery.API_PATH;
 import static org.apache.knox.gateway.topology.discovery.cm.ClouderaManagerServiceDiscovery.DEFAULT_USER_ALIAS;
 import static org.apache.knox.gateway.topology.discovery.cm.ClouderaManagerServiceDiscovery.DEFAULT_PWD_ALIAS;
 
-
+/**
+ * Cloudera Manager ApiClient extension for service discovery.
+ */
 public class DiscoveryApiClient extends ApiClient {
 
   private ClouderaManagerServiceDiscoveryMessages log =
@@ -99,6 +109,41 @@ public class DiscoveryApiClient extends ApiClient {
 
     setUsername(username);
     setPassword(password);
+
+    if (isKerberos) {
+      // If there is a Kerberos subject, then add the SPNEGO auth interceptor
+      Subject subject = AuthUtils.getKerberosSubject();
+      if (subject != null) {
+        SpnegoAuthInterceptor spnegoInterceptor = new SpnegoAuthInterceptor(subject);
+        getHttpClient().interceptors().add(spnegoInterceptor);
+      }
+    }
+  }
+
+  @Override
+  public String buildUrl(String path, List<Pair> queryParams) {
+    // If kerberos is enabled, then for every request, we're going to include a doAs query param
+    if (isKerberos()) {
+      String user = getUsername();
+      if (user != null) {
+        queryParams.add(new Pair("doAs", user));
+      }
+    }
+    return super.buildUrl(path, queryParams);
+  }
+
+  /**
+   * @return The username set from the discovery configuration when this instance was initialized.
+   */
+  private String getUsername() {
+    String username = null;
+    Authentication basicAuth = getAuthentication("basic");
+    if (basicAuth != null) {
+      if (basicAuth instanceof HttpBasicAuth) {
+        username = ((HttpBasicAuth) basicAuth).getUsername();
+      }
+    }
+    return username;
   }
 
 }
diff --git a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/AuthUtils.java b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/AuthUtils.java
new file mode 100644
index 0000000..d7404d8
--- /dev/null
+++ b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/AuthUtils.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.cm.auth;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.topology.discovery.cm.ClouderaManagerServiceDiscoveryMessages;
+
+import javax.security.auth.Subject;
+import javax.security.auth.login.Configuration;
+import javax.security.auth.login.LoginContext;
+import java.io.File;
+
+public class AuthUtils {
+
+  private static final String JGSS_LOGIN_MODULE = "com.sun.security.jgss.initiate";
+
+  private static ClouderaManagerServiceDiscoveryMessages log =
+      MessagesFactory.get(ClouderaManagerServiceDiscoveryMessages.class);
+
+
+  public static String getKerberosLoginConfiguration() {
+    return System.getProperty(GatewayConfig.KRB5_LOGIN_CONFIG);
+  }
+
+  public static Configuration createKerberosJAASConfiguration() throws Exception {
+    return createKerberosJAASConfiguration(getKerberosLoginConfiguration());
+  }
+
+  public static Configuration createKerberosJAASConfiguration(String kerberosLoginConfig) throws Exception {
+    if (kerberosLoginConfig == null) {
+      throw new IllegalArgumentException("Invalid login configuration.");
+    }
+    return new JAASClientConfig((new File(kerberosLoginConfig)).toURI().toURL());
+  }
+
+  public static Subject getKerberosSubject() {
+    Subject subject = null;
+    String kerberosLoginConfig = getKerberosLoginConfiguration();
+    if (kerberosLoginConfig != null) {
+      log.attemptingKerberosLogin(kerberosLoginConfig);
+      try {
+        Configuration jaasConf = new JAASClientConfig((new File(kerberosLoginConfig)).toURI().toURL());
+        LoginContext lc = new LoginContext(JGSS_LOGIN_MODULE, null, null, jaasConf);
+        lc.login();
+        subject = lc.getSubject();
+      } catch (Exception e) {
+        log.failedKerberosLogin(kerberosLoginConfig, JGSS_LOGIN_MODULE, e);
+      }
+    }
+
+    return subject;
+  }
+
+}
diff --git a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/ConfigurationFactory.java b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/ConfigurationFactory.java
new file mode 100644
index 0000000..068f67f
--- /dev/null
+++ b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/ConfigurationFactory.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.cm.auth;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.topology.discovery.cm.ClouderaManagerServiceDiscoveryMessages;
+
+import javax.security.auth.login.Configuration;
+import java.lang.reflect.Constructor;
+import java.net.URI;
+
+class ConfigurationFactory {
+
+  private static final ClouderaManagerServiceDiscoveryMessages log =
+      MessagesFactory.get(ClouderaManagerServiceDiscoveryMessages.class);
+
+  private static final Class implClazz;
+  static {
+    // Oracle and OpenJDK use the Sun implementation
+    String implName = System.getProperty("java.vendor").contains("IBM") ?
+        "com.ibm.security.auth.login.ConfigFile" : "com.sun.security.auth.login.ConfigFile";
+
+    log.usingJAASConfigurationFileImplementation(implName);
+    Class clazz = null;
+    try {
+      clazz = Class.forName(implName, false, Thread.currentThread().getContextClassLoader());
+    } catch (ClassNotFoundException e) {
+      log.failedToLoadJAASConfigurationFileImplementation(implName, e);
+    }
+
+    implClazz = clazz;
+  }
+
+  static Configuration create(URI uri) {
+    Configuration config = null;
+
+    if (implClazz != null) {
+      try {
+        Constructor ctor = implClazz.getDeclaredConstructor(URI.class);
+        config = (Configuration) ctor.newInstance(uri);
+      } catch (Exception e) {
+        log.failedToInstantiateJAASConfigurationFileImplementation(implClazz.getCanonicalName(), e);
+      }
+    } else {
+      log.noJAASConfigurationFileImplementation();
+    }
+
+    return config;
+  }
+}
diff --git a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/JAASClientConfig.java b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/JAASClientConfig.java
new file mode 100644
index 0000000..46fdf01
--- /dev/null
+++ b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/JAASClientConfig.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.cm.auth;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+import java.net.URL;
+
+class JAASClientConfig extends Configuration {
+
+  private static final Configuration baseConfig = Configuration.getConfiguration();
+
+  private Configuration configFile;
+
+  JAASClientConfig(URL configFileURL) throws Exception {
+    if (configFileURL != null) {
+      this.configFile = ConfigurationFactory.create(configFileURL.toURI());
+    }
+  }
+
+  @Override
+  public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
+    AppConfigurationEntry[] result = null;
+
+    // Try the config file if it exists
+    if (configFile != null) {
+      result = configFile.getAppConfigurationEntry(name);
+    }
+
+    // If the entry isn't there, delegate to the base configuration
+    if (result == null) {
+      result = baseConfig.getAppConfigurationEntry(name);
+    }
+
+    return result;
+  }
+}
diff --git a/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/SpnegoAuthInterceptor.java b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/SpnegoAuthInterceptor.java
new file mode 100644
index 0000000..0122239
--- /dev/null
+++ b/gateway-discovery-cm/src/main/java/org/apache/knox/gateway/topology/discovery/cm/auth/SpnegoAuthInterceptor.java
@@ -0,0 +1,248 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.cm.auth;
+
+import com.squareup.okhttp.Authenticator;
+import com.squareup.okhttp.Interceptor;
+import com.squareup.okhttp.Request;
+import com.squareup.okhttp.Response;
+
+import org.ietf.jgss.GSSContext;
+import org.ietf.jgss.GSSCredential;
+import org.ietf.jgss.GSSException;
+import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.Oid;
+
+import javax.security.auth.Subject;
+import javax.security.auth.login.LoginException;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.Proxy;
+import java.net.UnknownHostException;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Base64;
+import java.util.Locale;
+import static java.lang.String.format;
+import static java.util.Objects.requireNonNull;
+
+import static org.apache.http.HttpHeaders.AUTHORIZATION;
+import static org.apache.http.HttpHeaders.WWW_AUTHENTICATE;
+import static org.ietf.jgss.GSSContext.INDEFINITE_LIFETIME;
+import static org.ietf.jgss.GSSCredential.DEFAULT_LIFETIME;
+import static org.ietf.jgss.GSSCredential.INITIATE_ONLY;
+import static org.ietf.jgss.GSSName.NT_HOSTBASED_SERVICE;
+import static org.ietf.jgss.GSSName.NT_USER_NAME;
+
+public class SpnegoAuthInterceptor implements Interceptor, Authenticator {
+
+  private static final String NEGOTIATE = "Negotiate";
+
+  private static final GSSManager GSS_MANAGER = GSSManager.getInstance();
+
+  private static final Oid SPNEGO_OID   = createOid("1.3.6.1.5.5.2");
+  private static final Oid KERBEROS_OID = createOid("1.2.840.113554.1.2.2");
+
+  private static final String DEFAULT_REMOTE_SERVICE_NAME = "HTTP";
+
+  private static final int CREDENTIAL_EXPIRATION_THRESHOLD = 60; // seconds
+
+  private final String remoteServiceName;
+  private final boolean useCanonicalHostname;
+
+  private Subject subject;
+
+  private GSSCredentialSession credentialSession;
+
+  public SpnegoAuthInterceptor(Subject subject) {
+    this(subject, DEFAULT_REMOTE_SERVICE_NAME);
+  }
+
+  public SpnegoAuthInterceptor(Subject subject,
+                               String  remoteServiceName) {
+    this(subject, remoteServiceName, true);
+  }
+
+  public SpnegoAuthInterceptor(Subject subject,
+                               String  remoteServiceName,
+                               boolean useCanonicalHostname) {
+    this.subject = subject;
+    this.remoteServiceName = remoteServiceName;
+    this.useCanonicalHostname = useCanonicalHostname;
+  }
+
+  @Override
+  public Response intercept(Chain chain) throws IOException {
+    try {
+      return chain.proceed(authenticate(chain.request()));
+    } catch (Exception ignored) {
+      return chain.proceed(chain.request());
+    }
+  }
+
+  private static boolean isNegotiate(String value) {
+    String[] split = value.split("\\s+");
+    return (split.length == 2) && split[1].equalsIgnoreCase(NEGOTIATE);
+  }
+
+  @Override
+  public Request authenticate(Proxy proxy, Response response) throws IOException {
+    // If already attempted or not challenged for Kerberos, then skip this attempt
+    if (response.request().headers(AUTHORIZATION).stream().anyMatch(SpnegoAuthInterceptor::isNegotiate) ||
+        response.headers(WWW_AUTHENTICATE).stream().noneMatch(SpnegoAuthInterceptor::isNegotiate)) {
+      return null;
+    }
+
+    return authenticate(response.request());
+  }
+
+  @Override
+  public Request authenticateProxy(Proxy proxy, Response response) throws IOException {
+    return null; // Not needed
+  }
+
+  private Request authenticate(Request request) {
+    String principal = defineServicePrincipal(remoteServiceName, request.url().getHost(), useCanonicalHostname);
+    byte[] token = generateToken(principal);
+
+    String credential = format(Locale.getDefault(), "%s %s", NEGOTIATE, Base64.getEncoder().encodeToString(token));
+    return request.newBuilder()
+                  .header(AUTHORIZATION, credential)
+                  .build();
+  }
+
+  private byte[] generateToken(String servicePrincipal) {
+    GSSContext context = null;
+    try {
+      GSSCredentialSession GSSCredentialSession = getCredentialSession();
+      context = doAs(subject, () -> {
+        GSSContext result = GSS_MANAGER.createContext(GSS_MANAGER.createName(servicePrincipal, NT_HOSTBASED_SERVICE),
+                            SPNEGO_OID,
+                            GSSCredentialSession.getClientCredential(),
+                            INDEFINITE_LIFETIME);
+        result.requestMutualAuth(true);
+        result.requestConf(true);
+        result.requestInteg(true);
+        result.requestCredDeleg(false);
+        return result;
+      });
+
+      byte[] token = context.initSecContext(new byte[0], 0, 0);
+      if (token == null) {
+        throw new LoginException("No token generated from GSS context");
+      }
+      return token;
+    } catch (GSSException | LoginException e) {
+      throw new RuntimeException(format(Locale.getDefault(), "Kerberos error for [%s]: %s", servicePrincipal, e.getMessage()), e);
+    } finally {
+      try {
+        if (context != null) {
+          context.dispose();
+        }
+      } catch (GSSException ignored) {
+      }
+    }
+  }
+
+  private synchronized GSSCredentialSession getCredentialSession() throws GSSException {
+    if ((credentialSession == null) || credentialSession.needsRefresh()) {
+      credentialSession = createCredentialSession();
+    }
+    return credentialSession;
+  }
+
+  private GSSCredentialSession createCredentialSession() throws GSSException {
+    Principal clientPrincipal = subject.getPrincipals().iterator().next();
+    GSSCredential clientCredential =
+        doAs(subject,
+            () -> GSS_MANAGER.createCredential(GSS_MANAGER.createName(clientPrincipal.getName(), NT_USER_NAME),
+                DEFAULT_LIFETIME,
+                KERBEROS_OID,
+                INITIATE_ONLY));
+
+    return new GSSCredentialSession(clientCredential);
+  }
+
+  private static String defineServicePrincipal(String serviceName, String hostName, boolean useCanonicalHostname){
+    String serviceHostName = useCanonicalHostname ? getCanonicalHostName(hostName) : hostName;
+    return format(Locale.getDefault(), "%s@%s", serviceName, serviceHostName.toLowerCase(Locale.US));
+  }
+
+  private static String getCanonicalHostName(String hostName) {
+    String canonicalHostName;
+    try {
+      InetAddress address = InetAddress.getByName(hostName);
+      if ("localhost".equalsIgnoreCase(address.getHostName())) {
+        canonicalHostName = InetAddress.getLocalHost().getCanonicalHostName();
+      } else {
+        canonicalHostName = address.getCanonicalHostName();
+      }
+    } catch (UnknownHostException e) {
+      throw new RuntimeException("Failed to resolve host: " + hostName, e);
+    }
+    return canonicalHostName;
+  }
+
+  private interface GssSupplier<T> {
+    T get() throws GSSException;
+  }
+
+  private static <T> T doAs(Subject subject, GssSupplier<T> action) throws GSSException {
+    try {
+      return Subject.doAs(subject, (PrivilegedExceptionAction<T>) action::get);
+    } catch (PrivilegedActionException e) {
+      Throwable t = e.getCause();
+      if (t instanceof GSSException) {
+        throw (GSSException)t;
+      } else if (t instanceof Error) {
+        throw (Error)t;
+      } else if (t instanceof RuntimeException) {
+        throw (RuntimeException)t;
+      } else {
+        throw new RuntimeException(t);
+      }
+    }
+  }
+
+  private static Oid createOid(String value) {
+    try {
+      return new Oid(value);
+    } catch (GSSException e) {
+      throw new AssertionError(e);
+    }
+  }
+
+  private static class GSSCredentialSession {
+    private final GSSCredential clientCredential;
+
+    GSSCredentialSession(GSSCredential clientCredential) {
+      requireNonNull(clientCredential, "gssCredential is null");
+      this.clientCredential = clientCredential;
+    }
+
+    GSSCredential getClientCredential() {
+      return clientCredential;
+    }
+
+    public boolean needsRefresh() throws GSSException {
+      return clientCredential.getRemainingLifetime() < CREDENTIAL_EXPIRATION_THRESHOLD;
+    }
+  }
+
+}
\ No newline at end of file
diff --git a/gateway-discovery-cm/src/test/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryTest.java b/gateway-discovery-cm/src/test/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryTest.java
index 7c32c43..e7dcfc4 100644
--- a/gateway-discovery-cm/src/test/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryTest.java
+++ b/gateway-discovery-cm/src/test/java/org/apache/knox/gateway/topology/discovery/cm/ClouderaManagerServiceDiscoveryTest.java
@@ -49,7 +49,126 @@ import static org.junit.Assert.assertNotNull;
 public class ClouderaManagerServiceDiscoveryTest {
 
   @Test
+  public void testHiveServiceDiscovery() {
+    doTestHiveServiceDiscovery(false);
+  }
+
+  @Test
+  public void testHiveServiceDiscoverySSL() {
+    doTestHiveServiceDiscovery(true);
+  }
+
+  private void doTestHiveServiceDiscovery(final boolean enableSSL) {
+    final String clusterName    = "test-cluster-1";
+    final String hostName       = "test-host-1";
+    final String thriftPort     = "10001";
+    final String thriftPath     = "cliService";
+    final String expectedScheme = (enableSSL ? "https" : "http");
+
+    ServiceDiscovery.Cluster cluster =
+        doTestHiveServiceDiscovery(clusterName, hostName, thriftPort, thriftPath, enableSSL);
+    assertEquals(clusterName, cluster.getName());
+    List<String> hiveURLs = cluster.getServiceURLs("HIVE");
+    assertNotNull(hiveURLs);
+    assertEquals(1, hiveURLs.size());
+    assertEquals((expectedScheme + "://" + hostName + ":" +thriftPort + "/" + thriftPath), hiveURLs.get(0));
+  }
+
+  @Test
   public void testWebHDFSServiceDiscovery() {
+    final String clusterName = "test-cluster-1";
+    final String hostName    = "test-host-1";
+    final String nameService = "nameservice1";
+    final String nnPort      = "50070";
+    final String dfsHttpPort = "50075";
+
+    ServiceDiscovery.Cluster cluster = doTestHDFSDiscovery(clusterName, hostName, nameService, nnPort, dfsHttpPort);
+    assertEquals(clusterName, cluster.getName());
+    List<String> webhdfsURLs = cluster.getServiceURLs("WEBHDFS");
+    assertNotNull(webhdfsURLs);
+    assertEquals(1, webhdfsURLs.size());
+    assertEquals("http://" + hostName + ":" + dfsHttpPort + "/webhdfs",
+                 webhdfsURLs.get(0));
+  }
+
+  @Test
+  public void testWebHDFSServiceDiscoveryWithSSL() {
+    final String clusterName  = "test-cluster-1";
+    final String hostName     = "test-host-1";
+    final String nameService  = "nameservice1";
+    final String nnPort       = "50070";
+    final String dfsHttpPort  = "50075";
+    final String dfsHttpsPort = "50079";
+
+    ServiceDiscovery.Cluster cluster =
+        doTestHDFSDiscovery(clusterName, hostName, nameService, nnPort, dfsHttpPort, dfsHttpsPort);
+    assertEquals(clusterName, cluster.getName());
+    List<String> webhdfsURLs = cluster.getServiceURLs("WEBHDFS");
+    assertNotNull(webhdfsURLs);
+    assertEquals(1, webhdfsURLs.size());
+    assertEquals("https://" + hostName + ":" + dfsHttpsPort + "/webhdfs",
+                 webhdfsURLs.get(0));
+  }
+
+  @Test
+  public void testNameNodeServiceDiscovery() {
+    final String clusterName = "test-cluster-2";
+    final String hostName    = "test-host-2";
+    final String nameService = "nameservice2";
+    final String nnPort      = "50070";
+    final String dfsHttpPort = "50071";
+
+    ServiceDiscovery.Cluster cluster = doTestHDFSDiscovery(clusterName, hostName, nameService, nnPort, dfsHttpPort);
+    assertEquals(clusterName, cluster.getName());
+    List<String> nnURLs = cluster.getServiceURLs("NAMENODE");
+    assertNotNull(nnURLs);
+    assertEquals(1, nnURLs.size());
+    assertEquals(("hdfs://" + hostName + ":" + nnPort), nnURLs.get(0));
+  }
+
+  @Test
+  public void testNameNodeServiceDiscoveryHA() {
+    final String clusterName = "test-cluster-2";
+    final String hostName    = "test-host-2";
+    final String nameService = "nameservice2";
+    final String nnPort      = "50070";
+    final String dfsHttpPort = "50071";
+
+    ServiceDiscovery.Cluster cluster =
+        doTestHDFSDiscovery(clusterName, hostName, nameService, nnPort, dfsHttpPort, null, true);
+    assertEquals(clusterName, cluster.getName());
+    List<String> nnURLs = cluster.getServiceURLs("NAMENODE");
+    assertNotNull(nnURLs);
+    assertEquals(1, nnURLs.size());
+    assertEquals(("hdfs://" + nameService), nnURLs.get(0));
+  }
+
+  @Test
+  public void testHdfsUIServiceDiscovery() {
+    final String clusterName = "test-cluster-3";
+    final String hostName    = "test-host-3";
+    final String nameService = "nameservice3";
+    final String nnPort      = "50070";
+    final String dfsHttpPort = "50071";
+
+    ServiceDiscovery.Cluster cluster = doTestHDFSDiscovery(clusterName, hostName, nameService, nnPort, dfsHttpPort);
+    assertEquals(clusterName, cluster.getName());
+    List<String> hdfsUIURLs = cluster.getServiceURLs("HDFSUI");
+    assertNotNull(hdfsUIURLs);
+    assertEquals(1, hdfsUIURLs.size());
+    assertEquals(("http://" + hostName + ":" + dfsHttpPort), hdfsUIURLs.get(0));
+  }
+
+  private ServiceDiscovery.Cluster doTestHiveServiceDiscovery(final String  clusterName,
+                                                              final String  hostName,
+                                                              final String  thriftPort,
+                                                              final String  thriftPath,
+                                                              final boolean enableSSL) {
+    final String hs2SafetyValveValue =
+          "<property><name>hive.server2.transport.mode</name><value>http</value></property>\n" +
+          "<property><name>hive.server2.thrift.http.port</name><value>" + thriftPort + "</value></property>\n" +
+          "<property><name>hive.server2.thrift.http.path</name><value>" + thriftPath + "</value></property>";
+
     GatewayConfig gwConf = EasyMock.createNiceMock(GatewayConfig.class);
     EasyMock.replay(gwConf);
 
@@ -61,46 +180,62 @@ public class ClouderaManagerServiceDiscoveryTest {
     // Prepare the service list response for the cluster
     ApiServiceList serviceList = EasyMock.createNiceMock(ApiServiceList.class);
     EasyMock.expect(serviceList.getItems())
-            .andReturn(Collections.singletonList(createMockApiService("NAMENODE-1", "HDFS")))
-            .anyTimes();
+        .andReturn(Collections.singletonList(createMockApiService("HIVE-1", "HIVE")))
+        .anyTimes();
     EasyMock.replay(serviceList);
     mockClient.addResponse(ApiServiceList.class, new TestApiServiceListResponse(serviceList));
 
-    // Prepare the HDFS service config response for the cluster
-    Map<String, String> serviceProps = new HashMap<>();
-    serviceProps.put("hdfs_hadoop_ssl_enabled", "false");
-    serviceProps.put("dfs_webhdfs_enabled", "true");
-    ApiServiceConfig hdfsServiceConfig = createMockApiServiceConfig(serviceProps);
-    mockClient.addResponse(ApiServiceConfig.class, new TestApiServiceConfigResponse(hdfsServiceConfig));
+    // Prepare the HIVE service config response for the cluster
+    ApiServiceConfig hiveServiceConfig = createMockApiServiceConfig();
+    mockClient.addResponse(ApiServiceConfig.class, new TestApiServiceConfigResponse(hiveServiceConfig));
 
-    // Prepare the NameNode role
-    ApiRole nnRole = createMockApiRole("HDFS-1-NAMENODE-d0b64dd7b7611e22bc976ede61678d9e", "NAMENODE", "test-host-1");
-    ApiRoleList nnRoleList = EasyMock.createNiceMock(ApiRoleList.class);
-    EasyMock.expect(nnRoleList.getItems()).andReturn(Collections.singletonList(nnRole)).anyTimes();
-    EasyMock.replay(nnRoleList);
-    mockClient.addResponse(ApiRoleList.class, new TestApiRoleListResponse(nnRoleList));
+    // Prepare the HS2 role
+    ApiRole hs2Role =
+        createMockApiRole("HIVE-1-HIVESERVER2-d0b64dd7b7611e22bc976ede61678d9e", "HIVESERVER2", hostName);
+    ApiRoleList hiveRoleList = EasyMock.createNiceMock(ApiRoleList.class);
+    EasyMock.expect(hiveRoleList.getItems()).andReturn(Collections.singletonList(hs2Role)).anyTimes();
+    EasyMock.replay(hiveRoleList);
+    mockClient.addResponse(ApiRoleList.class, new TestApiRoleListResponse(hiveRoleList));
 
-    // Configure the NameNode role
+    // Configure the HS2 role
     Map<String, String> roleProperties = new HashMap<>();
-    roleProperties.put("dfs_federation_namenode_nameservice", "nameservice1");
-    roleProperties.put("namenode_port", "50070");
-    roleProperties.put("dfs_http_port", "50071");
-    ApiConfigList nnRoleConfigList = createMockApiConfigList(roleProperties);
-    mockClient.addResponse(ApiConfigList.class, new TestApiConfigListResponse(nnRoleConfigList));
+    roleProperties.put("hive_hs2_config_safety_valve", hs2SafetyValveValue);
+    roleProperties.put("hive.server2.use.SSL", String.valueOf(enableSSL));
+    ApiConfigList hiveRoleConfigList = createMockApiConfigList(roleProperties);
+    mockClient.addResponse(ApiConfigList.class, new TestApiConfigListResponse(hiveRoleConfigList));
 
     // Invoke the service discovery
     ClouderaManagerServiceDiscovery cmsd = new ClouderaManagerServiceDiscovery(true);
-    ServiceDiscovery.Cluster cluster = cmsd.discover(gwConf, sdConfig, "test-cluster", mockClient);
+    ServiceDiscovery.Cluster cluster = cmsd.discover(gwConf, sdConfig, clusterName, mockClient);
     assertNotNull(cluster);
-    assertEquals("test-cluster", cluster.getName());
-    List<String> webhdfsURLs = cluster.getServiceURLs("WEBHDFS");
-    assertNotNull(webhdfsURLs);
-    assertEquals(1, webhdfsURLs.size());
-    assertEquals("http://test-host-1:50071/webhdfs", webhdfsURLs.get(0));
+    return cluster;
   }
 
-  @Test
-  public void testHiveServiceDiscovery() {
+  private ServiceDiscovery.Cluster doTestHDFSDiscovery(final String clusterName,
+                                                       final String hostName,
+                                                       final String nameService,
+                                                       final String nnPort,
+                                                       final String dfsHttpPort) {
+    return doTestHDFSDiscovery(clusterName, hostName, nameService, nnPort, dfsHttpPort, null);
+  }
+
+  private ServiceDiscovery.Cluster doTestHDFSDiscovery(final String clusterName,
+                                                       final String hostName,
+                                                       final String nameService,
+                                                       final String nnPort,
+                                                       final String dfsHttpPort,
+                                                       final String dfsHttpsPort) {
+    return doTestHDFSDiscovery(clusterName, hostName, nameService, nnPort, dfsHttpPort, dfsHttpsPort, false);
+  }
+
+  private ServiceDiscovery.Cluster doTestHDFSDiscovery(final String  clusterName,
+                                                       final String  hostName,
+                                                       final String  nameService,
+                                                       final String  nnPort,
+                                                       final String  dfsHttpPort,
+                                                       final String  dfsHttpsPort,
+                                                       final boolean enableHA) {
+
     GatewayConfig gwConf = EasyMock.createNiceMock(GatewayConfig.class);
     EasyMock.replay(gwConf);
 
@@ -112,47 +247,50 @@ public class ClouderaManagerServiceDiscoveryTest {
     // Prepare the service list response for the cluster
     ApiServiceList serviceList = EasyMock.createNiceMock(ApiServiceList.class);
     EasyMock.expect(serviceList.getItems())
-            .andReturn(Collections.singletonList(createMockApiService("HIVE-1", "HIVE")))
-            .anyTimes();
+        .andReturn(Collections.singletonList(createMockApiService("NAMENODE-1", "HDFS")))
+        .anyTimes();
     EasyMock.replay(serviceList);
     mockClient.addResponse(ApiServiceList.class, new TestApiServiceListResponse(serviceList));
 
-    // Prepare the HIVE service config response for the cluster
-    ApiServiceConfig hiveServiceConfig = createMockApiServiceConfig();
-    mockClient.addResponse(ApiServiceConfig.class, new TestApiServiceConfigResponse(hiveServiceConfig));
+    // Prepare the HDFS service config response for the cluster
+    Map<String, String> serviceProps = new HashMap<>();
+    serviceProps.put("hdfs_hadoop_ssl_enabled", String.valueOf(dfsHttpsPort != null && !dfsHttpsPort.isEmpty()));
+    serviceProps.put("dfs_webhdfs_enabled", "true");
+    ApiServiceConfig hdfsServiceConfig = createMockApiServiceConfig(serviceProps);
+    mockClient.addResponse(ApiServiceConfig.class, new TestApiServiceConfigResponse(hdfsServiceConfig));
 
-    // Prepare the HS2 role
-    ApiRole hs2Role = createMockApiRole("HIVE-1-HIVESERVER2-d0b64dd7b7611e22bc976ede61678d9e", "HIVESERVER2", "test-host-1");
-    ApiRoleList hiveRoleList = EasyMock.createNiceMock(ApiRoleList.class);
-    EasyMock.expect(hiveRoleList.getItems()).andReturn(Collections.singletonList(hs2Role)).anyTimes();
-    EasyMock.replay(hiveRoleList);
-    mockClient.addResponse(ApiRoleList.class, new TestApiRoleListResponse(hiveRoleList));
+    // Prepare the NameNode role
+    ApiRole nnRole = createMockApiRole("HDFS-1-NAMENODE-d0b64dd7b7611e22bc976ede61678d9e", "NAMENODE", hostName);
+    ApiRoleList nnRoleList = EasyMock.createNiceMock(ApiRoleList.class);
+    EasyMock.expect(nnRoleList.getItems()).andReturn(Collections.singletonList(nnRole)).anyTimes();
+    EasyMock.replay(nnRoleList);
+    mockClient.addResponse(ApiRoleList.class, new TestApiRoleListResponse(nnRoleList));
 
-    // Configure the HS2 role
+    // Configure the NameNode role
     Map<String, String> roleProperties = new HashMap<>();
-    roleProperties.put("hive_hs2_config_safety_valve",
-                       "<property><name>hive.server2.transport.mode</name><value>http</value></property>\n" +
-                       "<property><name>hive.server2.thrift.http.port</name><value>10001</value></property>\n" +
-                       "<property><name>hive.server2.thrift.http.path</name><value>cliService</value></property>");
-    ApiConfigList hiveRoleConfigList = createMockApiConfigList(roleProperties);
-    mockClient.addResponse(ApiConfigList.class, new TestApiConfigListResponse(hiveRoleConfigList));
+    roleProperties.put("dfs_federation_namenode_nameservice", nameService);
+    roleProperties.put("autofailover_enabled", String.valueOf(enableHA));
+    roleProperties.put("namenode_port", nnPort);
+    roleProperties.put("dfs_http_port", dfsHttpPort);
+    if (dfsHttpsPort != null && !dfsHttpsPort.isEmpty()) {
+      roleProperties.put("dfs_https_port", dfsHttpsPort);
+    }
+    ApiConfigList nnRoleConfigList = createMockApiConfigList(roleProperties);
+    mockClient.addResponse(ApiConfigList.class, new TestApiConfigListResponse(nnRoleConfigList));
 
     // Invoke the service discovery
     ClouderaManagerServiceDiscovery cmsd = new ClouderaManagerServiceDiscovery(true);
-    ServiceDiscovery.Cluster cluster = cmsd.discover(gwConf, sdConfig, "test-cluster", mockClient);
+    ServiceDiscovery.Cluster cluster = cmsd.discover(gwConf, sdConfig, clusterName, mockClient);
     assertNotNull(cluster);
-    assertEquals("test-cluster", cluster.getName());
-    List<String> hiveURLs = cluster.getServiceURLs("HIVE");
-    assertNotNull(hiveURLs);
-    assertEquals(1, hiveURLs.size());
-    assertEquals("http://test-host-1:10001/cliService", hiveURLs.get(0));
+    assertEquals(clusterName, cluster.getName());
+    return cluster;
   }
 
-  private ServiceDiscoveryConfig createMockDiscoveryConfig() {
+  private static ServiceDiscoveryConfig createMockDiscoveryConfig() {
     return createMockDiscoveryConfig("http://localhost:1234", "itsme");
   }
 
-  private ServiceDiscoveryConfig createMockDiscoveryConfig(String address, String username) {
+  private static ServiceDiscoveryConfig createMockDiscoveryConfig(String address, String username) {
     ServiceDiscoveryConfig config = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
     EasyMock.expect(config.getAddress()).andReturn(address).anyTimes();
     EasyMock.expect(config.getUser()).andReturn(username).anyTimes();
@@ -161,7 +299,7 @@ public class ClouderaManagerServiceDiscoveryTest {
     return config;
   }
 
-  private ApiService createMockApiService(String name, String type) {
+  private static ApiService createMockApiService(String name, String type) {
     ApiService s = EasyMock.createNiceMock(ApiService.class);
     EasyMock.expect(s.getName()).andReturn(name).anyTimes();
     EasyMock.expect(s.getType()).andReturn(type).anyTimes();
@@ -169,7 +307,7 @@ public class ClouderaManagerServiceDiscoveryTest {
     return s;
   }
 
-  private ApiRole createMockApiRole(String name, String type, String hostname) {
+  private static ApiRole createMockApiRole(String name, String type, String hostname) {
     ApiRole r = EasyMock.createNiceMock(ApiRole.class);
     EasyMock.expect(r.getName()).andReturn(name).anyTimes();
     EasyMock.expect(r.getType()).andReturn(type).anyTimes();
@@ -181,11 +319,11 @@ public class ClouderaManagerServiceDiscoveryTest {
     return r;
   }
 
-  private ApiServiceConfig createMockApiServiceConfig() {
+  private static ApiServiceConfig createMockApiServiceConfig() {
     return createMockApiServiceConfig(Collections.emptyMap());
   }
 
-  private ApiServiceConfig createMockApiServiceConfig(Map<String, String> properties) {
+  private static ApiServiceConfig createMockApiServiceConfig(Map<String, String> properties) {
     ApiServiceConfig serviceConfig = EasyMock.createNiceMock(ApiServiceConfig.class);
     List<ApiConfig> serviceConfigs = new ArrayList<>();
 
@@ -202,7 +340,7 @@ public class ClouderaManagerServiceDiscoveryTest {
     return serviceConfig;
   }
 
-  private ApiConfigList createMockApiConfigList(Map<String, String> properties) {
+  private static ApiConfigList createMockApiConfigList(Map<String, String> properties) {
     ApiConfigList configList = EasyMock.createNiceMock(ApiConfigList.class);
     List<ApiConfig> roleConfigs = new ArrayList<>();
 

