knox-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From pzamp...@apache.org
Subject knox git commit: KNOX-1153 - Support for determining whether HA is enabled or not from the service's configuration
Date Thu, 05 Apr 2018 13:52:17 GMT
Repository: knox
Updated Branches:
  refs/heads/master f8cad74ca -> 8257ed443


KNOX-1153 - Support for determining whether HA is enabled or not from the service's configuration


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/8257ed44
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/8257ed44
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/8257ed44

Branch: refs/heads/master
Commit: 8257ed4439e577c2100fbd95bf11bc969136641e
Parents: f8cad74
Author: Phil Zampino <pzampino@apache.org>
Authored: Thu Apr 5 09:36:49 2018 -0400
Committer: Phil Zampino <pzampino@apache.org>
Committed: Thu Apr 5 09:51:57 2018 -0400

----------------------------------------------------------------------
 .../deploy/HaProviderDeploymentContributor.java |   2 +-
 .../HaProviderDeploymentContributorTest.java    | 130 ++++++---
 .../simple/SimpleDescriptorHandler.java         |  57 +++-
 .../PropertiesFileServiceDiscoveryTest.java     |  38 ++-
 .../PropertiesFileServiceDiscovery.java         |  88 ++++++-
 .../simple/SimpleDescriptorHandlerTest.java     | 261 +++++++++++++++++--
 .../apache/knox/gateway/topology/Service.java   |   2 +
 7 files changed, 493 insertions(+), 85 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/8257ed44/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributor.java b/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributor.java
index 00c2562..633179d 100644
--- a/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributor.java
+++ b/gateway-provider-ha/src/main/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributor.java
@@ -108,7 +108,7 @@ public class HaProviderDeploymentContributor extends ProviderDeploymentContribut
     */
    private void applyParamOverrides(HaServiceConfig config, Map<String, String> serviceLevelParams) {
       if (serviceLevelParams != null && !serviceLevelParams.isEmpty()) {
-         String enabled = serviceLevelParams.get(HaServiceConfigConstants.CONFIG_PARAM_ENABLED);
+         String enabled = serviceLevelParams.get(Service.HA_ENABLED_PARAM);
          if (enabled != null) {
             config.setEnabled(Boolean.valueOf(enabled));
          }

http://git-wip-us.apache.org/repos/asf/knox/blob/8257ed44/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
index 6d3bf1e..7140663 100644
--- a/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
+++ b/gateway-provider-ha/src/test/java/org/apache/knox/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
@@ -25,6 +25,7 @@ import org.apache.knox.gateway.descriptor.GatewayDescriptor;
 import org.apache.knox.gateway.descriptor.ResourceDescriptor;
 import org.apache.knox.gateway.ha.provider.HaDescriptor;
 import org.apache.knox.gateway.ha.provider.HaServiceConfig;
+import org.apache.knox.gateway.ha.provider.impl.HaServiceConfigConstants;
 import org.apache.knox.gateway.topology.Provider;
 import org.apache.knox.gateway.topology.Service;
 import org.apache.knox.gateway.topology.Topology;
@@ -75,14 +76,7 @@ public class HaProviderDeploymentContributorTest {
       Map<String, String> providerParams = new HashMap<>();
 
       // Specify all the possible params at the HaProvider level for TestRoleOne
-      providerParams.put("TestRoleOne",
-                         "enabled=false;" +
-                         "maxRetryAttempts=5;"+
-                         "retrySleep=50;"+
-                         "maxFailoverAttempts=4;"+
-                         "failoverSleep=40;"+
-                         "zookeeperNamespace=testRoleOne;"+
-                         "zookeeperEnsemble=http://host1:2181,http://host2:2181");
+      providerParams.put("TestRoleOne", getHaProviderParamValue(false, 40, 4, 50, 5, "testRoleOne", "http://host1:2181,http://host2:2181"));
 
       Provider haProvider = createHaProvider(providerParams);
 
@@ -136,13 +130,13 @@ public class HaProviderDeploymentContributorTest {
 
       // Specify all the possible params in the TestRoleOne service level
       Map<String, String> testRoleOneParams = new HashMap<>();
-      testRoleOneParams.put("enabled", "true");
-      testRoleOneParams.put("maxRetryAttempts", "6");
-      testRoleOneParams.put("retrySleep", "60");
-      testRoleOneParams.put("maxFailoverAttempts", "8");
-      testRoleOneParams.put("failoverSleep", "80");
-      testRoleOneParams.put("zookeeperNamespace", "testRoleOneOverride");
-      testRoleOneParams.put("zookeeperEnsemble", "http://host3:2181,http://host4:2181");
+      testRoleOneParams.put(Service.HA_ENABLED_PARAM, "true");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_MAX_RETRY_ATTEMPTS, "6");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_RETRY_SLEEP, "60");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_MAX_FAILOVER_ATTEMPTS, "8");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_FAILOVER_SLEEP, "80");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_NAMESPACE, "testRoleOneOverride");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_ENSEMBLE, "http://host3:2181,http://host4:2181");
 
       // A service with all the params overridden
       Service testRoleOneService = EasyMock.createNiceMock(Service.class);
@@ -182,11 +176,7 @@ public class HaProviderDeploymentContributorTest {
       Map<String, String> providerParams = new HashMap<>();
 
       // Specify a subset of the possible params at the HaProvider level for TestRoleOne
-      providerParams.put("TestRoleOne",
-                         "enabled=false;" +
-                         "maxRetryAttempts=5;"+
-                         "maxFailoverAttempts=4;"+
-                         "failoverSleep=40");
+      providerParams.put("TestRoleOne", getHaProviderParamValue(false, 40, 4, -1, 5));
 
       Provider haProvider = createHaProvider(providerParams);
 
@@ -195,10 +185,10 @@ public class HaProviderDeploymentContributorTest {
 
       // Specify a subset of the possible params in the TestRoleOne service level
       Map<String, String> testRoleOneParams = new HashMap<>();
-      testRoleOneParams.put("enabled", "true");
-      testRoleOneParams.put("retrySleep", "60");
-      testRoleOneParams.put("zookeeperNamespace", "testRoleOneOverride");
-      testRoleOneParams.put("zookeeperEnsemble", "http://host3:2181,http://host4:2181");
+      testRoleOneParams.put(Service.HA_ENABLED_PARAM, "true");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_RETRY_SLEEP, "60");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_NAMESPACE, "testRoleOneOverride");
+      testRoleOneParams.put(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_ENSEMBLE, "http://host3:2181,http://host4:2181");
 
       // A service with a subset of the params overridden
       Service testRoleOneService = EasyMock.createNiceMock(Service.class);
@@ -237,18 +227,11 @@ public class HaProviderDeploymentContributorTest {
       Map<String, String> providerParams = new HashMap<>();
 
       // Specify a subset of the possible HaProvider-level params for TestRoleOne
-      providerParams.put("TestRoleOne",
-                         "enabled=true;maxRetryAttempts=1;retrySleep=10;maxFailoverAttempts=2;failoverSleep=20");
+      providerParams.put("TestRoleOne", getHaProviderParamValue(true, 20, 2, 10, 1));
 
       // Specify all the possible params at the HaProvider level for TestRoleTwo
       providerParams.put("TestRoleTwo",
-                         "enabled=false;" +
-                         "maxRetryAttempts=3;"+
-                         "retrySleep=30;"+
-                         "maxFailoverAttempts=4;"+
-                         "failoverSleep=40;"+
-                         "zookeeperNamespace=testRoleTwo;"+
-                         "zookeeperEnsemble=http://host1:2181,http://host2:2181");
+                         getHaProviderParamValue(false, 40, 4, 30, 3, "testRoleTwo", "http://host1:2181,http://host2:2181"));
 
       Provider testHaProvider = createHaProvider(providerParams);
 
@@ -265,13 +248,13 @@ public class HaProviderDeploymentContributorTest {
 
       // Override all the possible params in the TestRoleTwo service level
       Map<String, String> testRoleTwoParams = new HashMap<>();
-      testRoleTwoParams.put("enabled", "true");
-      testRoleTwoParams.put("maxRetryAttempts", "6");
-      testRoleTwoParams.put("retrySleep", "60");
-      testRoleTwoParams.put("maxFailoverAttempts", "8");
-      testRoleTwoParams.put("failoverSleep", "80");
-      testRoleTwoParams.put("zookeeperNamespace", "testRoleTwoOverride");
-      testRoleTwoParams.put("zookeeperEnsemble", "http://host3:2181,http://host4:2181");
+      testRoleTwoParams.put(Service.HA_ENABLED_PARAM, "true");
+      testRoleTwoParams.put(HaServiceConfigConstants.CONFIG_PARAM_MAX_RETRY_ATTEMPTS, "6");
+      testRoleTwoParams.put(HaServiceConfigConstants.CONFIG_PARAM_RETRY_SLEEP, "60");
+      testRoleTwoParams.put(HaServiceConfigConstants.CONFIG_PARAM_MAX_FAILOVER_ATTEMPTS, "8");
+      testRoleTwoParams.put(HaServiceConfigConstants.CONFIG_PARAM_FAILOVER_SLEEP, "80");
+      testRoleTwoParams.put(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_NAMESPACE, "testRoleTwoOverride");
+      testRoleTwoParams.put(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_ENSEMBLE, "http://host3:2181,http://host4:2181");
 
       Service testRoleTwoService = EasyMock.createNiceMock(Service.class);
       EasyMock.expect(testRoleTwoService.getRole()).andReturn("TestRoleTwo").anyTimes();
@@ -307,6 +290,73 @@ public class HaProviderDeploymentContributorTest {
    }
 
 
+   private static String getHaProviderParamValue(boolean enabled,
+                                                 long    failoverSleep,
+                                                 int     maxFailoverAttempts,
+                                                 long    retrySleep,
+                                                 int     maxRetryAttempts) {
+      return getHaProviderParamValue(enabled, failoverSleep, maxFailoverAttempts, retrySleep, maxRetryAttempts, null, null);
+   }
+
+
+   private static String getHaProviderParamValue(boolean enabled,
+                                                 long    failoverSleep,
+                                                 int     maxFailoverAttempts,
+                                                 long    retrySleep,
+                                                 int     maxRetryAttempts,
+                                                 String  zooKeeperNamespace,
+                                                 String  zooKeeperEnsemble) {
+      StringBuilder builder = new StringBuilder();
+
+      builder.append(HaServiceConfigConstants.CONFIG_PARAM_ENABLED);
+      builder.append(HaServiceConfigConstants.CONFIG_PAIR_DELIMITER);
+      builder.append(String.valueOf(enabled));
+
+      if (maxRetryAttempts > -1) {
+         builder.append(HaServiceConfigConstants.CONFIG_PAIRS_DELIMITER);
+         builder.append(HaServiceConfigConstants.CONFIG_PARAM_MAX_RETRY_ATTEMPTS);
+         builder.append(HaServiceConfigConstants.CONFIG_PAIR_DELIMITER);
+         builder.append(String.valueOf(maxRetryAttempts));
+      }
+
+      if (retrySleep > -1) {
+         builder.append(HaServiceConfigConstants.CONFIG_PAIRS_DELIMITER);
+         builder.append(HaServiceConfigConstants.CONFIG_PARAM_RETRY_SLEEP);
+         builder.append(HaServiceConfigConstants.CONFIG_PAIR_DELIMITER);
+         builder.append(String.valueOf(retrySleep));
+      }
+
+      if (maxFailoverAttempts > -1) {
+         builder.append(HaServiceConfigConstants.CONFIG_PAIRS_DELIMITER);
+         builder.append(HaServiceConfigConstants.CONFIG_PARAM_MAX_FAILOVER_ATTEMPTS);
+         builder.append(HaServiceConfigConstants.CONFIG_PAIR_DELIMITER);
+         builder.append(String.valueOf(maxFailoverAttempts));
+      }
+
+      if (failoverSleep > -1) {
+         builder.append(HaServiceConfigConstants.CONFIG_PAIRS_DELIMITER);
+         builder.append(HaServiceConfigConstants.CONFIG_PARAM_FAILOVER_SLEEP);
+         builder.append(HaServiceConfigConstants.CONFIG_PAIR_DELIMITER);
+         builder.append(String.valueOf(failoverSleep));
+      }
+
+      if (zooKeeperNamespace != null) {
+         builder.append(HaServiceConfigConstants.CONFIG_PAIRS_DELIMITER);
+         builder.append(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_NAMESPACE);
+         builder.append(HaServiceConfigConstants.CONFIG_PAIR_DELIMITER);
+         builder.append(zooKeeperNamespace);
+      }
+
+      if (zooKeeperEnsemble != null) {
+         builder.append(HaServiceConfigConstants.CONFIG_PAIRS_DELIMITER);
+         builder.append(HaServiceConfigConstants.CONFIG_PARAM_ZOOKEEPER_ENSEMBLE);
+         builder.append(HaServiceConfigConstants.CONFIG_PAIR_DELIMITER);
+         builder.append(zooKeeperEnsemble);
+      }
+
+      return builder.toString();
+   }
+
    /**
     *
     * @param config              The HaServiceConfig to validate

http://git-wip-us.apache.org/repos/asf/knox/blob/8257ed44/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
index 4edb2a7..e83f961 100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -437,19 +437,48 @@ public class SimpleDescriptorHandler {
                     sw.write("        <version>" + serviceVersions.get(serviceName) + "</version>\n");
                 }
 
-                // If the service is configured for ZooKeeper-based HA
+                // Add the service params to the map for serialization
+                Map<String,String> params = serviceParams.computeIfAbsent(serviceName, k -> new HashMap<>());
+
                 ServiceDiscovery.Cluster.ZooKeeperConfig zkConf = haServiceParams.get(serviceName);
-                if (zkConf != null && zkConf.isEnabled() && zkConf.getEnsemble() != null) {
-                    // Add the zookeeper params to the map for serialization
-                    Map<String,String> params = serviceParams.computeIfAbsent(serviceName, k -> new HashMap<>());
 
+                // Determine whether to persist the haEnabled param, and to what value
+                boolean isServiceHaEnabledAuto = false;
+                boolean isServiceHaEnabled = false;
+
+                if (haProvider != null) {
+                    Map<String, String> haParams = haProvider.getParams();
+                    if (haParams != null && haParams.containsKey(serviceName)) {
+                        String serviceHaParams = haParams.get(serviceName);
+                        Map<String,String> parsedServiceHaParams = parseHaProviderParam(serviceHaParams);
+                        String enabledValue = parsedServiceHaParams.get("enabled");
+                        if (enabledValue != null) {
+                            if (enabledValue.equalsIgnoreCase("auto")) {
+                                isServiceHaEnabledAuto = true;
+                                isServiceHaEnabled = (zkConf != null && zkConf.isEnabled());
+                            } else {
+                                isServiceHaEnabled = enabledValue.equalsIgnoreCase("true");
+                            }
+                        }
+                    }
+                }
+
+                // If the HA provider configuration for this service indicates an enabled value of "auto", then
+                // persist the derived enabled value.
+                if (isServiceHaEnabledAuto) {
+                    params.put(org.apache.knox.gateway.topology.Service.HA_ENABLED_PARAM,
+                               String.valueOf(isServiceHaEnabled));
+                }
+
+                // If the service is configured for ZooKeeper-based HA
+                if (zkConf != null && zkConf.getEnsemble() != null) {
                     String ensemble = zkConf.getEnsemble();
-                    if (ensemble != null) {
+                    if (ensemble != null && !ensemble.isEmpty()) {
                         params.put("zookeeperEnsemble", ensemble);
                     }
 
                     String namespace = zkConf.getNamespace();
-                    if (namespace != null) {
+                    if (namespace != null && !namespace.isEmpty() ) {
                         params.put("zookeeperNamespace", namespace);
                     }
                 } else {
@@ -538,4 +567,20 @@ public class SimpleDescriptorHandler {
         return result;
     }
 
+    private static Map<String, String> parseHaProviderParam(String paramValue) {
+        Map<String, String> result = new HashMap<>();
+
+        String[] elements = paramValue.split(";");
+        if (elements.length > 0) {
+            for (String element : elements) {
+                String[] kv = element.split("=");
+                if (kv.length == 2) {
+                    result.put(kv[0], kv[1]);
+                }
+            }
+        }
+
+        return result;
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/8257ed44/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
index 59dd247..fa64b40 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
@@ -35,12 +35,16 @@ public class PropertiesFileServiceDiscoveryTest {
     private static final Map<String, String> clusterProperties = new HashMap<>();
     static {
         clusterProperties.put("mycluster.name", "mycluster");
-        clusterProperties.put("mycluster.NAMENODE", "hdfs://namenodehost:8020");
-        clusterProperties.put("mycluster.JOBTRACKER", "rpc://jobtrackerhostname:8050");
-        clusterProperties.put("mycluster.WEBHCAT", "http://webhcathost:50111/templeton");
-        clusterProperties.put("mycluster.OOZIE", "http://ooziehost:11000/oozie");
-        clusterProperties.put("mycluster.HIVE", "http://hivehostname:10001/clipath");
-        clusterProperties.put("mycluster.RESOURCEMANAGER", "http://remanhost:8088/ws");
+        clusterProperties.put("mycluster.NAMENODE.url", "hdfs://namenodehost:8020");
+        clusterProperties.put("mycluster.JOBTRACKER.url", "rpc://jobtrackerhostname:8050");
+        clusterProperties.put("mycluster.WEBHCAT.url", "http://webhcathost:50111/templeton");
+        clusterProperties.put("mycluster.WEBHDFS.url", "http://webhdfshost1:50070/webhdfs,http://webhdfshost2:50070/webhdfs");
+        clusterProperties.put("mycluster.OOZIE.url", "http://ooziehost:11000/oozie");
+        clusterProperties.put("mycluster.HIVE.url", "http://hivehostname:10001/clipath");
+        clusterProperties.put("mycluster.RESOURCEMANAGER.url", "http://remanhost:8088/ws");
+        clusterProperties.put("mycluster.HIVE.haEnabled", "true");
+        clusterProperties.put("mycluster.HIVE.ensemble", "http://host1:1281,http://host2:1281");
+        clusterProperties.put("mycluster.HIVE.namespace", "hiveserver2");
     }
 
     private static final Properties config = new Properties();
@@ -68,8 +72,28 @@ public class PropertiesFileServiceDiscoveryTest {
                     sd.discover(gc, new DefaultServiceDiscoveryConfig(discoverySource.getAbsolutePath()), "mycluster");
             assertNotNull(c);
             for (String name : clusterProperties.keySet()) {
-                assertEquals(clusterProperties.get(name), c.getServiceURLs(name.split("\\.")[1]).get(0));
+                if (name.endsWith("url")) {
+                    String svcName = name.split("\\.")[1];
+                    if ("WEBHDFS".equals(svcName)) {
+                        List<String> webhdfsURLs = c.getServiceURLs(svcName);
+                        assertEquals(2, webhdfsURLs.size());
+                        assertEquals("http://webhdfshost1:50070/webhdfs", webhdfsURLs.get(0));
+                        assertEquals("http://webhdfshost2:50070/webhdfs", webhdfsURLs.get(1));
+                    } else {
+                        assertEquals(clusterProperties.get(name), c.getServiceURLs(svcName).get(0));
+                    }
+                }
             }
+
+            assertNull("Should not be any ZooKeeper config for RESOURCEMANAGER",
+                       c.getZooKeeperConfiguration("RESOURCEMANAGER"));
+
+            // HIVE ZooKeeper config
+            ServiceDiscovery.Cluster.ZooKeeperConfig zkConf = c.getZooKeeperConfiguration("HIVE");
+            assertNotNull(zkConf);
+            assertEquals(Boolean.valueOf(clusterProperties.get("mycluster.HIVE.haEnabled")), zkConf.isEnabled());
+            assertEquals(clusterProperties.get("mycluster.HIVE.ensemble"), zkConf.getEnsemble());
+            assertEquals(clusterProperties.get("mycluster.HIVE.namespace"), zkConf.getNamespace());
         } finally {
             discoverySource.delete();
         }

http://git-wip-us.apache.org/repos/asf/knox/blob/8257ed44/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
index 3bf8117..eeb7dca 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
@@ -49,28 +49,48 @@ class PropertiesFileServiceDiscovery implements ServiceDiscovery {
         try {
             p.load(new FileInputStream(discoveryConfig.getAddress()));
 
-            Map<String, Map<String, List<String>>> clusters = new HashMap<>();
+            Map<String, Map<String, Map<String, String>>> clusterProperties = new HashMap<>();
+            Map<String, Map<String, List<String>>> clusterURLs = new HashMap<>();
             for (Object key : p.keySet()) {
                 String propertyKey = (String)key;
                 String[] parts = propertyKey.split("\\.");
-                if (parts.length == 2) {
+                if (parts.length == 3) {
                     String clusterName = parts[0];
-                    String serviceName = parts[1];
-                    String serviceURL  = p.getProperty(propertyKey);
-                    if (!clusters.containsKey(clusterName)) {
-                        clusters.put(clusterName, new HashMap<String, List<String>>());
+                    if (!clusterURLs.containsKey(clusterName)) {
+                        clusterURLs.put(clusterName, new HashMap<>());
+                    }
+                    if (!clusterProperties.containsKey(clusterName)) {
+                        clusterProperties.put(clusterName, new HashMap<>());
                     }
-                    Map<String, List<String>> serviceURLs = clusters.get(clusterName);
-                    if (!serviceURLs.containsKey(serviceName)) {
-                        serviceURLs.put(serviceName, new ArrayList<String>());
+                    String serviceName = parts[1];
+                    String property    = parts[2];
+                    if (property.equals("url")) {
+                        String serviceURL = p.getProperty(propertyKey);
+                        Map<String, List<String>> serviceURLs = clusterURLs.get(clusterName);
+                        if (!serviceURLs.containsKey(serviceName)) {
+                            serviceURLs.put(serviceName, new ArrayList<>());
+                        }
+
+                        // Handle multiple URLs for the service (e.g., HA)
+                        String[] svcURLs = serviceURL.split(",");
+                        for (String url : svcURLs) {
+                          serviceURLs.get(serviceName).add(url);
+                        }
+                    } else if (!property.equalsIgnoreCase("name")) { // ZooKeeper config properties
+                        Map<String, Map<String, String>> props = clusterProperties.get(clusterName);
+                        if (!props.containsKey(serviceName)) {
+                            props.put(serviceName, new HashMap<>());
+                        }
+                        props.get(serviceName).put(property, p.getProperty(propertyKey));
                     }
-                    serviceURLs.get(serviceName).add(serviceURL);
                 }
             }
 
-            for (String clusterName : clusters.keySet()) {
+            for (String clusterName : clusterURLs.keySet()) {
                 result.put(clusterName,
-                        new PropertiesFileServiceDiscovery.Cluster(clusterName, clusters.get(clusterName)));
+                           new PropertiesFileServiceDiscovery.Cluster(clusterName,
+                                                                      clusterURLs.get(clusterName),
+                                                                      clusterProperties.get(clusterName)));
             }
         } catch (IOException e) {
             e.printStackTrace();
@@ -92,10 +112,12 @@ class PropertiesFileServiceDiscovery implements ServiceDiscovery {
     static class Cluster implements ServiceDiscovery.Cluster {
         private String name;
         private Map<String, List<String>> serviceURLS = new HashMap<>();
+        private Map<String, Map<String, String>> serviceConfigProps = new HashMap<>();
 
-        Cluster(String name, Map<String, List<String>> serviceURLs) {
+        Cluster(String name, Map<String, List<String>> serviceURLs, Map<String, Map<String, String>> svcProperties) {
             this.name = name;
             this.serviceURLS.putAll(serviceURLs);
+            this.serviceConfigProps.putAll(svcProperties);
         }
 
         @Override
@@ -115,7 +137,45 @@ class PropertiesFileServiceDiscovery implements ServiceDiscovery {
 
         @Override
         public ZooKeeperConfig getZooKeeperConfiguration(String serviceName) {
-            return null; // TODO: PJZ: Implement me
+            ZooKeeperConfig zkConf = null;
+
+            Map<String, String> svcProps = serviceConfigProps.get(serviceName);
+            if (svcProps != null) {
+                String enabled = svcProps.get("haEnabled");
+                String ensemble = svcProps.get("ensemble");
+                String namespace = svcProps.get("namespace");
+                if (enabled != null && ensemble != null) {
+                    zkConf = new ZooKeeperConfigImpl(Boolean.valueOf(enabled), ensemble, namespace);
+                }
+            }
+            return zkConf;
+        }
+
+        private static class ZooKeeperConfigImpl implements ZooKeeperConfig {
+            private boolean isEnabled = false;
+            private String  ensemble  = null;
+            private String  namespace = null;
+
+            ZooKeeperConfigImpl(boolean enabled, String ensemble, String namespace) {
+                this.isEnabled = enabled;
+                this.ensemble  = ensemble;
+                this.namespace = namespace;
+            }
+
+            @Override
+            public boolean isEnabled() {
+                return isEnabled;
+            }
+
+            @Override
+            public String getEnsemble() {
+                return ensemble;
+            }
+
+            @Override
+            public String getNamespace() {
+                return namespace;
+            }
         }
     }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/8257ed44/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index c19df66..aee2908 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -114,6 +114,17 @@ public class SimpleDescriptorHandlerTest {
             "        </provider>\n" +
             "    </gateway>\n";
 
+    private static final String TEST_HA_PROVIDER_CONFIG =
+            "    <gateway>\n" +
+            "        <provider>\n" +
+            "            <role>ha</role>\n" +
+            "            <name>HaProvider</name>\n" +
+            "            <enabled>true</enabled>\n" +
+            "            <param><name>HIVE</name><value>enabled=auto;retrySleep=20;maxRetryAttempts=3</value></param>\n" +
+            "            <param><name>WEBHDFS</name><value>enabled=true;retrySleep=40;maxRetryAttempts=5</value></param>\n" +
+            "            <param><name>WEBHBASE</name><value>enabled=auto;retrySleep=30;maxRetryAttempts=3;maxFailoverAttempts=2;failoverSleep=10</value></param>\n" +
+            "        </provider>\n" +
+            "    </gateway>\n";
 
     /**
      * KNOX-1006
@@ -134,14 +145,14 @@ public class SimpleDescriptorHandlerTest {
 
         final Properties DISCOVERY_PROPERTIES = new Properties();
         DISCOVERY_PROPERTIES.setProperty(clusterName + ".name", clusterName);
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".NAMENODE", "hdfs://namenodehost:8020");
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".JOBTRACKER", "rpc://jobtrackerhostname:8050");
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHDFS", "http://webhdfshost:1234");
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHCAT", "http://webhcathost:50111/templeton");
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".OOZIE", "http://ooziehost:11000/oozie");
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE", "http://webhbasehost:1234");
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE", "http://hivehostname:10001/clipath");
-        DISCOVERY_PROPERTIES.setProperty(clusterName + ".RESOURCEMANAGER", "http://remanhost:8088/ws");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".NAMENODE.url", "hdfs://namenodehost:8020");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".JOBTRACKER.url", "rpc://jobtrackerhostname:8050");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHDFS.url", "http://webhdfshost:1234");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHCAT.url", "http://webhcathost:50111/templeton");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".OOZIE.url", "http://ooziehost:11000/oozie");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE.url", "http://webhbasehost:1234");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE.url", "http://hivehostname:10001/clipath");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".RESOURCEMANAGER.url", "http://remanhost:8088/ws");
 
         try {
             DISCOVERY_PROPERTIES.store(new FileOutputStream(discoveryConfig), null);
@@ -315,17 +326,17 @@ public class SimpleDescriptorHandlerTest {
         // Configure the PropertiesFile Service Discovery implementation for this test
         final String DEFAULT_VALID_SERVICE_URL = "http://localhost:9999/thiswillwork";
         Properties serviceDiscoverySourceProps = new Properties();
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".NAMENODE",
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".NAMENODE.url",
                                                 DEFAULT_VALID_SERVICE_URL.replace("http", "hdfs"));
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".JOBTRACKER",
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".JOBTRACKER.url",
                                                 DEFAULT_VALID_SERVICE_URL.replace("http", "rpc"));
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHDFS",         DEFAULT_VALID_SERVICE_URL);
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHCAT",         DEFAULT_VALID_SERVICE_URL);
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".OOZIE",           DEFAULT_VALID_SERVICE_URL);
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHBASE",        DEFAULT_VALID_SERVICE_URL);
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".HIVE",            "{SCHEME}://localhost:10000/");
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".RESOURCEMANAGER", DEFAULT_VALID_SERVICE_URL);
-        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".AMBARIUI",        DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHDFS.url",         DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHCAT.url",         DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".OOZIE.url",           DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHBASE.url",        DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".HIVE.url",            "{SCHEME}://localhost:10000/");
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".RESOURCEMANAGER.url", DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".AMBARIUI.url",        DEFAULT_VALID_SERVICE_URL);
         File serviceDiscoverySource = File.createTempFile("service-discovery", ".properties");
         serviceDiscoverySourceProps.store(new FileOutputStream(serviceDiscoverySource),
                                           "Test Service Discovery Source");
@@ -499,6 +510,222 @@ public class SimpleDescriptorHandlerTest {
         }
     }
 
+
+    /**
+     * KNOX-1153
+     *
+     * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
+     *             org.apache.knox.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
+     */
+    @Test
+    public void testSimpleDescriptorHandlerHaProviderConfigOverrides() throws Exception {
+
+        final String type = "PROPERTIES_FILE";
+        final String clusterName = "dummy";
+
+        // Create a properties file to be the source of service discovery details for this test
+        final File discoveryConfig = File.createTempFile(getClass().getName() + "_discovery-config", ".properties");
+
+        final String address = discoveryConfig.getAbsolutePath();
+
+        final String HIVE_HA_ENABLED   = "true";
+        final String HIVE_HA_ENSEMBLE  = "http://zkhost1:1281,http://zkhost2:1281";
+        final String HIVE_HA_NAMESPACE = "hiveserver2";
+
+        final String WEBHBASE_HA_ENABLED   = "false";
+        final String WEBHBASE_HA_ENSEMBLE  = "http://zkhost1:1281,http://zkhost2:1281";
+
+        final Properties DISCOVERY_PROPERTIES = new Properties();
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".name", clusterName);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".NAMENODE.url", "hdfs://namenodehost:8020");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHDFS.url", "http://webhdfshost1:1234,http://webhdfshost2:1234");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE.url", "http://hivehostname:10001/clipath");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".RESOURCEMANAGER.url", "http://remanhost:8088/ws");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE.url", "http://webhbasehost:8080");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE.haEnabled", HIVE_HA_ENABLED);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE.ensemble", HIVE_HA_ENSEMBLE);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE.namespace", HIVE_HA_NAMESPACE);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE.haEnabled", WEBHBASE_HA_ENABLED);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE.ensemble", WEBHBASE_HA_ENSEMBLE);
+
+        try {
+            DISCOVERY_PROPERTIES.store(new FileOutputStream(discoveryConfig), null);
+        } catch (FileNotFoundException e) {
+            fail(e.getMessage());
+        }
+
+        final Map<String, List<String>> serviceURLs = new HashMap<>();
+        serviceURLs.put("NAMENODE", null);
+        serviceURLs.put("WEBHDFS", null);
+        serviceURLs.put("RESOURCEMANAGER", null);
+
+        // Write the externalized provider config to a temp file
+        File providerConfig = new File(System.getProperty("java.io.tmpdir"), "ambari-cluster-policy.xml");
+        FileUtils.write(providerConfig, TEST_HA_PROVIDER_CONFIG);
+
+        File topologyFile = null;
+        try {
+            File destDir = new File(System.getProperty("java.io.tmpdir")).getCanonicalFile();
+
+            GatewayConfig gc = EasyMock.createNiceMock(GatewayConfig.class);
+            EasyMock.replay(gc);
+
+            // Mock out the simple descriptor
+            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+            EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+            for (String serviceName : serviceURLs.keySet()) {
+                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+                EasyMock.expect(svc.getVersion()).andReturn("WEBHDFS".equals(serviceName) ? "2.4.0" : null).anyTimes();
+                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.expect(svc.getParams()).andReturn(null).anyTimes();
+                EasyMock.replay(svc);
+                serviceMocks.add(svc);
+            }
+            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+            EasyMock.replay(testDescriptor);
+
+            // Invoke the simple descriptor handler
+            Map<String, File> files =
+                SimpleDescriptorHandler.handle(gc,
+                                               testDescriptor,
+                                               providerConfig.getParentFile(), // simple desc co-located with provider config
+                                               destDir);
+            topologyFile = files.get("topology");
+
+            // Validate the resulting topology descriptor
+            assertTrue(topologyFile.exists());
+
+            // Validate the topology descriptor's correctness
+            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+            if( !validator.validateTopology() ){
+                throw new SAXException( validator.getErrorString() );
+            }
+
+            XPathFactory xPathfactory = XPathFactory.newInstance();
+            XPath xpath = xPathfactory.newXPath();
+
+            // Parse the topology descriptor
+            Document topologyXml = XmlUtils.readXml(topologyFile);
+
+            // KNOX-1105 Mark generated topology files
+            assertThat("Expected the \"generated\" marker element in the topology XML, with value of \"true\".",
+                       topologyXml,
+                       hasXPath("/topology/generated", is("true")));
+
+            // Validate the provider configuration
+            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+            ProviderConfiguration testProviderConfiguration =
+                ProviderConfigurationParser.parseXML(new ByteArrayInputStream(TEST_HA_PROVIDER_CONFIG.getBytes()));
+            validateGeneratedProviderConfiguration(testProviderConfiguration, gatewayNode);
+
+            // Validate the service declarations
+            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+            NodeList serviceNodes =
+                (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+                Node serviceNode = serviceNodes.item(serviceNodeIndex);
+
+                // Validate the role
+                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+                assertNotNull(roleNode);
+                String role = roleNode.getNodeValue();
+
+                // Validate the explicit version for the WEBHDFS service
+                if ("WEBHDFS".equals(role)) {
+                    Node versionNode = (Node) xpath.compile("version/text()").evaluate(serviceNode, XPathConstants.NODE);
+                    assertNotNull(versionNode);
+                    String version = versionNode.getNodeValue();
+                    assertEquals("2.4.0", version);
+
+                    // Not expecting any service params because WEBHDFS HA is not ZK-based
+                    NodeList paramNodes = (NodeList) xpath.compile("param").evaluate(serviceNode, XPathConstants.NODESET);
+                    assertNotNull(paramNodes);
+                    assertEquals(0, paramNodes.getLength());
+
+                    // Expecting multiple URLs because of HA enablement
+                    NodeList urlNodes = (NodeList) xpath.compile("url").evaluate(serviceNode, XPathConstants.NODESET);
+                    assertNotNull(urlNodes);
+                    assertEquals(2, urlNodes.getLength());
+                }
+
+                // Validate the HIVE service params
+                if ("HIVE".equals(role)) {
+                    // Expecting HA-related service params
+                    NodeList paramNodes = (NodeList) xpath.compile("param").evaluate(serviceNode, XPathConstants.NODESET);
+                    assertNotNull(paramNodes);
+                    Map<String, String> hiveServiceParams = new HashMap<>();
+                    for (int paramNodeIndex=0; paramNodeIndex < paramNodes.getLength(); paramNodeIndex++) {
+                        Node paramNode = paramNodes.item(paramNodeIndex);
+                        Node nameNode = (Node) xpath.compile("name/text()").evaluate(paramNode, XPathConstants.NODE);
+                        assertNotNull(nameNode);
+                        Node valueNode = (Node) xpath.compile("value/text()").evaluate(paramNode, XPathConstants.NODE);
+                        assertNotNull(valueNode);
+                        hiveServiceParams.put(nameNode.getNodeValue(), valueNode.getNodeValue());
+                    }
+                    assertEquals("Expected true because enabled=auto and service config indicates HA is enabled",
+                                 HIVE_HA_ENABLED, hiveServiceParams.get("haEnabled"));
+                    assertEquals(HIVE_HA_ENSEMBLE, hiveServiceParams.get("zookeeperEnsemble"));
+                    assertEquals(HIVE_HA_NAMESPACE, hiveServiceParams.get("zookeeperNamespace"));
+                }
+
+                // Validate the WEBHBASE service params
+                if ("WEBHBASE".equals(role)) {
+                    // Expecting HA-related service params
+                    NodeList paramNodes = (NodeList) xpath.compile("param").evaluate(serviceNode, XPathConstants.NODESET);
+                    assertNotNull(paramNodes);
+                    Map<String, String> webhbaseServiceParams = new HashMap<>();
+                    for (int paramNodeIndex=0; paramNodeIndex < paramNodes.getLength(); paramNodeIndex++) {
+                        Node paramNode = paramNodes.item(paramNodeIndex);
+                        Node nameNode = (Node) xpath.compile("name/text()").evaluate(paramNode, XPathConstants.NODE);
+                        assertNotNull(nameNode);
+                        Node valueNode = (Node) xpath.compile("value/text()").evaluate(paramNode, XPathConstants.NODE);
+                        assertNotNull(valueNode);
+                        webhbaseServiceParams.put(nameNode.getNodeValue(), valueNode.getNodeValue());
+                    }
+                    assertEquals("Expected false because enabled=auto and service config indicates HA is NOT enabled",
+                                 WEBHBASE_HA_ENABLED, webhbaseServiceParams.get("haEnabled"));
+                    assertEquals(WEBHBASE_HA_ENSEMBLE, webhbaseServiceParams.get("zookeeperEnsemble"));
+                }
+
+                // Validate the URLs
+                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+                    Node urlNode = urlNodes.item(urlNodeIndex);
+                    assertNotNull(urlNode);
+                    String url = urlNode.getNodeValue();
+
+                    // If the service should have a URL (some don't require it)
+                    if (serviceURLs.containsKey(role)) {
+                        assertNotNull("Declared service should have a URL.", url);
+                        if (!topologyServiceURLs.containsKey(role)) {
+                            topologyServiceURLs.put(role, new ArrayList<>());
+                        }
+                        topologyServiceURLs.get(role).add(url); // Add it for validation later
+                    }
+                }
+            }
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            providerConfig.delete();
+            discoveryConfig.delete();
+            if (topologyFile != null) {
+                topologyFile.delete();
+            }
+        }
+    }
+
+
     private File writeProviderConfig(String path, String content) throws IOException {
         File f = new File(path);
         FileUtils.write(f, content);

http://git-wip-us.apache.org/repos/asf/knox/blob/8257ed44/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Service.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Service.java b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Service.java
index de642bc..faa8165 100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Service.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Service.java
@@ -25,6 +25,8 @@ import java.util.Map;
 
 public class Service {
 
+  public static final String HA_ENABLED_PARAM = "haEnabled";
+
   private String role;
   private String name;
   private Version version;


Mime
View raw message