knox-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kmin...@apache.org
Subject [4/4] git commit: KNOX-88: Support HDFS HA
Date Fri, 15 Aug 2014 20:41:32 GMT
KNOX-88: Support HDFS HA


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/9aaeeed1
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/9aaeeed1
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/9aaeeed1

Branch: refs/heads/master
Commit: 9aaeeed179cd9cd0a5ad378f7bf0d8b736fe814e
Parents: 93b5625
Author: Kevin Minder <kevin.minder@hortonworks.com>
Authored: Fri Aug 15 16:41:13 2014 -0400
Committer: Kevin Minder <kevin.minder@hortonworks.com>
Committed: Fri Aug 15 16:41:13 2014 -0400

----------------------------------------------------------------------
 gateway-provider-ha/pom.xml                     |  86 ++
 .../deploy/HaProviderDeploymentContributor.java |  98 +++
 .../gateway/ha/provider/HaDescriptor.java       |  34 +
 .../hadoop/gateway/ha/provider/HaProvider.java  |  59 ++
 .../gateway/ha/provider/HaServiceConfig.java    |  45 +
 .../ha/provider/HaServletContextListener.java   | 116 +++
 .../ha/provider/impl/DefaultHaDescriptor.java   |  71 ++
 .../ha/provider/impl/DefaultHaProvider.java     |  81 ++
 .../provider/impl/DefaultHaServiceConfig.java   |  99 +++
 .../ha/provider/impl/HaDescriptorConstants.java |  45 +
 .../ha/provider/impl/HaDescriptorFactory.java   |  87 ++
 .../ha/provider/impl/HaDescriptorManager.java   | 118 +++
 .../provider/impl/HaServiceConfigConstants.java |  46 +
 .../gateway/ha/provider/impl/URLManager.java    |  56 ++
 .../ha/provider/impl/i18n/HaMessages.java       |  37 +
 ...gateway.deploy.ProviderDeploymentContributor |  19 +
 .../HaProviderDeploymentContributorTest.java    |  46 +
 .../ha/provider/impl/DefaultHaProviderTest.java |  77 ++
 .../provider/impl/HaDescriptorFactoryTest.java  |  53 ++
 .../provider/impl/HaDescriptorManagerTest.java  |  86 ++
 .../ha/provider/impl/URLManagerTest.java        |  44 +
 .../jersey/JerseyDeploymentContributorTest.java |   2 +-
 .../pom.xml                                     |   4 +
 .../ServiceRegistryFunctionProcessorBase.java   |  10 +-
 .../impl/ServiceUrlFunctionProcessor.java       |  21 +-
 .../ServiceAddressFunctionProcessorTest.java    |  15 +-
 .../impl/ServiceHostFunctionProcessorTest.java  |  16 +-
 ...rviceMappedAddressFunctionProcessorTest.java |  15 +-
 .../ServiceMappedHostFunctionProcessorTest.java |  15 +-
 .../ServiceMappedUrlFunctionProcessorTest.java  |  16 +-
 .../impl/ServicePathFunctionProcessorTest.java  |  15 +-
 .../impl/ServicePortFunctionProcessorTest.java  |  15 +-
 .../impl/ServiceRegistryFunctionsTest.java      |  10 +-
 .../ServiceSchemeFunctionProcessorTest.java     |  15 +-
 .../impl/ServiceUrlFunctionProcessorTest.java   |  15 +-
 gateway-release/pom.xml                         |   4 +
 .../apache/hadoop/gateway/GatewayMessages.java  |   1 -
 .../gateway/deploy/DeploymentFactory.java       |   2 +-
 .../dispatch/PartiallyRepeatableHttpEntity.java | 163 ----
 .../impl/DefaultServiceRegistryService.java     |  20 +-
 .../services/registry/impl/RegEntry.java        |  12 +-
 .../interpreter/ServicePropertyInterpreter.java |   3 +-
 .../xml/KnoxFormatXmlTopologyRules.java         |   2 +-
 .../PartiallyRepeatableHttpEntityTest.java      | 874 -------------------
 .../topology/xml/TopologyRulesModuleTest.java   |  26 +-
 .../service-param-topology-ambari-format.conf   |   6 +-
 .../xml/service-param-topology-knox-format.xml  |   3 +-
 .../xml/simple-topology-ambari-format.conf      |   6 +-
 .../xml/simple-topology-knox-format.xml         |   3 +-
 gateway-service-webhdfs/pom.xml                 |  26 +
 ...NameNodeHaDispatchDeploymentContributor.java |  60 ++
 .../hdfs/WebHdfsDeploymentContributor.java      |  27 +-
 .../hdfs/dispatch/SafeModeException.java        |  21 +
 .../gateway/hdfs/dispatch/StandbyException.java |  21 +
 .../dispatch/WebHdfsHaHttpClientDispatch.java   | 160 ++++
 .../gateway/hdfs/i18n/WebHdfsMessages.java      |  57 ++
 ...gateway.deploy.ProviderDeploymentContributor |  19 +
 ...NodeHaDispatchDeploymentContributorTest.java |  44 +
 .../WebHdfsHaHttpClientDispatchTest.java        |  99 +++
 .../gateway/dispatch/HttpClientDispatch.java    | 476 +++++-----
 .../dispatch/PartiallyRepeatableHttpEntity.java | 163 ++++
 .../services/registry/ServiceRegistry.java      |   8 +-
 .../apache/hadoop/gateway/topology/Service.java |  23 +-
 .../PartiallyRepeatableHttpEntityTest.java      | 874 +++++++++++++++++++
 .../hadoop/gateway/WebHdfsHaFuncTest.java       | 286 ++++++
 .../deploy/DeploymentFactoryFuncTest.java       |   6 +-
 .../hadoop/gateway/WebHdfsHaFuncTest/users.ldif | 121 +++
 .../webhdfs-liststatus-standby.json             |   5 +
 .../webhdfs-liststatus-success.json             |  88 ++
 .../webhdfs-rename-safemode-off.json            |   1 +
 .../webhdfs-rename-safemode.json                |   5 +
 pom.xml                                         |   6 +
 72 files changed, 3932 insertions(+), 1376 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/pom.xml b/gateway-provider-ha/pom.xml
new file mode 100644
index 0000000..b5c687f
--- /dev/null
+++ b/gateway-provider-ha/pom.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.5.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-provider-ha</artifactId>
+
+    <name>gateway-provider-ha</name>
+    <description>An extension of the gateway that supports Hadoop services running in HA mode</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-library</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-provider-rewrite</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributor.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributor.java
new file mode 100644
index 0000000..10cce6c
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributor.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.deploy;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
+import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
+import org.apache.hadoop.gateway.ha.provider.impl.HaDescriptorFactory;
+import org.apache.hadoop.gateway.ha.provider.impl.HaDescriptorManager;
+import org.apache.hadoop.gateway.ha.provider.impl.i18n.HaMessages;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.topology.Provider;
+import org.apache.hadoop.gateway.topology.Service;
+import org.jboss.shrinkwrap.api.asset.StringAsset;
+
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * Deployment contributor for the "ha" provider role.  It converts the
+ * provider's topology parameters (service name -> config string) into an
+ * {@link HaDescriptor}, writes that descriptor into the web archive as a
+ * WEB-INF resource, and registers the {@link HaServletContextListener} that
+ * loads it back when the servlet context starts.
+ */
+public class HaProviderDeploymentContributor extends ProviderDeploymentContributorBase {
+
+   // Role name under which this provider is referenced in a topology file.
+   private static final String PROVIDER_ROLE_NAME = "ha";
+
+   // Implementation name topologies use to select this contributor.
+   private static final String PROVIDER_IMPL_NAME = "HaProvider";
+
+   // Context key used to stash the descriptor between contributeProvider()
+   // and finalizeContribution().
+   private static final String HA_DESCRIPTOR_NAME = "ha.provider.descriptor";
+
+   private static final HaMessages LOG = MessagesFactory.get(HaMessages.class);
+
+   @Override
+   public String getRole() {
+      return PROVIDER_ROLE_NAME;
+   }
+
+   @Override
+   public String getName() {
+      return PROVIDER_IMPL_NAME;
+   }
+
+   /**
+    * Builds an HaDescriptor from every provider parameter (one service config
+    * per parameter) and stores its serialized form in the web archive under
+    * WEB-INF so the HaServletContextListener can read it at runtime.
+    */
+   @Override
+   public void contributeProvider(DeploymentContext context, Provider provider) {
+      Map<String, String> params = provider.getParams();
+      HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
+      for (Entry<String, String> entry : params.entrySet()) {
+         HaServiceConfig config = HaDescriptorFactory.createServiceConfig(entry.getKey(), entry.getValue());
+         descriptor.addServiceConfig(config);
+      }
+      StringWriter writer = new StringWriter();
+      try {
+         HaDescriptorManager.store(descriptor, writer);
+      } catch (IOException e) {
+         // NOTE(review): on a store() failure the error is only logged and an
+         // empty/partial descriptor asset is still written below — confirm intended.
+         LOG.failedToWriteHaDescriptor(e);
+      }
+      String asset = writer.toString();
+      context.getWebArchive().addAsWebInfResource(
+            new StringAsset(asset),
+            HaServletContextListener.DESCRIPTOR_DEFAULT_FILE_NAME);
+      // Stash the descriptor so finalizeContribution() knows HA was configured.
+      context.addDescriptor(HA_DESCRIPTOR_NAME, descriptor);
+   }
+
+   /**
+    * Registers the servlet context listener and the context-param that points
+    * it at the descriptor, but only when contributeProvider() actually ran.
+    */
+   @Override
+   public void finalizeContribution(DeploymentContext context) {
+      if (context.getDescriptor(HA_DESCRIPTOR_NAME) != null) {
+         // Tell the provider the location of the descriptor.
+         // Doing this here instead of in 'contributeProvider' so that this ServletContextListener comes after the gateway services have been set.
+         context.getWebAppDescriptor().createListener().listenerClass(HaServletContextListener.class.getName());
+         context.getWebAppDescriptor().createContextParam()
+               .paramName(HaServletContextListener.DESCRIPTOR_LOCATION_INIT_PARAM_NAME)
+               .paramValue(HaServletContextListener.DESCRIPTOR_DEFAULT_LOCATION);
+      }
+   }
+
+   /** No per-filter contribution is needed for the HA provider. */
+   @Override
+   public void contributeFilter(DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params) {
+      //no op
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaDescriptor.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaDescriptor.java
new file mode 100644
index 0000000..e0b5b04
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaDescriptor.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider;
+
+import java.util.List;
+
+/**
+ * Describes the HA configuration of a topology: a collection of
+ * {@link HaServiceConfig} entries keyed by service name.
+ */
+public interface HaDescriptor {
+
+   /** Registers (or replaces) the configuration for a service. */
+   public void addServiceConfig(HaServiceConfig serviceConfig);
+
+   /**
+    * Returns the configuration registered for the given service name
+    * (the default implementation returns null when the service is unknown).
+    */
+   public HaServiceConfig getServiceConfig(String serviceName);
+
+   /** Names of all services that have a configuration, enabled or not. */
+   public List<String> getServiceNames();
+
+   /** Names of only those services whose configuration is enabled. */
+   public List<String> getEnabledServiceNames();
+
+   /** All registered service configurations. */
+   public List<HaServiceConfig> getServiceConfigs();
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaProvider.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaProvider.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaProvider.java
new file mode 100644
index 0000000..5e1b91d
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaProvider.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider;
+
+import java.util.List;
+
+/**
+ * Runtime view of HA for a deployed topology: tracks, per service, the set of
+ * configured URLs and which one is currently considered active.
+ */
+public interface HaProvider {
+
+   /** @return the descriptor this provider was configured from */
+   public HaDescriptor getHaDescriptor();
+
+   /**
+    * Add a service name (role) as a HA service with the URLs that it is configured for
+    *
+    * @param serviceName the name of the service
+    * @param urls        the list of urls that can be used for that service
+    */
+   public void addHaService(String serviceName, List<String> urls);
+
+   /**
+    * Returns whether the service is enabled for HA
+    *
+    * @param serviceName the name of the service
+    * @return true if the service is enabled; false otherwise
+    */
+   public boolean isHaEnabled(String serviceName);
+
+   /**
+    * Returns the current URL that is known to be active for the service
+    *
+    * @param serviceName the name of the service
+    * @return the URL as a string or null if the service name is not found
+    */
+   public String getActiveURL(String serviceName);
+
+   /**
+    * Mark the URL for the service as one that has failed. This method changes the active URL to
+    * the next available URL for the service.
+    *
+    * @param serviceName the name of the service
+    * @param url         the URL that has failed in some way
+    */
+   public void markFailedURL(String serviceName, String url);
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServiceConfig.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServiceConfig.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServiceConfig.java
new file mode 100644
index 0000000..aa116fd
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServiceConfig.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider;
+
+/**
+ * Per-service HA tuning knobs: whether HA is enabled and the failover/retry
+ * attempt limits and sleep intervals used when a service URL fails.
+ */
+public interface HaServiceConfig {
+
+   public void setServiceName(String name);
+
+   public String getServiceName();
+
+   /** Whether HA behavior is enabled for this service. */
+   public boolean isEnabled();
+
+   public void setEnabled(boolean enabled);
+
+   /** Maximum number of times a request may fail over to another URL. */
+   public void setMaxFailoverAttempts(int limit);
+
+   public int getMaxFailoverAttempts();
+
+   /** Sleep between failover attempts; units not specified here — see HaServiceConfigConstants. */
+   public void setFailoverSleep(int sleep);
+
+   public int getFailoverSleep();
+
+   /** Maximum number of times a request may be retried against the same URL. */
+   public void setMaxRetryAttempts(int limit);
+
+   public int getMaxRetryAttempts();
+
+   /** Sleep between retry attempts; units not specified here — see HaServiceConfigConstants. */
+   public void setRetrySleep(int sleep);
+
+   public int getRetrySleep();
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java
new file mode 100644
index 0000000..62dd684
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider;
+
+import org.apache.hadoop.gateway.ha.provider.impl.DefaultHaProvider;
+import org.apache.hadoop.gateway.ha.provider.impl.HaDescriptorManager;
+import org.apache.hadoop.gateway.ha.provider.impl.i18n.HaMessages;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.GatewayServices;
+import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
+
+import javax.servlet.ServletContext;
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+
+/**
+ * Servlet context listener that, at context start-up, locates and parses the
+ * HA descriptor (WEB-INF/ha.xml by default), builds a {@link DefaultHaProvider}
+ * seeded with service URLs from the gateway's ServiceRegistry, and publishes it
+ * as a servlet-context attribute for dispatchers to look up.
+ */
+public class HaServletContextListener implements ServletContextListener {
+
+   // Attribute under which the HaProvider is published in the servlet context.
+   public static final String PROVIDER_ATTRIBUTE_NAME = HaProvider.class.getName();
+
+   // Init-param that can override the descriptor location.
+   public static final String DESCRIPTOR_LOCATION_INIT_PARAM_NAME = "haDescriptorLocation";
+
+   public static final String DESCRIPTOR_DEFAULT_FILE_NAME = "ha.xml";
+
+   public static final String DESCRIPTOR_DEFAULT_LOCATION = "/WEB-INF/" + DESCRIPTOR_DEFAULT_FILE_NAME;
+
+   private static final HaMessages LOG = MessagesFactory.get(HaMessages.class);
+
+
+   /** Loads the descriptor and installs the HaProvider; fails the deployment if it can't. */
+   @Override
+   public void contextInitialized(ServletContextEvent event) {
+      HaDescriptor descriptor;
+      ServletContext servletContext = event.getServletContext();
+      try {
+         URL url = locateDescriptor(servletContext);
+         descriptor = loadDescriptor(url);
+      } catch (IOException e) {
+         // A missing/unreadable descriptor is fatal for this web app.
+         throw new IllegalStateException(e);
+      }
+      setupHaProvider(descriptor, servletContext);
+   }
+
+   @Override
+   public void contextDestroyed(ServletContextEvent event) {
+      event.getServletContext().removeAttribute(PROVIDER_ATTRIBUTE_NAME);
+   }
+
+   /** Convenience accessor for the provider published by contextInitialized(). */
+   public static HaProvider getHaProvider(ServletContext context) {
+      return (HaProvider) context.getAttribute(PROVIDER_ATTRIBUTE_NAME);
+   }
+
+   /**
+    * Builds the provider: for every HA-enabled service in the descriptor, pulls
+    * that service's URLs for this cluster from the gateway ServiceRegistry.
+    */
+   private void setupHaProvider(HaDescriptor descriptor, ServletContext servletContext) {
+      GatewayServices services = (GatewayServices) servletContext.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+      String clusterName = (String) servletContext.getAttribute(GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE);
+      ServiceRegistry serviceRegistry = services.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
+      HaProvider provider = new DefaultHaProvider(descriptor);
+      List<String> serviceNames = descriptor.getEnabledServiceNames();
+      for (String serviceName : serviceNames) {
+         provider.addHaService(serviceName, serviceRegistry.lookupServiceURLs(clusterName, serviceName));
+      }
+      servletContext.setAttribute(PROVIDER_ATTRIBUTE_NAME, provider);
+   }
+
+   /**
+    * Resolves the descriptor location: the init-param (or the default path) is
+    * first tried as a context resource, then as an absolute URL.
+    */
+   private static URL locateDescriptor(ServletContext context) throws IOException {
+      String param = context.getInitParameter(DESCRIPTOR_LOCATION_INIT_PARAM_NAME);
+      if (param == null) {
+         param = DESCRIPTOR_DEFAULT_LOCATION;
+      }
+      URL url;
+      try {
+         url = context.getResource(param);
+      } catch (MalformedURLException e) {
+         // Ignore it and try using the value directly as a URL.
+         url = null;
+      }
+      if (url == null) {
+         url = new URL(param);
+      }
+      // NOTE(review): this check is unreachable — new URL(...) never returns
+      // null (it throws MalformedURLException, an IOException, instead).
+      if (url == null) {
+         throw new FileNotFoundException(param);
+      }
+      return url;
+   }
+
+   /** Opens the URL and parses it into an HaDescriptor. */
+   private static HaDescriptor loadDescriptor(URL url) throws IOException {
+      InputStream stream = url.openStream();
+      // NOTE(review): if load() throws, the stream is not closed — consider a
+      // finally block; also a close() failure is logged with the misleadingly
+      // named failedToLoadHaDescriptor message.
+      HaDescriptor descriptor = HaDescriptorManager.load(stream);
+      try {
+         stream.close();
+      } catch (IOException e) {
+         LOG.failedToLoadHaDescriptor(e);
+      }
+      return descriptor;
+   }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaDescriptor.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaDescriptor.java
new file mode 100644
index 0000000..a551bfe
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaDescriptor.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import com.google.common.collect.Lists;
+import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Default HaDescriptor backed by a ConcurrentHashMap keyed by service name.
+ * The accessor methods return snapshot lists, not live views of the map.
+ */
+public class DefaultHaDescriptor implements HaDescriptor {
+
+   // Service name -> its HA configuration.
+   private ConcurrentHashMap<String, HaServiceConfig> serviceConfigs;
+
+   public DefaultHaDescriptor() {
+      serviceConfigs = new ConcurrentHashMap<String, HaServiceConfig>();
+   }
+
+   /** Registers the config under its service name; a later add replaces an earlier one. */
+   @Override
+   public void addServiceConfig(HaServiceConfig serviceConfig) {
+      if (serviceConfig == null) {
+         throw new IllegalArgumentException("Service config must not be null");
+      }
+      serviceConfigs.put(serviceConfig.getServiceName(), serviceConfig);
+   }
+
+   /** @return the config for the name, or null if none was registered. */
+   @Override
+   public HaServiceConfig getServiceConfig(String serviceName) {
+      return serviceConfigs.get(serviceName);
+   }
+
+   @Override
+   public List<HaServiceConfig> getServiceConfigs() {
+      return Lists.newArrayList(serviceConfigs.values());
+   }
+
+   @Override
+   public List<String> getServiceNames() {
+      return Lists.newArrayList(serviceConfigs.keySet());
+   }
+
+   /** Names of only those services whose config reports isEnabled() == true. */
+   @Override
+   public List<String> getEnabledServiceNames() {
+      ArrayList<String> services = new ArrayList<String>(serviceConfigs.size());
+      Collection<HaServiceConfig> configs = serviceConfigs.values();
+      for (HaServiceConfig config : configs) {
+         if (config.isEnabled()) {
+            services.add(config.getServiceName());
+         }
+      }
+      return services;
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProvider.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProvider.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProvider.java
new file mode 100644
index 0000000..880707a
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProvider.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+import org.apache.hadoop.gateway.ha.provider.impl.i18n.HaMessages;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Default HaProvider: keeps one URLManager per HA service and delegates
+ * active-URL tracking and failover marking to it.
+ */
+public class DefaultHaProvider implements HaProvider {
+
+   private static final HaMessages LOG = MessagesFactory.get(HaMessages.class);
+
+   // Static configuration this provider was built from.
+   private HaDescriptor descriptor;
+
+   // Service name -> manager of that service's URL list / active URL.
+   private ConcurrentHashMap<String, URLManager> haServices;
+
+   public DefaultHaProvider(HaDescriptor descriptor) {
+      if (descriptor == null) {
+         throw new IllegalArgumentException("Descriptor can not be null");
+      }
+      this.descriptor = descriptor;
+      haServices = new ConcurrentHashMap<String, URLManager>();
+   }
+
+   @Override
+   public HaDescriptor getHaDescriptor() {
+      return descriptor;
+   }
+
+   /** Registers (or replaces) the URL set managed for the service. */
+   @Override
+   public void addHaService(String serviceName, List<String> urls) {
+      haServices.put(serviceName, new URLManager(urls));
+   }
+
+   /** True only when the descriptor has a config for the service and it is enabled. */
+   @Override
+   public boolean isHaEnabled(String serviceName) {
+      HaServiceConfig config = descriptor.getServiceConfig(serviceName);
+      if (config != null && config.isEnabled()) {
+         return true;
+      }
+      return false;
+   }
+
+   @Override
+   public String getActiveURL(String serviceName) {
+      // NOTE(review): containsKey-then-get is two separate map operations;
+      // benign here as long as entries are never removed — confirm.
+      if (haServices.containsKey(serviceName)) {
+         return haServices.get(serviceName).getActiveURL();
+      }
+      LOG.noActiveUrlFound(serviceName);
+      return null;
+   }
+
+   /** Delegates failure handling to the service's URLManager, which advances the active URL. */
+   @Override
+   public void markFailedURL(String serviceName, String url) {
+      if (haServices.containsKey(serviceName)) {
+         haServices.get(serviceName).markFailed(url);
+      } else {
+         LOG.noServiceFound(serviceName);
+      }
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaServiceConfig.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaServiceConfig.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaServiceConfig.java
new file mode 100644
index 0000000..4bb1e62
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaServiceConfig.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+
+public class DefaultHaServiceConfig implements HaServiceConfig, HaServiceConfigConstants {
+
+   private String name;
+
+   private boolean enabled = DEFAULT_ENABLED;
+
+   private int maxFailoverAttempts = DEFAULT_MAX_FAILOVER_ATTEMPTS;
+
+   private int failoverSleep = DEFAULT_FAILOVER_SLEEP;
+
+   private int maxRetryAttempts = DEFAULT_MAX_RETRY_ATTEMPTS;
+
+   private int retrySleep  = DEFAULT_RETRY_SLEEP;
+
+   public DefaultHaServiceConfig(String name) {
+      this.name = name;
+   }
+   @Override
+
+   public String getServiceName() {
+      return name;
+   }
+
+   @Override
+   public void setServiceName(String name) {
+      this.name = name;
+   }
+
+   @Override
+   public boolean isEnabled() {
+      return enabled;
+   }
+
+   @Override
+   public void setEnabled(boolean enabled) {
+      this.enabled = enabled;
+   }
+
+   @Override
+   public int getMaxFailoverAttempts() {
+      return maxFailoverAttempts;
+   }
+
+   @Override
+   public void setMaxFailoverAttempts(int maxFailoverAttempts) {
+      this.maxFailoverAttempts = maxFailoverAttempts;
+   }
+
+   @Override
+   public int getFailoverSleep() {
+      return failoverSleep;
+   }
+
+   @Override
+   public void setFailoverSleep(int failoverSleep) {
+      this.failoverSleep = failoverSleep;
+   }
+
+   @Override
+   public int getMaxRetryAttempts() {
+      return maxRetryAttempts;
+   }
+
+   @Override
+   public void setMaxRetryAttempts(int maxRetryAttempts) {
+      this.maxRetryAttempts = maxRetryAttempts;
+   }
+
+   @Override
+   public int getRetrySleep() {
+      return retrySleep;
+   }
+
+   @Override
+   public void setRetrySleep(int retrySleep) {
+      this.retrySleep = retrySleep;
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorConstants.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorConstants.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorConstants.java
new file mode 100644
index 0000000..8113f1c
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorConstants.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
/**
 * Constants naming the XML elements and attributes used to render and consume
 * an HA descriptor, for example:
 * <pre>
 * &lt;ha&gt;
 *   &lt;service name='foo' maxFailoverAttempts='3' enabled='true'/&gt;
 * &lt;/ha&gt;
 * </pre>
 */
public interface HaDescriptorConstants {

   /** Root element of the HA descriptor document. */
   String ROOT_ELEMENT = "ha";

   /** Element carrying the HA settings for one service. */
   String SERVICE_ELEMENT = "service";

   /** Attribute naming the service the configuration applies to. */
   String SERVICE_NAME_ATTRIBUTE = "name";

   /** Attribute limiting how many times to fail over to another URL. */
   String MAX_FAILOVER_ATTEMPTS = "maxFailoverAttempts";

   /** Attribute giving the time to sleep before a failover attempt. */
   String FAILOVER_SLEEP = "failoverSleep";

   /** Attribute limiting how many times to retry the same URL. */
   String MAX_RETRY_ATTEMPTS = "maxRetryAttempts";

   /** Attribute giving the time to sleep before a retry attempt. */
   String RETRY_SLEEP = "retrySleep";

   /** Attribute telling whether HA is enabled for the service. */
   String ENABLED_ATTRIBUTE = "enabled";

}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactory.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactory.java
new file mode 100644
index 0000000..b5e1232
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactory.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public abstract class HaDescriptorFactory implements HaServiceConfigConstants {
+
+   public static HaDescriptor createDescriptor() {
+      return new DefaultHaDescriptor();
+   }
+
+   public static HaServiceConfig createServiceConfig(String serviceName, String config) {
+      Map<String, String> configMap = parseHaConfiguration(config);
+      String enabledValue = configMap.get(CONFIG_PARAM_ENABLED);
+      String maxFailoverAttempts = configMap.get(CONFIG_PARAM_MAX_FAILOVER_ATTEMPTS);
+      String failoverSleep = configMap.get(CONFIG_PARAM_FAILOVER_SLEEP);
+      String maxRetryAttempts = configMap.get(CONFIG_PARAM_MAX_RETRY_ATTEMPTS);
+      String retrySleep = configMap.get(CONFIG_PARAM_RETRY_SLEEP);
+      return createServiceConfig(serviceName, enabledValue, maxFailoverAttempts, failoverSleep, maxRetryAttempts, retrySleep);
+   }
+
+   public static HaServiceConfig createServiceConfig(String serviceName, String enabledValue,
+                                                     String maxFailoverAttemptsValue, String failoverSleepValue,
+                                                     String maxRetryAttemptsValue, String retrySleepValue) {
+      boolean enabled = DEFAULT_ENABLED;
+      int maxFailoverAttempts = DEFAULT_MAX_FAILOVER_ATTEMPTS;
+      int failoverSleep = DEFAULT_FAILOVER_SLEEP;
+      int maxRetryAttempts = DEFAULT_MAX_RETRY_ATTEMPTS;
+      int retrySleep = DEFAULT_RETRY_SLEEP;
+      if (enabledValue != null && enabledValue.trim().length() > 0) {
+         enabled = Boolean.parseBoolean(enabledValue);
+      }
+      if (maxFailoverAttemptsValue != null && maxFailoverAttemptsValue.trim().length() > 0) {
+         maxFailoverAttempts = Integer.parseInt(maxFailoverAttemptsValue);
+      }
+      if (failoverSleepValue != null && failoverSleepValue.trim().length() > 0) {
+         failoverSleep = Integer.parseInt(failoverSleepValue);
+      }
+      if (maxRetryAttemptsValue != null && maxRetryAttemptsValue.trim().length() > 0) {
+         maxRetryAttempts = Integer.parseInt(maxRetryAttemptsValue);
+      }
+      if (retrySleepValue != null && retrySleepValue.trim().length() > 0) {
+         retrySleep = Integer.parseInt(retrySleepValue);
+      }
+      DefaultHaServiceConfig serviceConfig = new DefaultHaServiceConfig(serviceName);
+      serviceConfig.setEnabled(enabled);
+      serviceConfig.setMaxFailoverAttempts(maxFailoverAttempts);
+      serviceConfig.setFailoverSleep(failoverSleep);
+      serviceConfig.setMaxRetryAttempts(maxRetryAttempts);
+      serviceConfig.setRetrySleep(retrySleep);
+      return serviceConfig;
+   }
+
+   private static Map<String, String> parseHaConfiguration(String configuration) {
+      Map<String, String> parameters = new HashMap<String, String>();
+      if (configuration != null) {
+         String[] pairs = configuration.split(CONFIG_PAIRS_DELIMITER);
+         for (String pair : pairs) {
+            String[] tokens = pair.split(CONFIG_PAIR_DELIMITER);
+            if (tokens.length == 2) {
+               parameters.put(tokens[0], tokens[1]);
+            }
+         }
+      }
+      return parameters;
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManager.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManager.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManager.java
new file mode 100644
index 0000000..ba538f0
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManager.java
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+import org.apache.hadoop.gateway.ha.provider.impl.i18n.HaMessages;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Writer;
+import java.util.List;
+
+public class HaDescriptorManager implements HaDescriptorConstants {
+
+   private static final HaMessages LOG = MessagesFactory.get(HaMessages.class);
+
+   public static void store(HaDescriptor descriptor, Writer writer) throws IOException {
+      try {
+         DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
+         DocumentBuilder builder = builderFactory.newDocumentBuilder();
+         Document document = builder.newDocument();
+         document.setXmlStandalone(true);
+
+         Element root = document.createElement(ROOT_ELEMENT);
+         document.appendChild(root);
+
+         List<HaServiceConfig> serviceConfigs = descriptor.getServiceConfigs();
+         if (serviceConfigs != null && !serviceConfigs.isEmpty()) {
+            for (HaServiceConfig config : serviceConfigs) {
+               Element serviceElement = document.createElement(SERVICE_ELEMENT);
+               serviceElement.setAttribute(SERVICE_NAME_ATTRIBUTE, config.getServiceName());
+               serviceElement.setAttribute(MAX_FAILOVER_ATTEMPTS, Integer.toString(config.getMaxFailoverAttempts()));
+               serviceElement.setAttribute(FAILOVER_SLEEP, Integer.toString(config.getFailoverSleep()));
+               serviceElement.setAttribute(MAX_RETRY_ATTEMPTS, Integer.toString(config.getMaxRetryAttempts()));
+               serviceElement.setAttribute(RETRY_SLEEP, Integer.toString(config.getRetrySleep()));
+               serviceElement.setAttribute(ENABLED_ATTRIBUTE, Boolean.toString(config.isEnabled()));
+               root.appendChild(serviceElement);
+            }
+         }
+
+         TransformerFactory transformerFactory = TransformerFactory.newInstance();
+         transformerFactory.setAttribute("indent-number", 2);
+         Transformer transformer = transformerFactory.newTransformer();
+         transformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
+         transformer.setOutputProperty(OutputKeys.INDENT, "yes");
+         StreamResult result = new StreamResult(writer);
+         DOMSource source = new DOMSource(document);
+         transformer.transform(source, result);
+
+      } catch (ParserConfigurationException e) {
+         LOG.failedToWriteHaDescriptor(e);
+         throw new IOException(e);
+      } catch (TransformerException e) {
+         LOG.failedToWriteHaDescriptor(e);
+         throw new IOException(e);
+      }
+   }
+
+   public static HaDescriptor load(InputStream inputStream) throws IOException {
+      HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
+      DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
+      try {
+         DocumentBuilder builder = builderFactory.newDocumentBuilder();
+         Document document = builder.parse(inputStream);
+         NodeList nodeList = document.getElementsByTagName(SERVICE_ELEMENT);
+         if (nodeList != null && nodeList.getLength() > 0) {
+            for (int i = 0; i < nodeList.getLength(); i++) {
+               Element element = (Element) nodeList.item(i);
+               HaServiceConfig config = HaDescriptorFactory.createServiceConfig(element.getAttribute(SERVICE_NAME_ATTRIBUTE),
+                     element.getAttribute(ENABLED_ATTRIBUTE),
+                     element.getAttribute(MAX_FAILOVER_ATTEMPTS),
+                     element.getAttribute(FAILOVER_SLEEP),
+                     element.getAttribute(MAX_RETRY_ATTEMPTS),
+                     element.getAttribute(RETRY_SLEEP));
+               descriptor.addServiceConfig(config);
+            }
+         }
+      } catch (ParserConfigurationException e) {
+         LOG.failedToLoadHaDescriptor(e);
+         throw new IOException(e);
+      } catch (SAXException e) {
+         LOG.failedToLoadHaDescriptor(e);
+         throw new IOException(e);
+      }
+      return descriptor;
+   }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaServiceConfigConstants.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaServiceConfigConstants.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaServiceConfigConstants.java
new file mode 100644
index 0000000..7dc3bc8
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/HaServiceConfigConstants.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
/**
 * Shared constants for HA service configuration: the parameter names accepted
 * in a compact "key=value;key=value" configuration string, and the default
 * values applied when a parameter is absent.
 */
public interface HaServiceConfigConstants {

   /** Separates individual key=value pairs in a configuration string. */
   String CONFIG_PAIRS_DELIMITER = ";";

   /** Separates a key from its value within a single pair. */
   String CONFIG_PAIR_DELIMITER = "=";

   String CONFIG_PARAM_MAX_FAILOVER_ATTEMPTS = "maxFailoverAttempts";

   String CONFIG_PARAM_FAILOVER_SLEEP = "failoverSleep";

   String CONFIG_PARAM_MAX_RETRY_ATTEMPTS = "maxRetryAttempts";

   String CONFIG_PARAM_RETRY_SLEEP = "retrySleep";

   String CONFIG_PARAM_ENABLED = "enabled";

   int DEFAULT_MAX_FAILOVER_ATTEMPTS = 3;

   int DEFAULT_FAILOVER_SLEEP = 1000;

   int DEFAULT_MAX_RETRY_ATTEMPTS = 3;

   int DEFAULT_RETRY_SLEEP = 1000;

   boolean DEFAULT_ENABLED = true;

}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/URLManager.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/URLManager.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/URLManager.java
new file mode 100644
index 0000000..9f7bab8
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/URLManager.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
import com.google.common.collect.Lists;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
+
/**
 * Maintains the list of URLs for a single HA service and tracks which one is
 * currently active. The active URL is the head of the queue; when the active
 * URL is marked failed it is rotated to the tail so the next URL takes over.
 */
public class URLManager {

   private ConcurrentLinkedQueue<String> urls = new ConcurrentLinkedQueue<String>();

   public URLManager(List<String> urls) {
      this.urls.addAll(urls);
   }

   /**
    * Returns the URL currently considered active (the head of the queue), or
    * null when no URLs are registered.
    */
   public String getActiveURL() {
      return urls.peek();
   }

   /**
    * Returns a snapshot copy of the managed URLs in their current order.
    */
   public List<String> getURLs() {
      return new ArrayList<String>(urls);
   }

   /**
    * Replaces the managed URLs with the contents of the given list; a null
    * argument is ignored.
    */
   public void setURLs(List<String> urls) {
      if (urls != null) {
         // Bug fix: operate on the field, not the parameter. The original
         // cleared the caller's list and then re-added its (now empty)
         // contents to itself, leaving both lists unchanged or destroyed.
         this.urls.clear();
         this.urls.addAll(urls);
      }
   }

   /**
    * Marks the given URL as failed. Only when it is the currently active URL
    * is it moved to the back of the queue (a stale failure report for a
    * non-active URL must not demote the current active URL).
    */
   public void markFailed(String url) {
      String top = urls.peek();
      // Null check also prevents offering null into the queue (NPE) when the
      // queue is empty.
      if (top != null && top.equals(url)) {
         String failed = urls.poll();
         urls.offer(failed);
      }
   }
}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/i18n/HaMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/i18n/HaMessages.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/i18n/HaMessages.java
new file mode 100644
index 0000000..ade11b4
--- /dev/null
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/i18n/HaMessages.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl.i18n;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+
/**
 * Internationalized log messages for the HA provider; instances are obtained
 * via the gateway's MessagesFactory (see HaDescriptorManager).
 */
@Messages(logger = "org.apache.hadoop.gateway")
public interface HaMessages {
   /** Logged when serializing an HA descriptor to XML fails. */
   @Message(level = MessageLevel.ERROR, text = "Failed to Write HA Descriptor: {0}")
   void failedToWriteHaDescriptor(Exception e);

   /** Logged when parsing an HA descriptor from XML fails. */
   @Message(level = MessageLevel.ERROR, text = "Failed to load HA Descriptor: {0}")
   void failedToLoadHaDescriptor(Exception e);

   /** Logged when a service has no registered URLs to serve as active. */
   @Message(level = MessageLevel.INFO, text = "No Active URL was found for service: {0}")
   void noActiveUrlFound(String serviceName);

   /** Logged when an operation references a service that was never added. */
   @Message(level = MessageLevel.INFO, text = "No Service by this name was found: {0}")
   void noServiceFound(String serviceName);
}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-ha/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
new file mode 100644
index 0000000..995a093
--- /dev/null
+++ b/gateway-provider-ha/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.ha.deploy.HaProviderDeploymentContributor

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributorTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
new file mode 100644
index 0000000..c39ca85
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/deploy/HaProviderDeploymentContributorTest.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.deploy;
+
+import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;
+import org.junit.Test;
+
+import java.util.Iterator;
+import java.util.ServiceLoader;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.fail;
+
+
+public class HaProviderDeploymentContributorTest {
+
+   @Test
+   public void testServiceLoader() throws Exception {
+      ServiceLoader loader = ServiceLoader.load( ProviderDeploymentContributor.class );
+      Iterator iterator = loader.iterator();
+      assertThat( "Service iterator empty.", iterator.hasNext() );
+      while( iterator.hasNext() ) {
+         Object object = iterator.next();
+         if( object instanceof HaProviderDeploymentContributor ) {
+            return;
+         }
+      }
+      fail( "Failed to find " + HaProviderDeploymentContributor.class.getName() + " via service loader." );
+   }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java
new file mode 100644
index 0000000..18a42e3
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultHaProviderTest.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.junit.Test;
+
+import java.util.ArrayList;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.isIn;
+import static org.junit.Assert.*;
+
public class DefaultHaProviderTest {

   // A null descriptor must be rejected at construction time; a service with a
   // registered config reports HA enabled (config defaults to enabled=true).
   @Test
   public void testDescriptor() {
      try {
         new DefaultHaProvider(null);
         fail("provider construction should have failed with null descriptor");
      } catch (IllegalArgumentException e) {
         // expected: null descriptor is an illegal argument
      }
      HaDescriptor descriptor = new DefaultHaDescriptor();
      HaProvider provider = new DefaultHaProvider(descriptor);
      assertNotNull(provider.getHaDescriptor());
      descriptor.addServiceConfig(new DefaultHaServiceConfig("foo"));
      assertTrue(provider.isHaEnabled("foo"));
   }

   // URLs registered for one service must be visible for that service only;
   // an unknown service yields a null active URL.
   @Test
   public void testAddingService() {
      HaDescriptor descriptor = new DefaultHaDescriptor();
      HaProvider provider = new DefaultHaProvider(descriptor);
      ArrayList<String> urls = new ArrayList<String>();
      urls.add("http://host1");
      urls.add("http://host2");
      provider.addHaService("foo", urls);
      assertNull(provider.getActiveURL("bar"));
      String url = provider.getActiveURL("foo");
      assertNotNull(url);
      assertThat(url, isIn(urls));
   }

   // Marking the active URL as failed must rotate to the next URL, cycling
   // back to the first once every URL has been failed.
   @Test
   public void testActiveUrl() {
      HaDescriptor descriptor = new DefaultHaDescriptor();
      HaProvider provider = new DefaultHaProvider(descriptor);
      ArrayList<String> urls = new ArrayList<String>();
      String url1 = "http://host1";
      urls.add(url1);
      String url2 = "http://host2";
      urls.add(url2);
      String serviceName = "foo";
      provider.addHaService(serviceName, urls);
      assertEquals(url1, provider.getActiveURL(serviceName));
      provider.markFailedURL(serviceName, url1);
      assertEquals(url2, provider.getActiveURL(serviceName));
      provider.markFailedURL(serviceName, url2);
      assertEquals(url1, provider.getActiveURL(serviceName));
   }
}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
new file mode 100644
index 0000000..d4ff0f7
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorFactoryTest.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+/**
+ * Unit tests for {@link HaDescriptorFactory}, covering descriptor creation and
+ * both service-config factory overloads (semicolon-delimited parameter string
+ * and individual string arguments).
+ */
+public class HaDescriptorFactoryTest {
+
+   @Test
+   public void testCreateDescriptor() {
+      // The factory must always yield a non-null descriptor instance.
+      assertNotNull(HaDescriptorFactory.createDescriptor());
+   }
+
+   @Test
+   public void testCreateServiceConfig() {
+      // Overload 1: all settings parsed from a single semicolon-separated
+      // "key=value" parameter string.
+      HaServiceConfig serviceConfig = HaDescriptorFactory.createServiceConfig("foo", "enabled=true;maxFailoverAttempts=42;failoverSleep=50;maxRetryAttempts=1;retrySleep=1000");
+      assertNotNull(serviceConfig);
+      assertTrue(serviceConfig.isEnabled());
+      assertEquals("foo", serviceConfig.getServiceName());
+      assertEquals(42, serviceConfig.getMaxFailoverAttempts());
+      assertEquals(50, serviceConfig.getFailoverSleep());
+      assertEquals(1, serviceConfig.getMaxRetryAttempts());
+      assertEquals(1000, serviceConfig.getRetrySleep());
+
+      // Overload 2: each setting passed as a separate string argument
+      // (enabled, maxFailoverAttempts, failoverSleep, maxRetryAttempts, retrySleep).
+      serviceConfig = HaDescriptorFactory.createServiceConfig("bar", "false", "3", "1000", "5", "3000");
+      assertNotNull(serviceConfig);
+      assertFalse(serviceConfig.isEnabled());
+      assertEquals("bar", serviceConfig.getServiceName());
+      assertEquals(3, serviceConfig.getMaxFailoverAttempts());
+      assertEquals(1000, serviceConfig.getFailoverSleep());
+      assertEquals(5, serviceConfig.getMaxRetryAttempts());
+      assertEquals(3000, serviceConfig.getRetrySleep());
+
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java
new file mode 100644
index 0000000..6f501a8
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/HaDescriptorManagerTest.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.apache.hadoop.gateway.ha.provider.HaDescriptor;
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+import org.junit.Test;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.StringWriter;
+
+import static org.junit.Assert.*;
+
+/**
+ * Unit tests for {@link HaDescriptorManager}: loading an HA descriptor from
+ * XML, applying defaults when attributes are omitted, and storing a
+ * descriptor back out as XML.
+ */
+public class HaDescriptorManagerTest {
+
+   @Test
+   public void testDescriptorLoad() throws IOException {
+      // 'foo' sets every attribute explicitly but is disabled; 'bar' is enabled.
+      // NOTE(review): 'failoverLimit' is not an attribute asserted anywhere else
+      // (other tests use 'maxFailoverAttempts') — presumably it is ignored by the
+      // parser and defaults apply; confirm this is intentional.
+      String xml = "<ha><service name='foo' maxFailoverAttempts='42' failoverSleep='4000' maxRetryAttempts='2' retrySleep='2213' enabled='false'/>" +
+            "<service name='bar' failoverLimit='3' enabled='true'/></ha>";
+      ByteArrayInputStream inputStream = new ByteArrayInputStream(xml.getBytes());
+      HaDescriptor descriptor = HaDescriptorManager.load(inputStream);
+      assertNotNull(descriptor);
+      // Only 'bar' is enabled, so exactly one enabled service name is reported.
+      assertEquals(1, descriptor.getEnabledServiceNames().size());
+      HaServiceConfig config =  descriptor.getServiceConfig("foo");
+      assertNotNull(config);
+      assertEquals("foo", config.getServiceName());
+      assertEquals(42, config.getMaxFailoverAttempts());
+      assertEquals(4000, config.getFailoverSleep());
+      assertEquals(2, config.getMaxRetryAttempts());
+      assertEquals(2213, config.getRetrySleep());
+      assertFalse(config.isEnabled());
+      config =  descriptor.getServiceConfig("bar");
+      assertTrue(config.isEnabled());
+   }
+
+   @Test
+   public void testDescriptorDefaults() throws IOException {
+      // A service element with only a name must pick up every default from
+      // HaServiceConfigConstants.
+      String xml = "<ha><service name='foo'/></ha>";
+      ByteArrayInputStream inputStream = new ByteArrayInputStream(xml.getBytes());
+      HaDescriptor descriptor = HaDescriptorManager.load(inputStream);
+      assertNotNull(descriptor);
+      assertEquals(1, descriptor.getEnabledServiceNames().size());
+      HaServiceConfig config =  descriptor.getServiceConfig("foo");
+      assertNotNull(config);
+      assertEquals("foo", config.getServiceName());
+      assertEquals(HaServiceConfigConstants.DEFAULT_MAX_FAILOVER_ATTEMPTS, config.getMaxFailoverAttempts());
+      assertEquals(HaServiceConfigConstants.DEFAULT_FAILOVER_SLEEP, config.getFailoverSleep());
+      assertEquals(HaServiceConfigConstants.DEFAULT_MAX_RETRY_ATTEMPTS, config.getMaxRetryAttempts());
+      assertEquals(HaServiceConfigConstants.DEFAULT_RETRY_SLEEP, config.getRetrySleep());
+      assertEquals(HaServiceConfigConstants.DEFAULT_ENABLED, config.isEnabled());
+   }
+
+   @Test
+   public void testDescriptorStoring() throws IOException {
+      // Round-trip check: a descriptor built via the factory must serialize to
+      // this exact XML (attributes emitted in alphabetical order, 2-space indent).
+      HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
+      descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("foo", "false", "42", "1000", "3", "3000"));
+      descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("bar", "true", "3", "5000", "5", "8000"));
+      StringWriter writer = new StringWriter();
+      HaDescriptorManager.store(descriptor, writer);
+      String descriptorXml = writer.toString();
+      String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" +
+            "<ha>\n" +
+            "  <service enabled=\"false\" failoverSleep=\"1000\" maxFailoverAttempts=\"42\" maxRetryAttempts=\"3\" name=\"foo\" retrySleep=\"3000\"/>\n" +
+            "  <service enabled=\"true\" failoverSleep=\"5000\" maxFailoverAttempts=\"3\" maxRetryAttempts=\"5\" name=\"bar\" retrySleep=\"8000\"/>\n" +
+            "</ha>\n";
+      assertEquals(xml, descriptorXml);
+   }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerTest.java b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerTest.java
new file mode 100644
index 0000000..cb3f451
--- /dev/null
+++ b/gateway-provider-ha/src/test/java/org/apache/hadoop/gateway/ha/provider/impl/URLManagerTest.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.ha.provider.impl;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Unit tests for {@link URLManager}, the rotating list of service URLs used
+ * by the HA provider to track the currently active endpoint.
+ */
+public class URLManagerTest {
+
+   @Test
+   public void testActiveURLManagement() {
+      // Seed the manager with two URLs and verify active-URL rotation.
+      ArrayList<String> urls = new ArrayList<String>();
+      String url1 = "http://host1";
+      urls.add(url1);
+      String url2 = "http://host2";
+      urls.add(url2);
+      URLManager manager = new URLManager(urls);
+      // The manager must retain every URL it was constructed with.
+      assertTrue(manager.getURLs().containsAll(urls));
+      // The first URL starts active; each markFailed advances to the next,
+      // wrapping back to the first after the last one fails.
+      assertEquals(url1, manager.getActiveURL());
+      manager.markFailed(url1);
+      assertEquals(url2, manager.getActiveURL());
+      manager.markFailed(url2);
+      assertEquals(url1, manager.getActiveURL());
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-jersey/src/test/java/org/apache/hadoop/gateway/jersey/JerseyDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-jersey/src/test/java/org/apache/hadoop/gateway/jersey/JerseyDeploymentContributorTest.java b/gateway-provider-jersey/src/test/java/org/apache/hadoop/gateway/jersey/JerseyDeploymentContributorTest.java
index 914ffa4..0721ddc 100644
--- a/gateway-provider-jersey/src/test/java/org/apache/hadoop/gateway/jersey/JerseyDeploymentContributorTest.java
+++ b/gateway-provider-jersey/src/test/java/org/apache/hadoop/gateway/jersey/JerseyDeploymentContributorTest.java
@@ -100,7 +100,7 @@ public class JerseyDeploymentContributorTest {
     Service service = new Service();
     service.setRole( "test-service-role" );
     service.setName( "test-service-name" );
-    service.setUrl( "http://test-service-host:777/test-service-path" );
+    service.addUrl( "http://test-service-host:777/test-service-path" );
 
     // This should end up calling providerContributor.contributeFilter
     serviceContributor.contributeService( context, service );

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/pom.xml b/gateway-provider-rewrite-func-service-registry/pom.xml
index 2f0a4df..9f4506a 100644
--- a/gateway-provider-rewrite-func-service-registry/pom.xml
+++ b/gateway-provider-rewrite-func-service-registry/pom.xml
@@ -56,6 +56,10 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-provider-rewrite</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-provider-ha</artifactId>
+        </dependency>
 
         <!-- ********** ********** ********** ********** ********** ********** -->
         <!-- ********** Test Dependencies                           ********** -->

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionProcessorBase.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionProcessorBase.java b/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionProcessorBase.java
index 5e4479f..f14ff7b 100644
--- a/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionProcessorBase.java
+++ b/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionProcessorBase.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.gateway.svcregfunc.impl;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 
@@ -28,6 +30,7 @@ abstract class ServiceRegistryFunctionProcessorBase<T extends UrlRewriteFunction
   private String cluster;
   private GatewayServices services;
   private ServiceRegistry registry;
+  private HaProvider haProvider;
 
   @Override
   public void initialize( UrlRewriteEnvironment environment, T descriptor ) throws Exception {
@@ -46,6 +49,7 @@ abstract class ServiceRegistryFunctionProcessorBase<T extends UrlRewriteFunction
     if( registry == null ) {
       throw new IllegalArgumentException( "registry==null" );
     }
+    haProvider = environment.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME);
   }
 
   @Override
@@ -55,8 +59,10 @@ abstract class ServiceRegistryFunctionProcessorBase<T extends UrlRewriteFunction
   }
 
   public String lookupServiceUrl( String role ) throws Exception {
-    String url = registry.lookupServiceURL( cluster, role );
-    return url;
+    if (haProvider != null && haProvider.isHaEnabled(role)) {
+       return haProvider.getActiveURL(role);
+    }
+    return registry.lookupServiceURL( cluster, role );
   }
 
   String cluster() {

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessor.java b/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessor.java
index 2adb3ff..912fd3f 100644
--- a/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessor.java
+++ b/gateway-provider-rewrite-func-service-registry/src/main/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessor.java
@@ -49,17 +49,16 @@ public class ServiceUrlFunctionProcessor
     return results;
   }
 
-  public String resolve( String parameter ) throws Exception {
-    String url = lookupServiceUrl( parameter );
-    if( url != null ) {
-      URI outputUri;
-      URI inputUri = new URI( url );
-      String host = inputUri.getHost();
-      outputUri = new URI( inputUri.getScheme(), inputUri.getUserInfo(), host, inputUri.getPort(), inputUri.getPath(), inputUri.getQuery(), inputUri.getFragment() );
-      parameter = outputUri.toString();
-    }
-    return parameter;
+  private String resolve( String parameter ) throws Exception {
+     String url = lookupServiceUrl( parameter );
+     if( url != null ) {
+        URI outputUri;
+        URI inputUri = new URI( url );
+        String host = inputUri.getHost();
+        outputUri = new URI( inputUri.getScheme(), inputUri.getUserInfo(), host, inputUri.getPort(), inputUri.getPath(), inputUri.getQuery(), inputUri.getFragment() );
+        parameter = outputUri.toString();
+     }
+     return parameter;
   }
-
 }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceAddressFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceAddressFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceAddressFunctionProcessorTest.java
index 5e99482..5465907 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceAddressFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceAddressFunctionProcessorTest.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.gateway.svcregfunc.impl;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.svcregfunc.api.ServiceAddressFunctionDescriptor;
@@ -31,10 +33,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -63,7 +62,13 @@ public class ServiceAddressFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServiceAddressFunctionDescriptor.class );
 
-    EasyMock.replay( reg, svc, env, desc, ctx );
+    HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+    EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+    EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+    EasyMock.replay( reg, svc, env, desc, ctx, haProvider );
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceHostFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceHostFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceHostFunctionProcessorTest.java
index e9f2e6b..5c2db7f 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceHostFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceHostFunctionProcessorTest.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.gateway.svcregfunc.impl;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.svcregfunc.api.ServiceHostFunctionDescriptor;
@@ -31,10 +33,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -63,7 +62,14 @@ public class ServiceHostFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServiceHostFunctionDescriptor.class );
 
-    EasyMock.replay( reg, svc, env, desc, ctx );
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+     EasyMock.replay( reg, svc, env, desc, ctx, haProvider );
+
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedAddressFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedAddressFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedAddressFunctionProcessorTest.java
index c1f2b06..7da18c9 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedAddressFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedAddressFunctionProcessorTest.java
@@ -21,6 +21,8 @@ import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriter;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.hostmap.HostMapper;
 import org.apache.hadoop.gateway.services.hostmap.HostMapperService;
@@ -34,10 +36,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -76,7 +75,13 @@ public class ServiceMappedAddressFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServiceMappedAddressFunctionDescriptor.class );
 
-    EasyMock.replay( hm, hms, reg, svc, env, desc, ctx );
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+     EasyMock.replay( hm, hms, reg, svc, env, desc, ctx, haProvider );
   }
 
   @Test


Mime
View raw message