knox-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kmin...@apache.org
Subject [3/4] KNOX-88: Support HDFS HA
Date Fri, 15 Aug 2014 20:41:31 GMT
http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedHostFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedHostFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedHostFunctionProcessorTest.java
index e38442a..6e9df55 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedHostFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedHostFunctionProcessorTest.java
@@ -21,6 +21,8 @@ import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriter;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.hostmap.HostMapper;
 import org.apache.hadoop.gateway.services.hostmap.HostMapperService;
@@ -34,10 +36,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -76,7 +75,13 @@ public class ServiceMappedHostFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServiceMappedHostFunctionDescriptor.class );
 
-    EasyMock.replay( hm, hms, reg, svc, env, desc, ctx );
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+     EasyMock.replay( hm, hms, reg, svc, env, desc, ctx, haProvider );
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedUrlFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedUrlFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedUrlFunctionProcessorTest.java
index 5d01f5d..4ce0c5d 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedUrlFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceMappedUrlFunctionProcessorTest.java
@@ -21,6 +21,8 @@ import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriter;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.hostmap.HostMapper;
 import org.apache.hadoop.gateway.services.hostmap.HostMapperService;
@@ -34,10 +36,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -76,7 +75,14 @@ public class ServiceMappedUrlFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServiceMappedUrlFunctionDescriptor.class );
 
-    EasyMock.replay( hm, hms, reg, svc, env, desc, ctx );
+
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+     EasyMock.replay( hm, hms, reg, svc, env, desc, ctx, haProvider );
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePathFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePathFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePathFunctionProcessorTest.java
index fc344ae..fb44b2e 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePathFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePathFunctionProcessorTest.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.gateway.svcregfunc.impl;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.svcregfunc.api.ServicePathFunctionDescriptor;
@@ -31,10 +33,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -63,7 +62,13 @@ public class ServicePathFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServicePathFunctionDescriptor.class );
 
-    EasyMock.replay( reg, svc, env, desc, ctx );
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+     EasyMock.replay( reg, svc, env, desc, ctx, haProvider );
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePortFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePortFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePortFunctionProcessorTest.java
index 60ee9d7..d14a8de 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePortFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServicePortFunctionProcessorTest.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.gateway.svcregfunc.impl;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.svcregfunc.api.ServicePortFunctionDescriptor;
@@ -31,10 +33,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -63,7 +62,13 @@ public class ServicePortFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServicePortFunctionDescriptor.class );
 
-    EasyMock.replay( reg, svc, env, desc, ctx );
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+     EasyMock.replay( reg, svc, env, desc, ctx, haProvider );
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
index ae21d67..d7c2ba3 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
@@ -39,13 +39,7 @@ import org.hamcrest.core.Is;
 import org.junit.Test;
 
 import javax.security.auth.Subject;
-import javax.servlet.DispatcherType;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
+import javax.servlet.*;
 import javax.servlet.http.HttpServletRequest;
 import java.io.IOException;
 import java.net.URISyntaxException;
@@ -80,7 +74,7 @@ public class ServiceRegistryFunctionsTest {
     EasyMock.expect( mockServiceRegistry.lookupServiceURL( "test-cluster", "JOBTRACKER" ) ).andReturn( "test-jt-scheme://test-jt-host:511" ).anyTimes();
 
     GatewayServices mockGatewayServices = EasyMock.createNiceMock( GatewayServices.class );
-    EasyMock.expect( mockGatewayServices.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( mockServiceRegistry ).anyTimes();
+    EasyMock.expect( mockGatewayServices.getService(GatewayServices.SERVICE_REGISTRY_SERVICE) ).andReturn( mockServiceRegistry ).anyTimes();
 
     EasyMock.replay( mockServiceRegistry, mockGatewayServices );
 

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceSchemeFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceSchemeFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceSchemeFunctionProcessorTest.java
index f99e478..02919c5 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceSchemeFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceSchemeFunctionProcessorTest.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.gateway.svcregfunc.impl;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.svcregfunc.api.ServiceSchemeFunctionDescriptor;
@@ -31,10 +33,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -63,7 +62,13 @@ public class ServiceSchemeFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServiceSchemeFunctionDescriptor.class );
 
-    EasyMock.replay( reg, svc, env, desc, ctx );
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+     EasyMock.replay( reg, svc, env, desc, ctx, haProvider );
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessorTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessorTest.java
index b04c2c1..2b94df6 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/hadoop/gateway/svcregfunc/impl/ServiceUrlFunctionProcessorTest.java
@@ -21,6 +21,8 @@ import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriter;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
 import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.hostmap.HostMapper;
 import org.apache.hadoop.gateway.services.hostmap.HostMapperService;
@@ -34,10 +36,7 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.ServiceLoader;
 
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
+import static org.hamcrest.CoreMatchers.*;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.collection.IsIterableContainingInOrder.contains;
 import static org.junit.Assert.fail;
@@ -76,7 +75,13 @@ public class ServiceUrlFunctionProcessorTest {
 
     desc = EasyMock.createNiceMock( ServiceUrlFunctionDescriptor.class );
 
-    EasyMock.replay( hm, hms, reg, svc, env, desc, ctx );
+     HaProvider haProvider = EasyMock.createNiceMock( HaProvider.class );
+
+     EasyMock.expect(env.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
+
+     EasyMock.expect(haProvider.isHaEnabled(EasyMock.anyObject(String.class))).andReturn(Boolean.FALSE).anyTimes();
+
+    EasyMock.replay( hm, hms, reg, svc, env, desc, ctx, haProvider);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 579a5c6..9155c5c 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -197,6 +197,10 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-provider-identity-assertion-pseudo</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-provider-ha</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>${gateway-group}</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 02c87bf..40aaa8d 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -325,5 +325,4 @@ public interface GatewayMessages {
 
   @Message( level = MessageLevel.INFO, text = "Computed roles/groups: {0} for principal: {1}" )
   void lookedUpUserRoles(Set<String> roleNames, String userName);
-
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
index 99ce32c..45e35b2 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
@@ -291,7 +291,7 @@ public abstract class DeploymentFactory {
             ServiceRegistry sr = (ServiceRegistry) gatewayServices.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
             if (sr != null) {
               String regCode = sr.getRegistrationCode(topology.getName());
-              sr.registerService(regCode, topology.getName(), service.getRole(), service.getUrl() );
+              sr.registerService(regCode, topology.getName(), service.getRole(), service.getUrls() );
             }
           }
         } catch( Exception e ) {

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntity.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntity.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntity.java
deleted file mode 100644
index 340e852..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntity.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.http.HttpEntity;
-import org.apache.http.annotation.NotThreadSafe;
-import org.apache.http.entity.HttpEntityWrapper;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-@NotThreadSafe
-public class PartiallyRepeatableHttpEntity extends HttpEntityWrapper {
-
-  public static final int DEFAULT_BUFFER_SIZE = 4096;
-
-  private int replayWriteIndex;
-  private int replayWriteLimit;
-  private byte[] replayBuffer;
-  private ReplayStream finalStream;
-  private InputStream wrappedStream;
-
-  public PartiallyRepeatableHttpEntity( final HttpEntity entity, int bufferSize ) throws IOException {
-    super( entity );
-    this.wrappedStream = null;
-    this.finalStream = null;
-    this.replayWriteIndex = -1;
-    if( !entity.isRepeatable() ) {
-      this.replayBuffer = new byte[ bufferSize ];
-      this.replayWriteLimit = bufferSize-1;
-    } else {
-      this.replayBuffer = null;
-    }
-  }
-
-  public PartiallyRepeatableHttpEntity( final HttpEntity entity ) throws IOException {
-    this( entity, DEFAULT_BUFFER_SIZE );
-  }
-
-  @Override
-  public boolean isRepeatable() {
-    return true;
-  }
-
-  @Override
-  public boolean isStreaming() {
-    return wrappedEntity.isStreaming();
-  }
-
-  @Override
-  public boolean isChunked() {
-    return wrappedEntity.isChunked();
-  }
-
-  @Override
-  public long getContentLength() {
-    return wrappedEntity.getContentLength();
-  }
-
-  // This will throw an IOException if an attempt is made to getContent a second time after
-  // more bytes than the buffer can hold has been read on the first stream.
-  @Override
-  public InputStream getContent() throws IOException {
-    // If the wrapped stream is repeatable return it directly.
-    if( replayBuffer == null ) {
-      return wrappedEntity.getContent();
-    // Else if the buffer has overflowed
-    } else if( finalStream != null ) {
-      throw new IOException( "TODO - Existing stream already past replay buffer capacity" );
-    } else {
-      if( wrappedStream == null ) {
-         wrappedStream = wrappedEntity.getContent();
-      }
-      return new ReplayStream();
-    }
-  }
-
-  @Override
-  public void writeTo( final OutputStream stream ) throws IOException {
-    IOUtils.copy( getContent(), stream );
-  }
-
-  @Override
-  @SuppressWarnings( "deprecation" )
-  public void consumeContent() throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  private class ReplayStream extends InputStream {
-
-    private int replayReadIndex = -1;
-
-    @Override
-    public int read() throws IOException {
-      int b;
-      if( finalStream != null && finalStream != this ) {
-        throw new IOException( "TODO - Replay stream taken over by another consumer." );
-      }
-      // If we can read from the buffer do so.
-      if( replayReadIndex < replayWriteIndex ) {
-        b = replayBuffer[ ++replayReadIndex ];
-      } else {
-        b = wrappedStream.read();
-        // If the underlying stream is not closed.
-        if( b > -1 ) {
-          if( replayWriteIndex < replayWriteLimit ) {
-            replayBuffer[ ++replayWriteIndex ] = (byte)b;
-            replayReadIndex++;
-          } else {
-            finalStream = this;
-          }
-        }
-      }
-      return b;
-    }
-
-    public int read( byte buffer[], int offset, int limit ) throws IOException {
-      int count = -1;
-      if( finalStream != null && finalStream != this ) {
-        throw new IOException( "TODO - Replay stream taken over by another consumer." );
-      }
-      // If we can read from the buffer do so.
-      if( replayReadIndex < replayWriteIndex ) {
-        count = replayWriteIndex - replayReadIndex;
-        count = Math.min( limit, count );
-        System.arraycopy( replayBuffer, replayReadIndex+1, buffer, offset, count );
-        replayReadIndex += count;
-      } else {
-        count = wrappedStream.read( buffer, offset, limit );
-        // If the underlying stream is not closed.
-        if( count > -1 ) {
-          if( replayWriteIndex+count < replayWriteLimit ) {
-            System.arraycopy( buffer, offset, replayBuffer, replayWriteIndex+1, count );
-            replayReadIndex += count;
-            replayWriteIndex += count;
-          } else {
-            finalStream = this;
-          }
-        }
-      }
-      return count;
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
index e18f327..12d22ad 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.gateway.services.security.CryptoService;
 import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
@@ -81,7 +82,7 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
     registry.remove(clusterName);
   }
 
-  public boolean registerService(String regCode, String clusterName, String serviceName, String url) {
+  public boolean registerService(String regCode, String clusterName, String serviceName, List<String> urls) {
     boolean rc = false;
     // verify the signature of the regCode
     if (regCode == null) {
@@ -102,7 +103,7 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
       RegEntry regEntry = new RegEntry();
       regEntry.setClusterName(clusterName);
       regEntry.setServiceName(serviceName);
-      regEntry.setUrl(url);
+      regEntry.setUrls(urls);
       clusterServices.put(serviceName , regEntry);
       String json = renderAsJsonString(registry);
       try {
@@ -131,17 +132,26 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
     return json;
   }
   
+  @Override
   public String lookupServiceURL(String clusterName, String serviceName) {
-    String url = null;
+    List<String> urls = lookupServiceURLs( clusterName, serviceName );
+    if ( urls != null && !urls.isEmpty() ) {
+      return urls.get( 0 );
+    }
+    return null;
+  }
+
+  @Override
+  public List<String> lookupServiceURLs( String clusterName, String serviceName ) {
     RegEntry entry = null;
     HashMap clusterServices = registry.get(clusterName);
     if (clusterServices != null) {
       entry = (RegEntry) clusterServices.get(serviceName);
       if( entry != null ) {
-        url = entry.url;
+        return entry.getUrls();
       }
     }
-    return url;
+    return null;
   }
   
   private HashMap<String, HashMap<String,RegEntry>> getMapFromJsonString(String json) {

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java
index 847d72e..6d5c269 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java
@@ -17,10 +17,12 @@
  */
 package org.apache.hadoop.gateway.services.registry.impl;
 
+import java.util.List;
+
 public class RegEntry {
   public String clusterName;
   public String serviceName;
-  public String url;
+  public List<String> urls;
 
   public RegEntry() {
   }
@@ -41,12 +43,12 @@ public class RegEntry {
     this.serviceName = serviceName;
   }
 
-  public String getUrl() {
-    return url;
+  public List<String> getUrls() {
+    return urls;
   }
 
-  public void setUrl(String url) {
-    this.url = url;
+  public void setUrls( List<String> urls) {
+    this.urls = urls;
   }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
index 2c7a88e..985fd6b 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
@@ -37,6 +37,7 @@ public class ServicePropertyInterpreter extends AbstractInterpreter {
         this.topology = topology;
     }
 
+    @Override
     public void interpret(String token, String value) throws InterpretException {
         int dotPosition = token.indexOf(DOT);
         if (dotPosition == -1) {
@@ -67,7 +68,7 @@ public class ServicePropertyInterpreter extends AbstractInterpreter {
         }
 
         if (SERVICE_URL.equalsIgnoreCase(nextToken)) {
-            service.setUrl( value );
+            service.addUrl( value );
         } else {
           dotPosition = nextToken.indexOf(DOT);
           if (dotPosition != -1) {

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
index 7992e0f..745c661 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
@@ -48,7 +48,7 @@ public class KnoxFormatXmlTopologyRules extends AbstractRulesModule {
     forPattern( ROOT_TAG + "/" + SERVICE_TAG ).createObject().ofType( Service.class ).then().setNext( "addService" );
     forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + ROLE_TAG ).setBeanProperty();
     forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + NAME_TAG ).setBeanProperty();
-    forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + URL_TAG ).setBeanProperty();
+    forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + URL_TAG ).callMethod( "addUrl" ).usingElementBodyAsArgument();
     forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG ).createObject().ofType( Param.class ).then().addRule( paramRule ).then().setNext( "addParam" );
     forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG + "/" + NAME_TAG ).setBeanProperty();
     forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG + "/" + VALUE_TAG ).setBeanProperty();

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntityTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntityTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntityTest.java
deleted file mode 100644
index 42528bb..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/dispatch/PartiallyRepeatableHttpEntityTest.java
+++ /dev/null
@@ -1,874 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.dispatch;
-
-import org.apache.http.entity.BasicHttpEntity;
-import org.apache.http.entity.BufferedHttpEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.InputStreamEntity;
-import org.junit.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.Charset;
-
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.fail;
-
-public class PartiallyRepeatableHttpEntityTest {
-
-  private static Charset UTF8 = Charset.forName( "UTF-8" );
-
-  // Variables
-  // Consumers: C1, C2
-  // Reads: FC - Full Content, PC - Partial Content, AC - Any Content
-  // Reads: IB - In Buffer, OB - Overflow Buffer
-  // Close: XC
-  // Expect: EE
-
-  // Test Cases
-  // C1 FC
-  //   C1 FC/IB.
-  //   C1 FC/OB.
-  //   C1 FC/IB; C2 FC.
-  //   C1 FC/OB; C2 AC; EE
-  //   C1 FC/IB; C1 XC; C2 FC.
-  //   C1 FC/OB; C1 XC; C2 AC; EE
-  // C1 PC
-  //   C1 PC/IB.
-  //   C1 PC/OB.
-  //   C1 PC/IB; C2 FC.
-  //   C1 PC/OB; C2 AC; EE
-  //   C1 PC/IB; C1 XC; C2 FC.
-  //   C1 PC/OB; C1 XC; C2 AC; EE
-  // C1 C2 C1
-  //   C1 PC/IB; C2 PC/IB; C1 PC/IB; C2 PC/IB - Back and forth before buffer overflow is OK.
-  //   C1 PC/IB; C2 PC/OB; C1 AC; EE
-
-  @Test
-  public void testS__C1_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    String output;
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testB__C1_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    String output;
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testS__C1_FC_OB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    String output;
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testB__C1_FC_OB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    String output;
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testS_C1_FC_IB__C2_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    String output;
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testB_C1_FC_IB__C2_FC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( "UTF-8" ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    String output;
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-  }
-
-  @Test
-  public void testS_C1_FC_OB__C2_AC__EE() throws Exception {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    String output;
-
-    output = byteRead( replay.getContent(), -1 );
-    assertThat( output, is( data ) );
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-  }
-
-  @Test
-  public void testB_C1_FC_OB__C2_AC__EE() throws Exception {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    String output;
-
-    output = blockRead( replay.getContent(), UTF8, -1, 3 );
-    assertThat( output, is( data ) );
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 FC/IB; C1 XC; C2 FC.
-  @Test
-  public void testS_C1_FC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 FC/IB; C1 XC; C2 FC.
-  @Test
-  public void testB_C1_FC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 3 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 3 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 FC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testS_C1_FC_OB__C1_XC__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 FC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testB_C1_FC_OB__C1_XC__C2_AC_EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 3 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/IB.
-  @Test
-  public void testS_C1_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 3 );
-    assertThat( text, is( "012" ) );
-  }
-
-  //   C1 PC/IB.
-  @Test
-  public void testB_C1_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 3, 3 );
-    assertThat( text, is( "012" ) );
-  }
-
-  //   C1 PC/OB.
-  @Test
-  public void testS_C1_PC_OB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-  }
-
-  //   C1 PC/OB.
-  @Test
-  public void testB_C1_PC_OB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 4 );
-    assertThat( text, is( "0123456789" ) );
-    stream.close();
-  }
-
-  //   C1 PC/IB; C2 FC.
-  @Test
-  public void testS_C1_PC_IB__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 4 );
-    assertThat( text, is( "0123" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/IB; C2 FC.
-  @Test
-  public void testB_C1_PC_IB__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 4, 1 );
-    assertThat( text, is( "0123" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 7 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/OB; C2 AC; EE
-  @Test
-  public void testS_C1_PC_OB__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 7 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/OB; C2 AC; EE
-  @Test
-  public void testB_C1_PC_OB__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 7, 2 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/IB; C1 XC; C2 FC.
-  @Test
-  public void testS_C1_PC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 7 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = byteRead( stream, -1 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/IB; C1 XC; C2 FC.
-  @Test
-  public void testB_C1_PC_IB__C1_XC__C2_FC() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 7, 2 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, -1, 7 );
-    assertThat( text, is( "0123456789" ) );
-  }
-
-  //   C1 PC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testS_C1_PC_OB__C1_XC__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = byteRead( stream, 7 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/OB; C1 XC; C2 AC; EE
-  @Test
-  public void testB_C1_PC_OB__C1_XC__C2_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream = replay.getContent();
-    text = blockRead( stream, UTF8, 7, 2 );
-    assertThat( text, is( "0123456" ) );
-    stream.close();
-
-    try {
-      replay.getContent();
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/IB; C2 PC/IB; C1 PC/IB; C2 PC/IB - Back and forth before buffer overflow is OK.
-  @Test
-  public void testS_C1_PC_IB__C2_PC_IB__C2_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-
-    stream1 = replay.getContent();
-    text = byteRead( stream1, 3 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    text = byteRead( stream2, 4 );
-    assertThat( text, is( "0123" ) );
-
-    text = byteRead( stream1, 3 );
-    assertThat( text, is( "345" ) );
-  }
-
-  //   C1 PC/IB; C2 PC/IB; C1 PC/IB; C2 PC/IB - Back and forth before buffer overflow is OK.
-  @Test
-  public void testB_C1_PC_IB__C2_PC_IB__C2_PC_IB() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 20 );
-    stream1 = replay.getContent();
-    text = blockRead( stream1, UTF8, 3, 2 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    text = blockRead( stream2, UTF8, 4, 3 );
-    assertThat( text, is( "0123" ) );
-
-    text = blockRead( stream1, UTF8, 3, 2 );
-    assertThat( text, is( "345" ) );
-  }
-
-  //   C1 PC/IB; C2 PC/OB; C1 AC; EE
-  @Test
-  public void testS_C1_PC_IB__C2_PC_OB__C1_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream1 = replay.getContent();
-    text = byteRead( stream1, 3 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    text = byteRead( stream2, 6 );
-    assertThat( text, is( "012345" ) );
-
-    try {
-      byteRead( stream1, 1 );
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  //   C1 PC/IB; C2 PC/OB; C1 AC; EE
-  @Test
-  public void testB_C1_PC_IB__C2_PC_OB__C1_AC__EE() throws IOException {
-    String data = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-    InputStream stream1, stream2;
-    String text;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( data.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    stream1 = replay.getContent();
-    text = blockRead( stream1, UTF8, 3, 2 );
-    assertThat( text, is( "012" ) );
-
-    stream2 = replay.getContent();
-    text = blockRead( stream2, UTF8, 6, 4 );
-    assertThat( text, is( "012345" ) );
-
-    try {
-      blockRead( stream1, UTF8, 6, 4 );
-      fail( "Expected IOException" );
-    } catch ( IOException e ) {
-      // Expected.
-    }
-  }
-
-  @Test
-  public void testWriteTo() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-    replay.writeTo( buffer );
-    String output = new String( buffer.toByteArray(), UTF8 );
-    assertThat( output, is( input ) );
-  }
-
-  @Test
-  public void testIsRepeatable() throws Exception {
-    String text = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( text.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic );
-    assertThat( replay.isRepeatable(), is( true ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( text.getBytes( UTF8 ) ) );
-    BufferedHttpEntity buffered = new BufferedHttpEntity( basic );
-    replay = new PartiallyRepeatableHttpEntity( buffered );
-    assertThat( replay.isRepeatable(), is( true ) );
-  }
-
-  @Test
-  public void testIsChunked() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.isChunked(), is( false ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setChunked( true );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.isChunked(), is( true ) );
-  }
-
-  @Test
-  public void testGetContentLength() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.getContentLength(), is( -1L ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setContentLength( input.length() );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.getContentLength(), is( 10L ) );
-  }
-
-  @Test
-  public void testGetContentType() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.getContentType(), nullValue() );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setContentType( ContentType.APPLICATION_JSON.getMimeType() );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.getContentType().getValue(), is( "application/json" ) );
-  }
-
-  @Test
-  public void testGetContentEncoding() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.getContentEncoding(), nullValue() );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    basic.setContentEncoding( "UTF-8" );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.getContentEncoding().getValue(), is( "UTF-8" ) );
-  }
-
-  @Test
-  public void testIsStreaming() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    InputStreamEntity streaming;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.isStreaming(), is( true ) );
-
-    basic = new BasicHttpEntity();
-    basic.setContent( null );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-    assertThat( replay.isStreaming(), is( false ) );
-
-    streaming = new InputStreamEntity( new ByteArrayInputStream( input.getBytes( UTF8 ) ), 10, ContentType.TEXT_PLAIN );
-    replay = new PartiallyRepeatableHttpEntity( streaming, 5 );
-    assertThat( replay.isStreaming(), is( true ) );
-  }
-
-  @Test
-  public void testConsumeContent() throws Exception {
-    String input = "0123456789";
-    BasicHttpEntity basic;
-    PartiallyRepeatableHttpEntity replay;
-
-    basic = new BasicHttpEntity();
-    basic.setContent( new ByteArrayInputStream( input.getBytes( UTF8 ) ) );
-    replay = new PartiallyRepeatableHttpEntity( basic, 5 );
-
-    try {
-      replay.consumeContent();
-      fail( "Expected UnsupportedOperationException" );
-    } catch ( UnsupportedOperationException e ) {
-      // Expected.
-    }
-  }
-
-  private static String byteRead( InputStream stream, int total ) throws IOException {
-    StringBuilder string = null;
-    int c = 0;
-    if( total < 0 ) {
-      total = Integer.MAX_VALUE;
-    }
-    while( total > 0 && c >= 0 ) {
-      c = stream.read();
-      if( c >= 0 ) {
-        total--;
-        if( string == null ) {
-          string = new StringBuilder();
-        }
-        string.append( (char)c );
-      }
-    }
-    return string == null ? null : string.toString();
-  }
-
-  private static String blockRead( InputStream stream, Charset charset, int total, int chunk ) throws IOException {
-    StringBuilder string = null;
-    byte buffer[] = new byte[ chunk ];
-    int count = 0;
-    if( total < 0 ) {
-      total = Integer.MAX_VALUE;
-    }
-    while( total > 0 && count >= 0 ) {
-      count = stream.read( buffer, 0, Math.min( buffer.length, total ) );
-      if( count >= 0 ) {
-        total -= count;
-        if( string == null ) {
-          string = new StringBuilder();
-        }
-        string.append( new String( buffer, 0, count, charset ) );
-      }
-    }
-    return string == null ? null : string.toString();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
index b32a338..a164aeb 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
@@ -36,6 +36,7 @@ import java.net.URL;
 import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
 import static org.hamcrest.collection.IsMapContaining.hasEntry;
 import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsCollectionContaining.hasItem;
 import static org.hamcrest.core.IsNull.notNullValue;
 import static org.hamcrest.core.IsNull.nullValue;
 import static org.junit.Assert.assertThat;
@@ -72,7 +73,9 @@ public class TopologyRulesModuleTest {
     Service comp = topology.getServices().iterator().next();
     assertThat( comp, notNullValue() );
     assertThat( comp.getRole(), is( "WEBHDFS" ) );
-    assertThat( comp.getUrl(), is( "http://host:80/webhdfs" ) );
+    assertThat( comp.getUrls().size(), is( 2 ) );
+    assertThat( comp.getUrls(), hasItem( "http://host1:80/webhdfs" ) );
+    assertThat( comp.getUrls(), hasItem( "http://host2:80/webhdfs" ) );
 
     Provider provider = topology.getProviders().iterator().next();
     assertThat( provider, notNullValue() );
@@ -107,7 +110,9 @@ public class TopologyRulesModuleTest {
     Service service = topology.getServices().iterator().next();
     assertThat( service, notNullValue() );
     assertThat( service.getRole(), is( "test-service-role" ) );
-    assertThat( service.getUrl(), is( "test-service-scheme://test-service-host:42/test-service-path" ) );
+    assertThat( service.getUrls().size(), is( 2 ) );
+    assertThat( service.getUrls(), hasItem( "test-service-scheme://test-service-host1:42/test-service-path" ) );
+    assertThat( service.getUrls(), hasItem( "test-service-scheme://test-service-host2:42/test-service-path" ) );
     assertThat( service.getName(), is( "test-service-name" ) );
     assertThat( service.getParams(), hasEntry( is( "test-service-param-name-1" ), is( "test-service-param-value-1" ) ) );
     assertThat( service.getParams(), hasEntry( is( "test-service-param-name-2" ), is( "test-service-param-value-2" ) ) );
@@ -135,25 +140,30 @@ public class TopologyRulesModuleTest {
     assertThat( webhdfsService, notNullValue() );
     assertThat( webhdfsService.getRole(), is( "WEBHDFS" ) );
     assertThat( webhdfsService.getName(), nullValue() );
-    assertThat( webhdfsService.getUrl(), is( "http://host:50070/webhdfs" ) );
+    assertThat( webhdfsService.getUrls().size(), is( 2 ) );
+    assertThat( webhdfsService.getUrls(), hasItem( "http://host1:50070/webhdfs" ) );
+    assertThat( webhdfsService.getUrls(), hasItem( "http://host2:50070/webhdfs" ) );
 
     Service webhcatService = topology.getService( "WEBHCAT", null );
     assertThat( webhcatService, notNullValue() );
     assertThat( webhcatService.getRole(), is( "WEBHCAT" ) );
     assertThat( webhcatService.getName(), nullValue() );
-    assertThat( webhcatService.getUrl(), is( "http://host:50111/templeton" ) );
+    assertThat( webhcatService.getUrls().size(), is( 1 ) );
+    assertThat( webhcatService.getUrls(), hasItem( "http://host:50111/templeton" ) );
 
     Service oozieService = topology.getService( "OOZIE", null );
     assertThat( oozieService, notNullValue() );
     assertThat( oozieService.getRole(), is( "OOZIE" ) );
     assertThat( oozieService.getName(), nullValue() );
-    assertThat( oozieService.getUrl(), is( "http://host:11000/oozie" ) );
+    assertThat( oozieService.getUrls().size(), is( 1 ) );
+    assertThat( oozieService.getUrls(), hasItem( "http://host:11000/oozie" ) );
 
     Service hiveService = topology.getService( "HIVE", null );
     assertThat( hiveService, notNullValue() );
     assertThat( hiveService.getRole(), is( "HIVE" ) );
     assertThat( hiveService.getName(), nullValue() );
-    assertThat( hiveService.getUrl(), is( "http://host:10000" ) );
+    assertThat( hiveService.getUrls().size(), is( 1 ) );
+    assertThat( hiveService.getUrls(), hasItem( "http://host:10000" ) );
 
     Provider authenticationProvider = topology.getProvider( "authentication", "ShiroProvider" );
     assertThat( authenticationProvider, notNullValue() );
@@ -200,7 +210,9 @@ public class TopologyRulesModuleTest {
     Service service = topology.getServices().iterator().next();
     assertThat( service, notNullValue() );
     assertThat( service.getRole(), is( "test-service-role" ) );
-    assertThat( service.getUrl(), is( "test-service-scheme://test-service-host:42/test-service-path" ) );
+    assertThat( service.getUrls().size(), is( 2 ) );
+    assertThat( service.getUrls(), hasItem( "test-service-scheme://test-service-host1:42/test-service-path" ) );
+    assertThat( service.getUrls(), hasItem( "test-service-scheme://test-service-host2:42/test-service-path" ) );
     assertThat( service.getName(), is( "test-service-name" ) );
     assertThat( service.getParams(), hasEntry( is( "test-service-param-name-1" ), is( "test-service-param-value-1" ) ) );
     assertThat( service.getParams(), hasEntry( is( "test-service-param-name-2" ), is( "test-service-param-value-2" ) ) );

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-ambari-format.conf
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-ambari-format.conf b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-ambari-format.conf
index e1622c9..8d84336 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-ambari-format.conf
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-ambari-format.conf
@@ -37,7 +37,11 @@
 
     <property>
         <name>topology.service.test-service-role.test-service-name.url</name>
-        <value>test-service-scheme://test-service-host:42/test-service-path</value>
+        <value>test-service-scheme://test-service-host1:42/test-service-path</value>
+    </property>
+    <property>
+        <name>topology.service.test-service-role.test-service-name.url</name>
+        <value>test-service-scheme://test-service-host2:42/test-service-path</value>
     </property>
     <property>
         <name>topology.service.test-service-role.test-service-name.param.test-service-param-name-1</name>

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml
index 9443811..a7c476e 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml
@@ -31,7 +31,8 @@
     <service>
         <role>test-service-role</role>
         <name>test-service-name</name>
-        <url>test-service-scheme://test-service-host:42/test-service-path</url>
+        <url>test-service-scheme://test-service-host1:42/test-service-path</url>
+        <url>test-service-scheme://test-service-host2:42/test-service-path</url>
         <param>
             <name>test-service-param-name-1</name>
             <value>test-service-param-value-1</value>

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
index 594bb12..5df5aaa 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-ambari-format.conf
@@ -58,7 +58,11 @@
     </property>
     <property>
         <name>topology.service.WEBHDFS..url</name>
-        <value>http://host:50070/webhdfs</value>
+        <value>http://host1:50070/webhdfs</value>
+    </property>
+    <property>
+        <name>topology.service.WEBHDFS..url</name>
+        <value>http://host2:50070/webhdfs</value>
     </property>
     <property>
         <name>topology.service.WEBHCAT..url</name>

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
index 7d66ef1..f325e11 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
@@ -63,6 +63,7 @@
 
     <service>
         <role>WEBHDFS</role>
-        <url>http://host:80/webhdfs</url>
+        <url>http://host1:80/webhdfs</url>
+        <url>http://host2:80/webhdfs</url>
     </service>
 </topology>

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-service-webhdfs/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-service-webhdfs/pom.xml b/gateway-service-webhdfs/pom.xml
index e1b666e..f88e3d0 100644
--- a/gateway-service-webhdfs/pom.xml
+++ b/gateway-service-webhdfs/pom.xml
@@ -46,12 +46,38 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-provider-rewrite</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-provider-ha</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-library</artifactId>
+            <scope>test</scope>
+        </dependency>
+
     </dependencies>
 
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributor.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributor.java
new file mode 100644
index 0000000..d5db9a7
--- /dev/null
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributor.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
+import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.hdfs.dispatch.WebHdfsHaHttpClientDispatch;
+import org.apache.hadoop.gateway.topology.Provider;
+import org.apache.hadoop.gateway.topology.Service;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class NameNodeHaDispatchDeploymentContributor extends ProviderDeploymentContributorBase {
+
+   private static final String ROLE = "dispatch";
+
+   private static final String NAME = "ha-http-client";
+
+   @Override
+   public String getRole() {
+      return ROLE;
+   }
+
+   @Override
+   public String getName() {
+      return NAME;
+   }
+
+   @Override
+   public void contributeFilter(DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params) {
+      if (params == null) {
+         params = new ArrayList<FilterParamDescriptor>();
+      }
+      params.add(resource.createFilterParam().name(WebHdfsHaHttpClientDispatch.RESOURCE_ROLE_ATTRIBUTE).value(resource.role()));
+      Map<String, String> providerParams = provider.getParams();
+      for (Map.Entry<String, String> entry : providerParams.entrySet()) {
+         params.add(resource.createFilterParam().name(entry.getKey().toLowerCase()).value(entry.getValue()));
+      }
+      resource.addFilter().name(getName()).role(getRole()).impl(WebHdfsHaHttpClientDispatch.class).params(params);
+   }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
index 15322bc..cb44a88 100644
--- a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
 import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter;
+import org.apache.hadoop.gateway.topology.Provider;
 import org.apache.hadoop.gateway.topology.Service;
 
 import java.io.IOException;
@@ -33,6 +34,7 @@ import java.io.Reader;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBase {
 
@@ -78,7 +80,9 @@ public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBa
     addRewriteFilter( context, service, rootResource, params );
     addIdentityAssertionFilter( context, service, rootResource );
     addAuthorizationFilter( context, service, rootResource );
-    addDispatchFilter( context, service, rootResource, "dispatch", "http-client" );
+    String dispatchName = getDispatchName( context );
+    String dispatchRole = "dispatch";
+    addDispatchFilter( context, service, rootResource, dispatchRole, dispatchName );
 
     ResourceDescriptor fileResource = context.getGatewayDescriptor().addResource();
     fileResource.role( service.getRole() );
@@ -93,7 +97,7 @@ public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBa
     addRewriteFilter( context, service, fileResource, params );
     addIdentityAssertionFilter( context, service, fileResource );
     addAuthorizationFilter( context, service, fileResource );
-    addDispatchFilter( context, service, fileResource, "dispatch", "http-client" );
+    addDispatchFilter( context, service, fileResource, dispatchRole, dispatchName );
 
     ResourceDescriptor homeResource = context.getGatewayDescriptor().addResource();
     homeResource.role( service.getRole() );
@@ -106,7 +110,7 @@ public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBa
     addRewriteFilter( context, service, homeResource, params );
     addIdentityAssertionFilter( context, service, homeResource );
     addAuthorizationFilter( context, service, homeResource );
-    addDispatchFilter( context, service, homeResource, "dispatch", "http-client" );
+    addDispatchFilter( context, service, homeResource, dispatchRole, dispatchName );
 
     ResourceDescriptor homeFileResource = context.getGatewayDescriptor().addResource();
     homeFileResource.role( service.getRole() );
@@ -121,7 +125,7 @@ public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBa
     addRewriteFilter( context, service, homeFileResource, params );
     addIdentityAssertionFilter( context, service, homeFileResource );
     addAuthorizationFilter( context, service, homeFileResource );
-    addDispatchFilter( context, service, homeFileResource, "dispatch", "http-client" );
+    addDispatchFilter( context, service, homeFileResource, dispatchRole, dispatchName );
   }
 
   public void contributeDataNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
@@ -153,4 +157,19 @@ public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBa
     return rules;
   }
 
+   /**
+    * Returns the name of the dispatch to use by checking to see if 'HA' is enabled.
+    */
+   private String getDispatchName(DeploymentContext context) {
+      Provider provider = getProviderByRole(context, "ha");
+      if (provider != null && provider.isEnabled()) {
+         Map<String, String> params = provider.getParams();
+         if (params != null) {
+            if (params.containsKey(getRole())) {
+               return "ha-http-client";
+            }
+         }
+      }
+      return "http-client";
+   }
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/SafeModeException.java
----------------------------------------------------------------------
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/SafeModeException.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/SafeModeException.java
new file mode 100644
index 0000000..e45a048
--- /dev/null
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/SafeModeException.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs.dispatch;
+
+public class SafeModeException extends RuntimeException {
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/StandbyException.java
----------------------------------------------------------------------
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/StandbyException.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/StandbyException.java
new file mode 100644
index 0000000..2311485
--- /dev/null
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/StandbyException.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs.dispatch;
+
+public class StandbyException extends RuntimeException {
+}


Mime
View raw message