knox-commits mailing list archives

From m...@apache.org
Subject [2/2] knox git commit: KNOX-958 - Consolidate test config/drivers (Colm O hEigeartaigh via Sandeep More)
Date Mon, 05 Jun 2017 20:47:47 GMT
KNOX-958 - Consolidate test config/drivers (Colm O hEigeartaigh via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/60756840
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/60756840
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/60756840

Branch: refs/heads/master
Commit: 607568408ef78b0f07cf9d853ff9bda9ad138f06
Parents: 6d17873
Author: Sandeep More <more@apache.org>
Authored: Mon Jun 5 16:47:29 2017 -0400
Committer: Sandeep More <more@apache.org>
Committed: Mon Jun 5 16:47:29 2017 -0400

----------------------------------------------------------------------
 gateway-test-release-utils/pom.xml              |   16 -
 .../hadoop/gateway/GatewayTestConfig.java       |  218 ++--
 .../hadoop/gateway/GatewayTestDriver.java       |  232 +++-
 .../hadoop/gateway/SecureClusterTest/users.ldif |   61 -
 .../src/test/resources/users.ldif               |   61 +
 .../apache/hadoop/gateway/ShellTest/users.ldif  |   61 -
 .../webhdfs-test/src/test/resources/users.ldif  |   61 +
 gateway-test/pom.xml                            |    7 +
 .../hadoop/gateway/GatewayBasicFuncTest.java    |  867 ++++++++++++--
 .../hadoop/gateway/GatewayFuncTestDriver.java   | 1053 ------------------
 .../gateway/GatewayLocalServiceFuncTest.java    |    2 +-
 .../GatewayPortMappingDisableFeatureTest.java   |    2 +-
 .../gateway/GatewayPortMappingFailTest.java     |    2 +-
 .../gateway/GatewayPortMappingFuncTest.java     |    2 +-
 .../hadoop/gateway/GatewayTestConfig.java       |  608 ----------
 .../hadoop/gateway/WebHdfsHaFuncTest.java       |    2 +-
 16 files changed, 1262 insertions(+), 1993 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test-release-utils/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/pom.xml b/gateway-test-release-utils/pom.xml
index c182a10..62d2155 100644
--- a/gateway-test-release-utils/pom.xml
+++ b/gateway-test-release-utils/pom.xml
@@ -112,22 +112,6 @@
         </dependency>
 
         <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-server</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.jetty</groupId>
-            <artifactId>jetty-servlet</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpclient</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
             <groupId>commons-io</groupId>
             <artifactId>commons-io</artifactId>
             <scope>provided</scope>

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index 8d30231..eb692bc 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -17,13 +17,13 @@
  */
 package org.apache.hadoop.gateway;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.gateway.config.GatewayConfig;
 
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -51,11 +51,27 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
   private String kerberosLoginConfig = "/etc/knox/conf/krb5JAASLogin.conf";
   private String frontendUrl = null;
   private boolean xForwardedEnabled = true;
+  private String gatewayApplicationsDir = null;
+  private String gatewayServicesDir;
+  private String defaultTopologyName = "default";
+  private List<String> includedSSLCiphers = null;
+  private List<String> excludedSSLCiphers = null;
+  private boolean sslEnabled = false;
+  private String truststoreType = "jks";
+  private String keystoreType = "jks";
+  private boolean isTopologyPortMappingEnabled = true;
+  private ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<>();
+  private int backupVersionLimit = -1;
+  private long backupAgeLimit = -1;
 
   public void setGatewayHomeDir( String gatewayHomeDir ) {
     this.gatewayHomeDir = gatewayHomeDir;
   }
 
+  public String getGatewayHomeDir() {
+    return this.gatewayHomeDir;
+  }
+
   @Override
   public String getGatewayConfDir() {
     return gatewayHomeDir;
@@ -126,15 +142,18 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
     return new InetSocketAddress( getGatewayHost(), getGatewayPort() );
   }
 
-  @Override
+
   public long getGatewayIdleTimeout() {
     return 0l;
   }
 
   @Override
   public boolean isSSLEnabled() {
-    // TODO Auto-generated method stub
-    return false;
+    return sslEnabled;
+  }
+
+  public void setSSLEnabled( boolean sslEnabled ) {
+    this.sslEnabled = sslEnabled;
   }
 
   @Override
@@ -155,31 +174,27 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
     this.kerberosConfig = kerberosConfig;
   }
 
-
   @Override
   public boolean isKerberosDebugEnabled() {
     return kerberosDebugEnabled;
   }
   
-//  public void setKerberosDebugEnabled(boolean kerberosConfigEnabled) {
-//    this.kerberosDebugEnabled = kerberosDebugEnabled;
-//  }
+  public void setKerberosDebugEnabled(boolean kerberosDebugEnabled) {
+    this.kerberosDebugEnabled = kerberosDebugEnabled;
+  }
   
   @Override
   public String getKerberosLoginConfig() {
     return kerberosLoginConfig;
   }
 
-  public void setKerberosLoginConfig(String kerberosLoginConfig) {
-    this.kerberosLoginConfig = kerberosLoginConfig;
-  }
-
-  /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.config.GatewayConfig#getDefaultTopologyName()
-     */
   @Override
   public String getDefaultTopologyName() {
-    return "default";
+    return defaultTopologyName;
+  }
+
+  public void setDefaultTopologyName( String defaultTopologyName ) {
+    this.defaultTopologyName = defaultTopologyName;
   }
 
   /* (non-Javadoc)
@@ -187,8 +202,13 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
    */
   @Override
   public String getDefaultAppRedirectPath() {
-    // TODO Auto-generated method stub
-    return "/gateway/sandbox";
+
+    if(StringUtils.isBlank(this.defaultTopologyName)) {
+      return "/gateway/sandbox";
+    } else {
+      return "/gateway/"+this.defaultTopologyName;
+    }
+
   }
 
   /* (non-Javadoc)
@@ -197,6 +217,10 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
   @Override
   public String getFrontendUrl() { return frontendUrl; }
 
+  public void setFrontendUrl( String frontendUrl ) {
+    this.frontendUrl = frontendUrl;
+  }
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.config.GatewayConfig#getExcludedSSLProtocols()
    */
@@ -209,16 +233,20 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
 
   @Override
   public List getIncludedSSLCiphers() {
-    return null;
+    return includedSSLCiphers;
+  }
+
+  public void setIncludedSSLCiphers( List<String> list ) {
+    includedSSLCiphers = list;
   }
 
   @Override
   public List getExcludedSSLCiphers() {
-    return null;
+    return excludedSSLCiphers;
   }
 
-  public void setFrontendUrl( String frontendUrl ) {
-    this.frontendUrl = frontendUrl;
+  public void setExcludedSSLCiphers( List<String> list ) {
+    excludedSSLCiphers = list;
   }
 
   /* (non-Javadoc)
@@ -253,33 +281,55 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
    */
   @Override
   public String getTruststoreType() {
-    // TODO Auto-generated method stub
-    return null;
+    return truststoreType;
   }
-  
+
+  public void setTruststoreType( String truststoreType ) {
+    this.truststoreType = truststoreType;
+  }
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.config.GatewayConfig#getKeystoreType()
    */
   @Override
   public String getKeystoreType() {
-    // TODO Auto-generated method stub
-    return null;
+    return keystoreType;
   }
 
-//  public void setKerberosLoginConfig(String kerberosLoginConfig) {
-//   this.kerberosLoginConfig = kerberosLoginConfig;
-//  }
+  public void setKeystoreType( String keystoreType ) {
+    this.keystoreType = keystoreType;
+  }
+
+  public void setKerberosLoginConfig(String kerberosLoginConfig) {
+   this.kerberosLoginConfig = kerberosLoginConfig;
+  }
 
    @Override
    public String getGatewayServicesDir() {
-      return gatewayHomeDir + "/data/services";
-   }
+    if( gatewayServicesDir != null ) {
+      return gatewayServicesDir;
+    } else {
+      return getGatewayDataDir() + "/services";
+    }
+  }
+
+  public void setGatewayServicesDir( String gatewayServicesDir ) {
+    this.gatewayServicesDir = gatewayServicesDir;
+  }
 
   @Override
   public String getGatewayApplicationsDir() {
-    return gatewayHomeDir + "/conf/applications";
+    if( gatewayApplicationsDir != null ) {
+      return gatewayApplicationsDir;
+    } else {
+      return getGatewayConfDir() + "/applications";
+    }
   }
 
+  public void setGatewayApplicationsDir( String gatewayApplicationsDir ) {
+    this.gatewayApplicationsDir = gatewayApplicationsDir;
+  }
+
   @Override
   public boolean isXForwardedEnabled() {
     return xForwardedEnabled;
@@ -337,14 +387,30 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
     return 8*1024;
   }
 
-  @Override
+  public void setGatewayDeploymentsBackupVersionLimit( int newBackupVersionLimit ) {
+    backupVersionLimit = newBackupVersionLimit;
+  }
+
   public int getGatewayDeploymentsBackupVersionLimit() {
-    return Integer.MAX_VALUE;
+    return backupVersionLimit;
+  }
+
+  public void setTopologyPortMapping(ConcurrentHashMap<String, Integer> topologyPortMapping) {
+    this.topologyPortMapping = topologyPortMapping;
+  }
+
+  public void setGatewayPortMappingEnabled(
+      boolean topologyPortMappingEnabled) {
+    isTopologyPortMappingEnabled = topologyPortMappingEnabled;
   }
 
   @Override
   public long getGatewayDeploymentsBackupAgeLimit() {
-    return Long.MAX_VALUE;
+    return backupAgeLimit;
+  }
+
+  public void setGatewayDeploymentsBackupAgeLimit( long newBackupAgeLimit ) {
+    backupAgeLimit = newBackupAgeLimit;
   }
 
   /* (non-Javadoc)
@@ -365,7 +431,14 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
 
   @Override
   public List<String> getGlobalRulesServices() {
-    return Collections.emptyList();
+    ArrayList<String> services = new ArrayList<>();
+    services.add("WEBHDFS");
+    services.add("HBASE");
+    services.add("HIVE");
+    services.add("OOZIE");
+    services.add("RESOURCEMANAGER");
+    services.add("STORM");
+    return services;
   }
 
   /* (non-Javadoc)
@@ -432,34 +505,6 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
     return DEFAULT_WEBSOCKET_IDLE_TIMEOUT;
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.gateway.config.GatewayConfig#getMimeTypesToCompress()
-   */
-  @Override
-  public List<String> getMimeTypesToCompress() {
-    return new ArrayList<String>();
-  }
-
-  /**
-   * Map of Topology names and their ports.
-   *
-   * @return
-   */
-  @Override
-  public Map<String, Integer> getGatewayPortMappings() {
-    return new ConcurrentHashMap<String, Integer>();
-  }
-
-  /**
-   * Is the Port Mapping feature on ?
-   *
-   * @return
-   */
-  @Override
-  public boolean isGatewayPortMappingEnabled() {
-    return true;
-  }
-
   @Override
   public boolean isMetricsEnabled() {
     return false;
@@ -490,9 +535,17 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
     return 0;
   }
 
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getMimeTypesToCompress()
+   */
+  @Override
+  public List<String> getMimeTypesToCompress() {
+    return new ArrayList<String>();
+  }
+
   @Override
   public  boolean isCookieScopingToPathEnabled() {
-      return false;
+    return false;
   }
 
   @Override
@@ -502,31 +555,52 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
 
   @Override
   public String getAlgorithm() {
-	return null;
+    return null;
   }
 
   @Override
   public String getPBEAlgorithm() {
-	return null;
+    return null;
   }
 
   @Override
   public String getTransformation() {
-	return null;
+    return null;
   }
 
   @Override
   public String getSaltSize() {
-	return null;
+    return null;
   }
 
   @Override
   public String getIterationCount() {
-	return null;
+    return null;
   }
 
   @Override
   public String getKeyLength() {
-	return null;
+    return null;
   }
+
+  /**
+   * Map of topology names and their ports.
+   *
+   * @return the map of topology names to their mapped ports
+   */
+  @Override
+  public Map<String, Integer> getGatewayPortMappings() {
+    return topologyPortMapping;
+  }
+
+  /**
+   * Is the Port Mapping feature on?
+   *
+   * @return true if topology-to-port mapping is enabled
+   */
+  @Override
+  public boolean isGatewayPortMappingEnabled() {
+    return isTopologyPortMappingEnabled;
+  }
+
 }
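
For reference, a minimal sketch of a test driving the consolidated GatewayTestConfig through the setters added above; the home directory, topology name, port, and limit values are illustrative only, not taken from the commit:

import java.util.concurrent.ConcurrentHashMap;

import org.apache.hadoop.gateway.GatewayTestConfig;

public class GatewayTestConfigSketch {
  public static void main( String[] args ) {
    GatewayTestConfig config = new GatewayTestConfig();

    // Values that used to be hard-coded stubs are now injectable per test.
    config.setGatewayHomeDir( "/tmp/knox-test-home" ); // illustrative path
    config.setDefaultTopologyName( "cluster" );
    config.setSSLEnabled( true );

    // Topology port mapping is now backed by real state instead of constants.
    ConcurrentHashMap<String, Integer> portMapping = new ConcurrentHashMap<>();
    portMapping.put( "cluster", 8444 ); // illustrative port
    config.setTopologyPortMapping( portMapping );
    config.setGatewayPortMappingEnabled( true );

    // Deployment backup limits default to -1 but can be raised per test.
    config.setGatewayDeploymentsBackupVersionLimit( 5 );
    config.setGatewayDeploymentsBackupAgeLimit( 60000L );

    // Derived from the topology name: prints "/gateway/cluster".
    System.out.println( config.getDefaultAppRedirectPath() );
  }
}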

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestDriver.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestDriver.java b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestDriver.java
index c6d085c..bcbeeea 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestDriver.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestDriver.java
@@ -17,35 +17,55 @@
  */
 package org.apache.hadoop.gateway;
 
-import com.mycila.xmltool.XMLTag;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.nio.file.FileSystems;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
 import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 import org.apache.hadoop.gateway.config.GatewayConfig;
 import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
 import org.apache.hadoop.gateway.services.DefaultGatewayServices;
 import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.test.mock.MockServer;
+import org.hamcrest.CoreMatchers;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.net.InetAddress;
-import java.net.URL;
-import java.net.UnknownHostException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-
-import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.junit.Assert.assertThat;
+import com.mycila.xmltool.XMLTag;
 
+/**
+ * This class was created to reduce much of the duplication and boilerplate that was ending up in the GatewayBasicFuncTest class.
+ * It does a number of different things:
+ * 1) Creates a GATEWAY_HOME, starts a gateway instance and deploys a test topology.
+ * 2) Provides a registry of mock Hadoop services.
+ * 3) Provides "bundled" methods for common Hadoop operations to avoid duplication in tests.
+ * 4) Provides methods to access test resources.
+ */
 public class GatewayTestDriver {
 
-  private static Logger log = LoggerFactory.getLogger(GatewayTestDriver.class);
+  private static Logger log = LoggerFactory.getLogger( GatewayTestDriver.class );
 
   public Class<?> resourceBaseClass;
+  public Map<String,Service> services = new HashMap<>();
   public SimpleLdapDirectoryServer ldap;
   public TcpTransport ldapTransport;
   public boolean useGateway;
@@ -68,14 +88,30 @@ public class GatewayTestDriver {
    * @throws Exception Thrown if a failure occurs.
    */
   public int setupLdap( int port ) throws Exception {
-    URL usersUrl = getResourceUrl("users.ldif");
-    ldapTransport = new TcpTransport( 0 );
-    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+    Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/users.ldif");
+    return setupLdap( port, path.toFile() );
+  }
+  
+  public int setupLdap( int port, File ldifConfig ) throws Exception {
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", ldifConfig, ldapTransport );
     ldap.start();
     log.info( "LDAP port = " + ldapTransport.getAcceptor().getLocalAddress().getPort() );
     return port;
   }
 
+  /**
+   * Adds a mock service to the registry.
+   */
+  public void setupService( String role, String realUrl, String gatewayPath, boolean mock ) throws Exception {
+    Service service = new Service( role, realUrl, gatewayPath, mock );
+    services.put( role, service );
+    log.info( role + " port = " + service.server.getPort() );
+  }
 
   /**
    * Creates a GATEWAY_HOME, starts a gateway instance and deploys a test topology.
@@ -117,14 +153,17 @@ public class GatewayTestDriver {
     String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
     File stacksSourceDir = new File( targetDir.getParent(), pathToStacksSource);
     if (!stacksSourceDir.exists()) {
+      stacksSourceDir = new File( targetDir.getParentFile().getParent(), pathToStacksSource);
+    }
+    if (!stacksSourceDir.exists()) {
       stacksSourceDir = new File( targetDir.getParentFile().getParentFile().getParent(), pathToStacksSource);
     }
     if (stacksSourceDir.exists()) {
       FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
     }
 
-    gateway = GatewayServer.startGateway(config, srvcs);
-    MatcherAssert.assertThat("Failed to start gateway.", gateway, notNullValue());
+    gateway = GatewayServer.startGateway( config, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, CoreMatchers.notNullValue() );
 
     log.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
   }
@@ -137,9 +176,97 @@ public class GatewayTestDriver {
     FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
     FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
     FileUtils.deleteQuietly( new File( config.getGatewayServicesDir() ) );
+
+    for( Service service : services.values() ) {
+      service.server.stop();
+    }
+    services.clear();
+
     ldap.stop( true );
   }
 
+  public boolean isUseGateway() {
+    return useGateway;
+  }
+
+  public MockServer getMock( String serviceRole ) {
+    Service service = services.get( serviceRole );
+    return service.server;
+  }
+
+  public String getRealUrl( String serviceRole ) {
+    return getUrl( serviceRole, true );
+  }
+
+  public String getUrl( String serviceRole ) {
+    return getUrl( serviceRole, false );
+  }
+
+  private String getLocalHostName() {
+    String hostName = "localhost";
+    try {
+      hostName = InetAddress.getByName( "127.0.0.1" ).getHostName();
+    } catch( UnknownHostException e ) {
+      // Ignore and use the default.
+    }
+    return hostName;
+  }
+
+  public String getUrl( String serviceRole, boolean real ) {
+    String url;
+    String localHostName = getLocalHostName();
+    Service service = services.get( serviceRole );
+    if( useGateway && !real ) {
+      url = "http://" + localHostName + ":" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath() + service.gatewayPath;
+    } else if( service.mock ) {
+      url = "http://" + localHostName + ":" + service.server.getPort();
+    } else {
+      url = service.realUrl.toASCIIString();
+    }
+    return url;
+  }
+
+  public String getClusterUrl() {
+    String url;
+    String localHostName = getLocalHostName();
+    url = "http://" + localHostName + ":" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath() + "/" + clusterName;
+    return url;
+  }
+
+  public int getGatewayPort() {
+    return gateway.getAddresses()[0].getPort();
+  }
+
+  public String getRealAddr( String role ) {
+    String addr;
+    String localHostName = getLocalHostName();
+    Service service = services.get( role );
+    if( service.mock ) {
+      addr = localHostName + ":" + service.server.getPort();
+    } else {
+      addr = service.realUrl.getHost() + ":" + service.realUrl.getPort();
+    }
+    return addr;
+  }
+
+  public String getLdapUrl() {
+    return "ldap://localhost:" + ldapTransport.getAcceptor().getLocalAddress().getPort();
+  }
+
+  private static class Service {
+    String role;
+    URI realUrl;
+    String gatewayPath;
+    boolean mock;
+    MockServer server;
+    private Service( String role, String realUrl, String gatewayPath, boolean mock ) throws Exception {
+      this.role = role;
+      this.realUrl = new URI( realUrl );
+      this.gatewayPath = gatewayPath;
+      this.mock = mock;
+      this.server = new MockServer( role, true );
+    }
+  }
 
   public String getResourceBaseName() {
     return resourceBaseClass.getName().replaceAll( "\\.", "/" ) + "/";
@@ -155,29 +282,66 @@ public class GatewayTestDriver {
     return url;
   }
 
-  public String getLdapUrl() {
-    return "ldap://localhost:" + ldapTransport.getAcceptor().getLocalAddress().getPort();
+  public InputStream getResourceStream( String resource ) throws IOException {
+    InputStream stream = null;
+    if( resource.startsWith( "file:/" ) ) {
+      try {
+        stream = FileUtils.openInputStream( new File( new URI( resource ) ) );
+      } catch( URISyntaxException e ) {
+        throw new IOException( e  );
+      }
+    } else {
+      stream = ClassLoader.getSystemResourceAsStream( getResourceName( resource ) );
+    }
+    assertThat( "Failed to find test resource " + resource, stream, Matchers.notNullValue() );
+    return stream;
   }
 
-  public String getClusterUrl() {
-    String url;
-    String localHostName = getLocalHostName();
-    url = "http://" + localHostName + ":" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath() + "/" + clusterName;
-    return url;
+  public byte[] getResourceBytes( String resource ) throws IOException {
+    return IOUtils.toByteArray( getResourceStream( resource ) );
   }
 
-  public int getGatewayPort() {
-    return gateway.getAddresses()[0].getPort();
+  public String getResourceString( String resource, Charset charset ) throws IOException {
+    return IOUtils.toString( getResourceBytes( resource ), charset.name() );
   }
 
-  private String getLocalHostName() {
-    String hostName = "localhost";
-    try {
-      hostName = InetAddress.getByName("127.0.0.1").getHostName();
-    } catch( UnknownHostException e ) {
-      // Ignore and use the default.
+  public void assertComplete() {
+    // Check to make sure that all interactions were satisfied for mocked services.
+    // Otherwise just clear the mock interaction queue.
+    for( Service service : services.values() ) {
+      if( service.mock ) {
+        assertThat(
+            "Service " + service.role + " has remaining expected interactions.",
+            service.server.getCount(), Matchers.is(0) );
+      }
+      service.server.reset();
     }
-    return hostName;
   }
 
+
+  public void assertNotComplete(String serviceName) {
+    // Check that the named mocked service still has remaining expected interactions,
+    // then clear its mock interaction queue.
+
+    Service service = services.get(serviceName);
+
+    if(service != null) {
+      if(service.mock) {
+        assertThat(
+            "Service " + service.role + " has remaining expected interactions.",
+            service.server.getCount(), Matchers.not(0));
+      }
+      service.server.reset();
+    } else {
+      fail();
+    }
+  }
+
+  public void reset() {
+    for( Service service : services.values() ) {
+      service.server.reset();
+    }
+  }
+
+
 }
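
The driver above now owns the mock-service registry and resource helpers that previously lived in GatewayFuncTestDriver. A rough sketch of the lifecycle a test goes through, assuming a users.ldif exists at the illustrated path and eliding setupGateway, whose full signature is not shown in this hunk:

import java.io.File;

import org.apache.hadoop.gateway.GatewayTestDriver;

public class DriverLifecycleSketch {
  public static void main( String[] args ) throws Exception {
    GatewayTestDriver driver = new GatewayTestDriver();

    // Start the embedded LDAP server from an explicit LDIF file; the no-arg
    // overload resolves ${basedir}/src/test/resources/users.ldif itself.
    driver.setupLdap( 0, new File( "src/test/resources/users.ldif" ) );
    System.out.println( "LDAP at " + driver.getLdapUrl() );

    // Register a mocked Hadoop service; the driver starts a MockServer for it.
    driver.setupService( "WEBHDFS", "http://localhost:50070/webhdfs", "/webhdfs", true );
    System.out.println( "Mock WEBHDFS at " + driver.getRealUrl( "WEBHDFS" ) );

    // A real test would call driver.setupGateway(...) here to start the
    // gateway and deploy a topology before issuing requests against it.

    // Verify no queued mock interactions remain, then shut everything down.
    driver.assertComplete();
    driver.getMock( "WEBHDFS" ).stop(); // cleanup() does this for each service,
    driver.ldap.stop( true );           // but it also touches the gateway config.
  }
}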

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test-release/webhdfs-kerb-test/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-kerb-test/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif b/gateway-test-release/webhdfs-kerb-test/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif
deleted file mode 100644
index d82e99a..0000000
--- a/gateway-test-release/webhdfs-kerb-test/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif
+++ /dev/null
@@ -1,61 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-version: 1
-
-dn: dc=hadoop,dc=apache,dc=org
-objectclass: organization
-objectclass: dcObject
-o: Hadoop
-dc: hadoop
-
-dn: ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:organizationalUnit
-ou: people
-
-dn: ou=groups,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:organizationalUnit
-ou: groups
-
-dn: uid=hdfs,ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:person
-objectclass:organizationalPerson
-objectclass:inetOrgPerson
-cn: LarryWalls
-sn: Walls
-uid: hdfs
-userPassword:hdfs-password
-
-dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:person
-objectclass:organizationalPerson
-objectclass:inetOrgPerson
-cn: Guest
-sn: Guest
-uid: guest
-userPassword:guest-password
-
-dn: cn=admin,ou=groups,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:groupOfNames
-cn: admin
-member: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test-release/webhdfs-kerb-test/src/test/resources/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-kerb-test/src/test/resources/users.ldif b/gateway-test-release/webhdfs-kerb-test/src/test/resources/users.ldif
new file mode 100644
index 0000000..d82e99a
--- /dev/null
+++ b/gateway-test-release/webhdfs-kerb-test/src/test/resources/users.ldif
@@ -0,0 +1,61 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+version: 1
+
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+
+dn: uid=hdfs,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: LarryWalls
+sn: Walls
+uid: hdfs
+userPassword:hdfs-password
+
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: Guest
+uid: guest
+userPassword:guest-password
+
+dn: cn=admin,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:groupOfNames
+cn: admin
+member: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif
deleted file mode 100644
index d82e99a..0000000
--- a/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif
+++ /dev/null
@@ -1,61 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-version: 1
-
-dn: dc=hadoop,dc=apache,dc=org
-objectclass: organization
-objectclass: dcObject
-o: Hadoop
-dc: hadoop
-
-dn: ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:organizationalUnit
-ou: people
-
-dn: ou=groups,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:organizationalUnit
-ou: groups
-
-dn: uid=hdfs,ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:person
-objectclass:organizationalPerson
-objectclass:inetOrgPerson
-cn: LarryWalls
-sn: Walls
-uid: hdfs
-userPassword:hdfs-password
-
-dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:person
-objectclass:organizationalPerson
-objectclass:inetOrgPerson
-cn: Guest
-sn: Guest
-uid: guest
-userPassword:guest-password
-
-dn: cn=admin,ou=groups,dc=hadoop,dc=apache,dc=org
-objectclass:top
-objectclass:groupOfNames
-cn: admin
-member: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test-release/webhdfs-test/src/test/resources/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/resources/users.ldif b/gateway-test-release/webhdfs-test/src/test/resources/users.ldif
new file mode 100644
index 0000000..d82e99a
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/resources/users.ldif
@@ -0,0 +1,61 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+version: 1
+
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+
+dn: uid=hdfs,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: LarryWalls
+sn: Walls
+uid: hdfs
+userPassword:hdfs-password
+
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: Guest
+uid: guest
+userPassword:guest-password
+
+dn: cn=admin,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:groupOfNames
+cn: admin
+member: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test/pom.xml b/gateway-test/pom.xml
index 0a9d167..f39f2d1 100644
--- a/gateway-test/pom.xml
+++ b/gateway-test/pom.xml
@@ -59,6 +59,13 @@
         </dependency>
 
         <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-release-utils</artifactId>
+            <scope>test</scope>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
             <groupId>org.eclipse.jetty</groupId>
             <artifactId>jetty-servlet</artifactId>
             <scope>test</scope>
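
With gateway-test-release-utils on the gateway-test classpath, the functional tests below share the driver's mock registry. A hedged sketch of queueing one expectation on a registered mock; the fluent expect()/respond() chain is assumed from the org.apache.hadoop.test.mock.MockServer utility these tests import, and the path and parameters are illustrative:

import org.apache.hadoop.gateway.GatewayTestDriver;
import org.apache.http.HttpStatus;

public class MockExpectationSketch {
  public static void main( String[] args ) throws Exception {
    GatewayTestDriver driver = new GatewayTestDriver();
    driver.setupService( "WEBHDFS", "http://localhost:50070/webhdfs", "/webhdfs", true );

    // Queue one expected interaction on the mocked WEBHDFS back end
    // (expect()/respond() assumed from MockServer's fluent API).
    driver.getMock( "WEBHDFS" )
        .expect()
        .method( "GET" )
        .pathInfo( "/v1/tmp/example" )        // illustrative path
        .queryParam( "op", "LISTSTATUS" )
        .respond()
        .status( HttpStatus.SC_OK )
        .contentType( "application/json" );

    // A test would now issue a request to driver.getUrl( "WEBHDFS" ) and
    // finish with driver.assertComplete() to confirm the queue drained.
  }
}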

http://git-wip-us.apache.org/repos/asf/knox/blob/60756840/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index 2108bea..d7e99c5 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -27,13 +27,17 @@ import java.io.StringWriter;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
 import java.nio.charset.Charset;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 import javax.ws.rs.core.MediaType;
 
+import com.jayway.restassured.RestAssured;
 import com.jayway.restassured.http.ContentType;
+import com.jayway.restassured.path.json.JsonPath;
 import com.jayway.restassured.response.Cookie;
 import com.jayway.restassured.response.Header;
 import com.jayway.restassured.response.Response;
@@ -41,17 +45,33 @@ import com.jayway.restassured.specification.ResponseSpecification;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.hadoop.gateway.util.KnoxCLI;
 import org.apache.hadoop.test.TestUtils;
 import org.apache.hadoop.test.category.MediumTests;
 import org.apache.hadoop.test.category.VerifyTest;
 import org.apache.hadoop.test.mock.MockRequestMatcher;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpResponse;
 import org.apache.http.HttpStatus;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.AuthCache;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.protocol.ClientContext;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.impl.client.BasicAuthCache;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.protocol.BasicHttpContext;
+import org.apache.http.util.EntityUtils;
 import org.apache.velocity.Template;
 import org.apache.velocity.VelocityContext;
 import org.apache.velocity.app.VelocityEngine;
 import org.apache.velocity.runtime.RuntimeConstants;
 import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
+import org.hamcrest.CoreMatchers;
 import org.hamcrest.Matcher;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
@@ -68,10 +88,10 @@ import static com.jayway.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.*;
-import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.text.IsEmptyString.isEmptyString;
+import static org.junit.Assert.assertThat;
 import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
 import static org.xmlmatchers.transform.XmlConverters.the;
 import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
@@ -92,7 +112,7 @@ public class GatewayBasicFuncTest {
 
   private static Logger log = LoggerFactory.getLogger( GatewayBasicFuncTest.class );
 
-  public static GatewayFuncTestDriver driver = new GatewayFuncTestDriver();
+  public static GatewayTestDriver driver = new GatewayTestDriver();
 
   // Controls the host name to which the gateway dispatch requests.  This may be the name of a sandbox VM
   // or an EC2 instance.  Currently only a single host is supported.
@@ -736,73 +756,73 @@ public class GatewayBasicFuncTest {
     String groupAB = "hadoop";
     String groupC = "hcat";
 
-    driver.deleteFile( userA, passA, root, "true", 200 );
+    deleteFile( userA, passA, root, "true", 200 );
 
-    driver.createDir( userA, passA, groupA, root + "/dirA700", "700", 200, 200 );
-    driver.createDir( userA, passA, groupA, root + "/dirA770", "770", 200, 200 );
-    driver.createDir( userA, passA, groupA, root + "/dirA707", "707", 200, 200 );
-    driver.createDir( userA, passA, groupA, root + "/dirA777", "777", 200, 200 );
-    driver.createDir( userA, passA, groupAB, root + "/dirAB700", "700", 200, 200 );
-    driver.createDir( userA, passA, groupAB, root + "/dirAB770", "770", 200, 200 );
-    driver.createDir( userA, passA, groupAB, root + "/dirAB707", "707", 200, 200 );
-    driver.createDir( userA, passA, groupAB, root + "/dirAB777", "777", 200, 200 );
+    createDir( userA, passA, groupA, root + "/dirA700", "700", 200, 200 );
+    createDir( userA, passA, groupA, root + "/dirA770", "770", 200, 200 );
+    createDir( userA, passA, groupA, root + "/dirA707", "707", 200, 200 );
+    createDir( userA, passA, groupA, root + "/dirA777", "777", 200, 200 );
+    createDir( userA, passA, groupAB, root + "/dirAB700", "700", 200, 200 );
+    createDir( userA, passA, groupAB, root + "/dirAB770", "770", 200, 200 );
+    createDir( userA, passA, groupAB, root + "/dirAB707", "707", 200, 200 );
+    createDir( userA, passA, groupAB, root + "/dirAB777", "777", 200, 200 );
 
     // CREATE: Files
     // userA:groupA
-    driver.createFile( userA, passA, groupA, root + "/dirA700/fileA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
-    driver.createFile( userA, passA, groupA, root + "/dirA770/fileA770", "770", "text/plain", "small1.txt", 307, 201, 200 );
-    driver.createFile( userA, passA, groupA, root + "/dirA707/fileA707", "707", "text/plain", "small1.txt", 307, 201, 200 );
-    driver.createFile( userA, passA, groupA, root + "/dirA777/fileA777", "777", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupA, root + "/dirA700/fileA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupA, root + "/dirA770/fileA770", "770", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupA, root + "/dirA707/fileA707", "707", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupA, root + "/dirA777/fileA777", "777", "text/plain", "small1.txt", 307, 201, 200 );
     // userA:groupAB
-    driver.createFile( userA, passA, groupAB, root + "/dirAB700/fileAB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
-    driver.createFile( userA, passA, groupAB, root + "/dirAB770/fileAB770", "770", "text/plain", "small1.txt", 307, 201, 200 );
-    driver.createFile( userA, passA, groupAB, root + "/dirAB707/fileAB707", "707", "text/plain", "small1.txt", 307, 201, 200 );
-    driver.createFile( userA, passA, groupAB, root + "/dirAB777/fileAB777", "777", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupAB, root + "/dirAB700/fileAB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupAB, root + "/dirAB770/fileAB770", "770", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupAB, root + "/dirAB707/fileAB707", "707", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userA, passA, groupAB, root + "/dirAB777/fileAB777", "777", "text/plain", "small1.txt", 307, 201, 200 );
     // userB:groupB
-    driver.createFile( userB, passB, groupB, root + "/dirA700/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
-    driver.createFile( userB, passB, groupB, root + "/dirA770/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
+    createFile( userB, passB, groupB, root + "/dirA700/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
+    createFile( userB, passB, groupB, root + "/dirA770/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 //kam:20130219[ chmod seems to be broken at least in Sandbox 1.2
-//    driver.createFile( userB, passB, groupB, root + "/dirA707/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
-//    driver.createFile( userB, passB, groupB, root + "/dirA777/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+//    createFile( userB, passB, groupB, root + "/dirA707/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+//    createFile( userB, passB, groupB, root + "/dirA777/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 //kam]
     // userB:groupAB
-    driver.createFile( userB, passB, groupAB, root + "/dirA700/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
-    driver.createFile( userB, passB, groupAB, root + "/dirA770/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
-    driver.createFile( userB, passB, groupAB, root + "/dirA707/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
-    driver.createFile( userB, passB, groupAB, root + "/dirA777/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userB, passB, groupAB, root + "/dirA700/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
+    createFile( userB, passB, groupAB, root + "/dirA770/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
+    createFile( userB, passB, groupAB, root + "/dirA707/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+    createFile( userB, passB, groupAB, root + "/dirA777/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
     // userC:groupC
-    driver.createFile( userC, passC, groupC, root + "/dirA700/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
-    driver.createFile( userC, passC, groupC, root + "/dirA770/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
+    createFile( userC, passC, groupC, root + "/dirA700/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
+    createFile( userC, passC, groupC, root + "/dirA770/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 //kam:20130219[ chmod seems to be broken at least in Sandbox 1.2
-//    driver.createFile( userC, passC, groupC, root + "/dirA707/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
-//    driver.createFile( userC, passC, groupC, root + "/dirA777/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+//    createFile( userC, passC, groupC, root + "/dirA707/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
+//    createFile( userC, passC, groupC, root + "/dirA777/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 //kam]
 
     // READ
     // userA
-    driver.readFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_OK );
-    driver.readFile( userA, passA, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_OK );
-    driver.readFile( userA, passA, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
-    driver.readFile( userA, passA, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userA, passA, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userA, passA, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userA, passA, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
     // userB:groupB
-    driver.readFile( userB, passB, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
-    driver.readFile( userB, passB, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
-    driver.readFile( userB, passB, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
-    driver.readFile( userB, passB, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userB, passB, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
+    readFile( userB, passB, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
+    readFile( userB, passB, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userB, passB, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
     // userB:groupAB
-    driver.readFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
-    driver.readFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
-    driver.readFile( userB, passB, root + "/dirAB707/fileAB707", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
-    driver.readFile( userB, passB, root + "/dirAB777/fileAB777", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
+    readFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
+    readFile( userB, passB, root + "/dirAB707/fileAB707", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
+    readFile( userB, passB, root + "/dirAB777/fileAB777", "text/plain", "small1.txt", HttpStatus.SC_OK );
     // userC:groupC
-    driver.readFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
-    driver.readFile( userC, passC, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
-    driver.readFile( userC, passC, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
-    driver.readFile( userC, passC, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
+    readFile( userC, passC, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
+    readFile( userC, passC, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
+    readFile( userC, passC, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 
     //NEGATIVE: Test a bad password.
     if( driver.isUseGateway() ) {
-      Response response = given()
+      given()
           //.log().all()
           .auth().preemptive().basic( userA, "invalid-password" )
           .header("X-XSRF-Header", "jksdhfkhdsf")
@@ -815,20 +835,20 @@ public class GatewayBasicFuncTest {
     driver.assertComplete();
 
     // UPDATE (Negative First)
-    driver.updateFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 403 );
-    driver.updateFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small2.txt", 307, 403 );
-    driver.updateFile( userB, passB, root + "/dirAB770/fileAB700", "text/plain", "small2.txt", 307, 403 );
-    driver.updateFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small2.txt", 307, 403 );
-    driver.updateFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 201 );
+    updateFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 403 );
+    updateFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small2.txt", 307, 403 );
+    updateFile( userB, passB, root + "/dirAB770/fileAB700", "text/plain", "small2.txt", 307, 403 );
+    updateFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small2.txt", 307, 403 );
+    updateFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 201 );
 
     // DELETE (Negative First)
-    driver.deleteFile( userC, passC, root + "/dirA700/fileA700", "false", HttpStatus.SC_FORBIDDEN );
-    driver.deleteFile( userB, passB, root + "/dirAB700/fileAB700", "false", HttpStatus.SC_FORBIDDEN );
-    driver.deleteFile( userB, passB, root + "/dirAB770/fileAB770", "false", HttpStatus.SC_FORBIDDEN );
-    driver.deleteFile( userA, passA, root + "/dirA700/fileA700", "false", HttpStatus.SC_OK );
+    deleteFile( userC, passC, root + "/dirA700/fileA700", "false", HttpStatus.SC_FORBIDDEN );
+    deleteFile( userB, passB, root + "/dirAB700/fileAB700", "false", HttpStatus.SC_FORBIDDEN );
+    deleteFile( userB, passB, root + "/dirAB770/fileAB770", "false", HttpStatus.SC_FORBIDDEN );
+    deleteFile( userA, passA, root + "/dirA700/fileA700", "false", HttpStatus.SC_OK );
 
     // Cleanup anything that might have been leftover because the test failed previously.
-    driver.deleteFile( userA, passA, root, "true", HttpStatus.SC_OK );
+    deleteFile( userA, passA, root, "true", HttpStatus.SC_OK );
     LOG_EXIT();
   }
 
@@ -844,28 +864,28 @@ public class GatewayBasicFuncTest {
 //    String group = "hcat";
 
     // Cleanup anything that might have been leftover because the test failed previously.
-    driver.deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
+    deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 
     /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
     curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
      */
-    driver.createFile( user, pass, null, root+"/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
+    createFile( user, pass, null, root+"/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 
     /* Put the data file into HDFS (changes.txt)
     curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
      */
-    driver.createFile( user, pass, null, root+"/input/changes.txt", "777", "text/plain", "changes.txt", 307, 201, 200 );
+    createFile( user, pass, null, root+"/input/changes.txt", "777", "text/plain", "changes.txt", 307, 201, 200 );
 
     /* Create the output directory
     curl -X PUT 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/output?op=MKDIRS&user.name=hdfs'
     */
-    driver.createDir( user, pass, null, root+"/output", "777", 200, 200 );
+    createDir( user, pass, null, root+"/output", "777", 200, 200 );
 
     /* Submit the job
     curl -d user.name=hdfs -d jar=wordcount/hadoop-examples.jar -d class=org.apache.org.apache.hadoop.examples.WordCount -d arg=wordcount/input -d arg=wordcount/output 'http://localhost:8888/org.apache.org.apache.hadoop.gateway/cluster/templeton/v1/mapreduce/jar'
     {"id":"job_201210301335_0059"}
     */
-    String job = driver.submitJava(
+    String job = submitJava(
         user, pass,
         root+"/hadoop-examples.jar", "org.apache.org.apache.hadoop.examples.WordCount",
         root+"/input", root+"/output",
@@ -874,7 +894,7 @@ public class GatewayBasicFuncTest {
     /* Get the job status
     curl 'http://vm:50111/templeton/v1/queue/:jobid?user.name=hdfs'
     */
-    driver.queryQueue( user, pass, job );
+    queryQueue( user, pass, job );
 
     // Can't really check for the output here because the job won't be done.
     /* Retrieve results
@@ -883,7 +903,7 @@ public class GatewayBasicFuncTest {
 
     if( CLEANUP_TEST ) {
       // Cleanup anything that might have been leftover because the test failed previously.
-      driver.deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
+      deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
     }
     LOG_EXIT();
   }
@@ -897,25 +917,25 @@ public class GatewayBasicFuncTest {
     String group = "mapred";
 
     // Cleanup if previous run failed.
-    driver.deleteFile( user, pass, root, "true", 200, 404 );
+    deleteFile( user, pass, root, "true", 200, 404 );
 
     // Post the data to HDFS
-    driver.createFile( user, pass, null, root + "/passwd.txt", "777", "text/plain", "passwd.txt", 307, 201, 200 );
+    createFile( user, pass, null, root + "/passwd.txt", "777", "text/plain", "passwd.txt", 307, 201, 200 );
 
     // Post the script to HDFS
-    driver.createFile( user, pass, null, root+"/script.pig", "777", "text/plain", "script.pig", 307, 201, 200 );
+    createFile( user, pass, null, root+"/script.pig", "777", "text/plain", "script.pig", 307, 201, 200 );
 
     // Create the output directory
-    driver.createDir( user, pass, null, root + "/output", "777", 200, 200 );
+    createDir( user, pass, null, root + "/output", "777", 200, 200 );
 
     // Submit the job
-    driver.submitPig( user, pass, group, root + "/script.pig", "-v", root + "/output", 200 );
+    submitPig( user, pass, group, root + "/script.pig", "-v", root + "/output", 200 );
 
     // Check job status (if possible)
     // Check output (if possible)
 
     // Cleanup
-    driver.deleteFile( user, pass, root, "true", 200 );
+    deleteFile( user, pass, root, "true", 200 );
     LOG_EXIT();
   }
 
@@ -928,21 +948,21 @@ public class GatewayBasicFuncTest {
     String root = "/tmp/GatewayWebHCatFuncTest/testHiveViaWebHCat";
 
     // Cleanup if previous run failed.
-    driver.deleteFile( user, pass, root, "true", 200, 404 );
+    deleteFile( user, pass, root, "true", 200, 404 );
 
     // Post the data to HDFS
 
     // Post the script to HDFS
-    driver.createFile(user, pass, null, root + "/script.hive", "777", "text/plain", "script.hive", 307, 201, 200);
+    createFile(user, pass, null, root + "/script.hive", "777", "text/plain", "script.hive", 307, 201, 200);
 
     // Submit the job
-    driver.submitHive(user, pass, group, root + "/script.hive", root + "/output", 200);
+    submitHive(user, pass, group, root + "/script.hive", root + "/output", 200);
 
     // Check job status (if possible)
     // Check output (if possible)
 
     // Cleanup
-    driver.deleteFile( user, pass, root, "true", 200 );
+    deleteFile( user, pass, root, "true", 200 );
     LOG_EXIT();
   }
 
@@ -955,20 +975,20 @@ public class GatewayBasicFuncTest {
     String group = "hdfs";
 
     // Cleanup anything that might have been leftover because the test failed previously.
-    driver.deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
+    deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 
     /* Put the workflow definition into HDFS */
-    driver.createFile( user, pass, group, root+"/workflow.xml", "666", "application/octet-stream", "oozie-workflow.xml", 307, 201, 200 );
+    createFile( user, pass, group, root+"/workflow.xml", "666", "application/octet-stream", "oozie-workflow.xml", 307, 201, 200 );
 
     /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
     curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
      */
-    driver.createFile( user, pass, group, root+"/lib/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
+    createFile( user, pass, group, root+"/lib/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 
     /* Put the data file into HDFS (changes.txt)
     curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.org.apache.hadoop.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
      */
-    driver.createFile( user, pass, group, root+"/input/changes.txt", "666", "text/plain", "changes.txt", 307, 201, 200 );
+    createFile( user, pass, group, root+"/input/changes.txt", "666", "text/plain", "changes.txt", 307, 201, 200 );
 
     VelocityEngine velocity = new VelocityEngine();
     velocity.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
@@ -995,7 +1015,7 @@ public class GatewayBasicFuncTest {
     //System.out.println( "REQUEST=" + request );
 
     /* Submit the job via Oozie. */
-    String id = driver.oozieSubmitJob( user, pass, request, 201 );
+    String id = oozieSubmitJob( user, pass, request, 201 );
     //System.out.println( "ID=" + id );
 
     String success = "SUCCEEDED";
@@ -1004,7 +1024,7 @@ public class GatewayBasicFuncTest {
     long limit = 1000 * 60; // 60 seconds.
     long start = System.currentTimeMillis();
     while( System.currentTimeMillis() <= start+limit ) {
-      status = driver.oozieQueryJobStatus( user, pass, id, 200 );
+      status = oozieQueryJobStatus( user, pass, id, 200 );
       //System.out.println( "Status=" + status );
       if( success.equalsIgnoreCase( status ) ) {
         break;
@@ -1018,7 +1038,7 @@ public class GatewayBasicFuncTest {
 
     if( CLEANUP_TEST ) {
       // Cleanup anything that might have been leftover because the test failed previously.
-      driver.deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
+      deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
     }
     LOG_EXIT();
   }
@@ -3310,7 +3330,7 @@ public class GatewayBasicFuncTest {
         .status(HttpStatus.SC_MOVED_TEMPORARILY)
         .contentType(ContentType.JSON.toString());
 
-    Response response = given()
+    given()
         .auth().preemptive().basic(username, password)
         .header("X-XSRF-Header", "jksdhfkhdsf")
         .header("X-CSRF-Token", "H/8xIWCYQo4ZDWLvV9k0FAkjD0omWI8beVTp2mEPRxCbJmWBTYhRMhIV9LGIY3E51OAj+s6T7eQChpGJ")
@@ -3729,5 +3749,686 @@ public class GatewayBasicFuncTest {
       System.out.println(e.getMessage());
     }
   }
+  
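+  // WebHDFS CREATE is a two-step protocol: the NameNode answers the initial
+  // PUT with a 307 redirect whose Location header points at a DataNode, and
+  // the client then writes the content to that URL. The helpers below mock
+  // and exercise each hop separately.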
+  private String createFileNN( String user, String password, String file, String permsOctal, int status ) throws IOException {
+    if( status == HttpStatus.SC_TEMPORARY_REDIRECT ) {
+      driver.getMock( "WEBHDFS" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( "/v1" + file )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "CREATE" )
+          .respond()
+          .status( status )
+          .header( "Location", driver.getRealUrl("DATANODE") + file + "?op=CREATE&user.name="+user );
+    } else {
+      driver.getMock( "WEBHDFS" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( "/v1" + file )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "CREATE" )
+          .respond()
+          .status( status );
+    }
+    Response response = given()
+        //.log().headers()
+        //.log().parameters()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "CREATE" )
+        .queryParam( "permission", permsOctal )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().put( driver.getUrl( "WEBHDFS" ) + "/v1" + file + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    String location = response.getHeader( "Location" );
+    log.trace( "Redirect location: " + response.getHeader( "Location" ) );
+    return location;
+  }
+
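+  // Second hop of CREATE: stream the resource bytes to the DataNode location
+  // returned by the NameNode and report the resulting status code.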
+  private int createFileDN( String user, String password, String path, String location, String contentType, String resource, int status ) throws IOException {
+    if( status == HttpStatus.SC_CREATED ) {
+      driver.getMock( "DATANODE" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( path )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "CREATE" )
+          .contentType( contentType )
+          .content( driver.getResourceBytes( resource ) )
+          .respond()
+          .status( status )
+          .header( "Location", "webhdfs://" + driver.getRealAddr( "DATANODE" ) + "/v1" + path );
+    } else {
+      driver.getMock( "DATANODE" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( path )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "CREATE" )
+          .contentType( contentType )
+          .content( driver.getResourceStream( resource ) )
+          .respond()
+          .status( status );
+    }
+    Response response = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .contentType( contentType )
+        .content( driver.getResourceBytes( resource ) )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().put( location );
+    return response.getStatusCode();
+  }
+
+  private String createFile(
+        String user, String password, String group, String file, String permsOctal, String contentType, String resource,
+        int nnStatus, int dnStatus, int chownStatus ) throws IOException {
+    String location = createFileNN( user, password, file, permsOctal, nnStatus );
+    if( location != null ) {
+      int status = createFileDN( user, password, file, location, contentType, resource, dnStatus );
+      if( status < 300 && permsOctal != null ) {
+        chmodFile( user, password, file, permsOctal, chownStatus );
+        if( group != null ) {
+          chownFile( user, password, file, user, group, chownStatus );
+        }
+      }
+    }
+    driver.assertComplete();
+    return location;
+  }
+
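+  // OPEN mirrors CREATE: follow the NameNode redirect, GET the content from
+  // the DataNode, and compare it against the bundled test resource.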
+  private void readFile( String user, String password, String file, String contentType, String resource, int status ) throws IOException {
+    driver.getMock( "WEBHDFS" )
+        .expect()
+        .method( "GET" )
+        .pathInfo( "/v1" + file )
+        .queryParam( "user.name", user )
+        .queryParam( "op", "OPEN" )
+        .respond()
+        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
+        .header( "Location", driver.getRealUrl( "DATANODE" ) + file + "?op=OPEN&user.name="+user );
+    if( status == HttpStatus.SC_OK ) {
+      driver.getMock( "DATANODE" )
+          .expect()
+          .method( "GET" )
+          .pathInfo( file )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "OPEN" )
+          .respond()
+          .status( status )
+          .contentType( contentType )
+          .content( driver.getResourceBytes( resource ) );
+    } else {
+      driver.getMock( "DATANODE" )
+          .expect()
+          .method( "GET" )
+          .pathInfo( file )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "OPEN" )
+          .respond()
+          .status( status );
+    }
+    Response response = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "OPEN" )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().get( driver.getUrl("WEBHDFS") + "/v1" + file + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    if( response.getStatusCode() == HttpStatus.SC_OK ) {
+      String actualContent = response.asString();
+      String expectedContent = driver.getResourceString( resource, Charset.forName("UTF-8") );
+      assertThat( actualContent, Matchers.is(expectedContent) );
+    }
+    driver.assertComplete();
+  }
+
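+  // Thin wrappers around the WebHDFS SETOWNER and SETPERMISSION operations;
+  // the mock always answers 200 while the caller asserts the status observed
+  // through the gateway.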
+  private void chownFile( String user, String password, String file, String owner, String group, int status ) {
+    driver.getMock( "WEBHDFS" )
+        .expect()
+        .method( "PUT" )
+        .pathInfo( "/v1" + file )
+        .queryParam( "op", "SETOWNER" )
+        .queryParam( "user.name", user )
+        .queryParam( "owner", owner )
+        .queryParam( "group", group )
+        .respond()
+        .status( HttpStatus.SC_OK );
+    given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "SETOWNER" )
+        .queryParam( "owner", owner )
+        .queryParam( "group", group )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().put( driver.getUrl("WEBHDFS") + "/v1" + file + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    driver.assertComplete();
+  }
+
+  private void chmodFile( String user, String password, String file, String permsOctal, int status ) {
+    driver.getMock( "WEBHDFS" )
+        .expect()
+        .method( "PUT" )
+        .pathInfo( "/v1" + file )
+        .queryParam( "op", "SETPERMISSION" )
+        .queryParam( "user.name", user )
+        .queryParam( "permission", permsOctal )
+        .respond()
+        .status( HttpStatus.SC_OK );
+    given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "SETPERMISSION" )
+        .queryParam( "permission", permsOctal )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().put( driver.getUrl("WEBHDFS") + "/v1" + file + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    driver.assertComplete();
+  }
+
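+  // Overwriting an existing file follows the same redirect-then-write dance
+  // as CREATE, but with overwrite=true on the initial NameNode request.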
+  private String updateFile( String user, String password, String file, String contentType, String resource, int nnStatus, int dnStatus ) throws IOException {
+    String location;
+    location = updateFileNN( user, password, file, resource, nnStatus );
+    if( location != null ) {
+      updateFileDN( user, password, file, location, contentType, resource, dnStatus );
+    }
+    driver.assertComplete();
+    return location;
+  }
+
+  private String updateFileNN( String user, String password, String file, String resource, int status ) throws IOException {
+    if( status == HttpStatus.SC_TEMPORARY_REDIRECT ) {
+      driver.getMock( "WEBHDFS" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( "/v1" + file )
+          .queryParam( "op", "CREATE" )
+          .queryParam( "user.name", user )
+          .queryParam( "overwrite", "true" )
+          .respond()
+          .status( status )
+          .header( "Location", driver.getRealUrl("DATANODE") + file + "?op=CREATE&user.name="+user );
+    } else {
+      driver.getMock( "WEBHDFS" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( "v1" + file )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "CREATE" )
+          .respond()
+          .status( status );
+    }
+    Response response = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "CREATE" )
+        .queryParam( "overwrite", "true" )
+        .content( driver.getResourceBytes( resource ) )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().put( driver.getUrl("WEBHDFS") + "/v1" + file + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    String location = response.getHeader( "Location" );
+    log.trace( "Redirect location: " + response.getHeader( "Location" ) );
+    return location;
+  }
+
+  private void updateFileDN( String user, String password, String path, String location, String contentType, String resource, int status ) throws IOException {
+    if( status == HttpStatus.SC_CREATED ) {
+      driver.getMock( "DATANODE" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( path )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "CREATE" )
+          .contentType( contentType )
+          .content( driver.getResourceBytes( resource ) )
+          .respond()
+          .status( status )
+          .header( "Location", "webhdfs://" + driver.getRealAddr( "DATANODE" ) + "/v1" + path );
+    } else {
+      driver.getMock( "DATANODE" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( path )
+          .queryParam( "user.name", user )
+          .queryParam( "op", "CREATE" )
+          .contentType( contentType )
+          .content( driver.getResourceBytes( resource ) )
+          .respond()
+          .status( status );
+    }
+    given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "CREATE" )
+        .queryParam( "overwrite", "true" )
+        .contentType( contentType )
+        .content( driver.getResourceBytes( resource ) )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().put( location );
+  }
+
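+  // DELETE is handled entirely by the NameNode (no DataNode redirect). The
+  // varargs status lets cleanup calls accept several codes, e.g. 200 or 404.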
+  private void deleteFile( String user, String password, String file, String recursive, int... status ) {
+    driver.getMock( "WEBHDFS" )
+        .expect()
+        .method( "DELETE" )
+        .pathInfo( "/v1" + file )
+        .queryParam( "user.name", user )
+        .queryParam( "op", "DELETE" )
+        .queryParam( "recursive", recursive )
+        .respond().status( status[0] );
+    given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "DELETE" )
+        .queryParam( "recursive", recursive )
+        .expect()
+        //.log().all()
+        .statusCode( Matchers.isIn(ArrayUtils.toObject(status)) )
+        .when()
+        .delete( driver.getUrl( "WEBHDFS" ) + "/v1" + file + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    driver.assertComplete();
+  }
+
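+  // MKDIRS returns {"boolean": true} directly from the NameNode; directories
+  // never involve a DataNode hop.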
+  private String createDir( String user, String password, String dir, String permsOctal, int status ) {
+    driver.getMock( "WEBHDFS" )
+        .expect()
+        .method( "PUT" )
+        .pathInfo( "/v1" + dir )
+        .queryParam( "op", "MKDIRS" )
+        .queryParam( "user.name", user )
+        .queryParam( "permission", permsOctal )
+        .respond()
+        .status( HttpStatus.SC_OK )
+        .contentType( "application/json" )
+        .content( "{\"boolean\": true}".getBytes() );
+    Response response = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .queryParam( "op", "MKDIRS" )
+        .queryParam( "permission", permsOctal )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .contentType( "application/json" )
+        .content( "boolean", CoreMatchers.equalTo(true) )
+        .when()
+        .put( driver.getUrl("WEBHDFS") + "/v1" + dir + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    String location = response.getHeader( "Location" );
+    return location;
+  }
+
+  private String createDir( String user, String password, String group, String dir, String permsOctal, int nnStatus, int chownStatus ) {
+    String location = createDir( user, password, dir, permsOctal, nnStatus );
+    if( location != null ) {
+      chownFile( user, password, dir, user, group, chownStatus );
+    }
+    return location;
+  }
+
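+  // The WebHCat submissions below POST form parameters and pull the job id
+  // out of the JSON response with JsonPath.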
+  private String submitJava( String user, String password, String jar, String main, String input, String output, int status ) {
+    driver.getMock( "WEBHCAT" )
+        .expect()
+        .method( "POST" )
+        .pathInfo( "/v1/mapreduce/jar" )
+        .formParam( "user.name", user )
+        .formParam( "jar", jar )
+        .formParam( "class", main )
+        .formParam( "arg", input, output )
+        .respond()
+        .status( status )
+        .contentType( "application/json" )
+        .content( "{\"id\":\"job_201210301335_0086\"}".getBytes() );
+    String json = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .formParam( "user.name", user )
+        .formParam( "jar", jar )    //"/user/hdfs/test/hadoop-examples.jar" )
+        .formParam( "class", main ) //"org.apache.org.apache.hadoop.examples.WordCount" )
+        .formParam( "arg", input, output ) //.formParam( "arg", "/user/hdfs/test/input", "/user/hdfs/test/output" )
+        .expect()
+        //.log().all()
+        .statusCode( status )
+        .when().post( driver.getUrl( "WEBHCAT" ) + "/v1/mapreduce/jar" + ( driver.isUseGateway() ? "" : "?user.name=" + user ) ).asString();
+    log.trace( "JSON=" + json );
+    String job = JsonPath.from(json).getString( "id" );
+    log.debug( "JOB=" + job );
+    driver.assertComplete();
+    return job;
+  }
+
+  private String submitPig( String user, String password, String group, String file, String arg, String statusDir, int... status ) {
+    driver.getMock( "WEBHCAT" )
+        .expect()
+        .method( "POST" )
+        .pathInfo( "/v1/pig" )
+        .respond()
+        .status( status[0] )
+        .contentType( "application/json" )
+        .content( "{\"id\":\"job_201210301335_0086\"}".getBytes() );
+    String json = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        //BUG: The identity asserter needs to check for this too.
+        .formParam( "user.name", user )
+        .formParam( "group", group )
+        .formParam( "file", file )
+        .formParam( "arg", arg )
+        .formParam( "statusdir", statusDir )
+        .expect()
+        //.log().all()
+        .statusCode( Matchers.isIn(ArrayUtils.toObject(status)) )
+        .contentType( "application/json" )
+        //.content( "boolean", equalTo( true ) )
+        .when()
+        .post( driver.getUrl( "WEBHCAT" ) + "/v1/pig" + ( driver.isUseGateway() ? "" : "?user.name=" + user ) )
+        .asString();
+    log.trace( "JSON=" + json );
+    String job = JsonPath.from(json).getString( "id" );
+    log.debug( "JOB=" + job );
+    driver.assertComplete();
+    return job;
+  }
+
+  private String submitHive( String user, String password, String group, String file, String statusDir, int... status ) {
+    driver.getMock( "WEBHCAT" )
+        .expect()
+        .method( "POST" )
+        .pathInfo( "/v1/hive" )
+        .respond()
+        .status( status[ 0 ] )
+        .contentType( "application/json" )
+        .content( "{\"id\":\"job_201210301335_0086\"}".getBytes() );
+    String json = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .formParam( "user.name", user )
+        .formParam( "group", group )
+        .formParam( "group", group )
+        .formParam( "file", file )
+        .formParam( "statusdir", statusDir )
+        .expect()
+        //.log().all()
+        .statusCode( Matchers.isIn(ArrayUtils.toObject(status)) )
+        .contentType( "application/json" )
+        //.content( "boolean", equalTo( true ) )
+        .when()
+        .post( driver.getUrl( "WEBHCAT" ) + "/v1/hive" + ( driver.isUseGateway() ? "" : "?user.name=" + user ) )
+        .asString();
+    log.trace( "JSON=" + json );
+    String job = JsonPath.from(json).getString( "id" );
+    log.debug( "JOB=" + job );
+    driver.assertComplete();
+    return job;
+  }
+
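+  // Query the WebHCat queue for a submitted job and verify the echoed jobId.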
+  private void queryQueue( String user, String password, String job ) throws IOException {
+    driver.getMock( "WEBHCAT" )
+        .expect()
+        .method( "GET" )
+        .pathInfo( "/v1/jobs/" + job )
+        .respond()
+        .status( HttpStatus.SC_OK )
+        .content( driver.getResourceBytes( "webhcat-job-status.json" ) )
+        .contentType( "application/json" );
+    String status = given()
+        //.log().all()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .pathParam( "job", job )
+        .expect()
+        //.log().all()
+        .content( "status.jobId", CoreMatchers.equalTo(job) )
+        .statusCode( HttpStatus.SC_OK )
+        .when().get( driver.getUrl( "WEBHCAT" ) + "/v1/jobs/{job}" + ( driver.isUseGateway() ? "" : "?user.name=" + user ) ).asString();
+    log.debug( "STATUS=" + status );
+    driver.assertComplete();
+  }
+
+  /* GET /oozie/versions
+  HTTP/1.1 200 OK
+  Content-Type: application/json;charset=UTF-8
+  Content-Length: 5
+  Server: Apache-Coyote/1.1
+  Date: Thu, 14 Feb 2013 15:47:51 GMT
+  See: oozie-versions.json
+  */
+  private void oozieGetVersions( String user, String password ) throws IOException {
+    given()
+        .auth().preemptive().basic( user, password )
+        .header( "X-XSRF-Header", "jksdhfkhdsf" )
+        .expect()
+        .statusCode( 200 )
+        .body( "", Matchers.hasItems(0, 1) )
+        .when().get( driver.getUrl( "OOZIE" ) + "/versions" + ( driver.isUseGateway() ? "" : "?user.name=" + user ) ).asString();
+  }
+
+  /* GET /oozie/v1/admin/status
+    HTTP/1.1 200 OK
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 23
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 15:49:16 GMT
+    See: oozie-admin-status.json
+   */
+
+  /* PUT /oozie/v1/admin/status?safemode=true
+  TODO
+  */
+
+  /* GET /oozie/v1/admin/os-env
+    HTTP/1.1 200 OK
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 2039
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 15:51:56 GMT
+    See: oozie-admin-os-env.json
+   */
+
+  /* GET /oozie/v1/admin/java-sys-properties
+    HTTP/1.1 200 OK
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 3673
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 15:53:00 GMT
+    See: oozie-admin-java-sys-properties.json
+  */
+
+  /* GET /oozie/v1/admin/configuration
+    HTTP/1.1 200 OK
+    Transfer-Encoding: Identity
+    Content-Type: application/json;charset=UTF-8
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 15:53:31 GMT
+    See: oozie-admin-configuration.json
+  */
+
+  /* GET /oozie/v1/admin/instrumentation
+    HTTP/1.1 200 OK
+    Transfer-Encoding: Identity
+    Content-Type: application/json;charset=UTF-8
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 15:55:43 GMT
+    See: oozie-admin-instrumentation.json
+  */
+
+  /* GET /oozie/v1/admin/build-version
+    HTTP/1.1 200 OK
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 27
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 16:08:31 GMT
+    See: oozie-admin-build-version.json
+  */
+
+  /* POST /oozie/v1/jobs (request XML; contains URL, response JSON)
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 45
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 18:10:52 GMT
+  */
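+  // Raw HttpClient with a pre-populated AuthCache sends BASIC credentials
+  // preemptively, avoiding the 401 challenge round trip (see the REST-assured
+  // note in oozieQueryJobStatus).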
+  private String oozieSubmitJob( String user, String password, String request, int status ) throws IOException, URISyntaxException {
+    driver.getMock( "OOZIE" )
+        .expect()
+        .method( "POST" )
+        .pathInfo( "/v1/jobs" )
+        .respond()
+        .status( HttpStatus.SC_CREATED )
+        .content( driver.getResourceBytes( "oozie-jobs-submit-response.json" ) )
+        .contentType( "application/json" );
+    //System.out.println( "REQUEST LENGTH = " + request.length() );
+
+    URL url = new URL( driver.getUrl( "OOZIE" ) + "/v1/jobs?action=start" + ( driver.isUseGateway() ? "" : "&user.name=" + user ) );
+    HttpHost targetHost = new HttpHost( url.getHost(), url.getPort(), url.getProtocol() );
+    DefaultHttpClient client = new DefaultHttpClient();
+    client.getCredentialsProvider().setCredentials(
+        new AuthScope( targetHost ),
+        new UsernamePasswordCredentials( user, password ) );
+
+    // Create AuthCache instance
+    AuthCache authCache = new BasicAuthCache();
+    // Generate BASIC scheme object and add it to the local auth cache
+    BasicScheme basicAuth = new BasicScheme();
+    authCache.put( targetHost, basicAuth );
+    // Add AuthCache to the execution context
+    BasicHttpContext localContext = new BasicHttpContext();
+    localContext.setAttribute( ClientContext.AUTH_CACHE, authCache );
+
+    HttpPost post = new HttpPost( url.toURI() );
+//      post.getParams().setParameter( "action", "start" );
+    StringEntity entity = new StringEntity( request, org.apache.http.entity.ContentType.create( "application/xml", "UTF-8" ) );
+    post.setEntity( entity );
+    post.setHeader( "X-XSRF-Header", "ksdjfhdsjkfhds" );
+    HttpResponse response = client.execute( targetHost, post, localContext );
+    assertThat( response.getStatusLine().getStatusCode(), Matchers.is(status) );
+    String json = EntityUtils.toString( response.getEntity() );
+
+//      String json = given()
+//          .log().all()
+//          .auth().preemptive().basic( user, password )
+//          .queryParam( "action", "start" )
+//          .contentType( "application/xml;charset=UTF-8" )
+//          .content( request )
+//          .expect()
+//          .log().all()
+//          .statusCode( status )
+//          .when().post( getUrl( "OOZIE" ) + "/v1/jobs" + ( isUseGateway() ? "" : "?user.name=" + user ) ).asString();
+      //System.out.println( "JSON=" + json );
+    String id = JsonPath.from(json).getString( "id" );
+    return id;
+  }
+
+  /* GET /oozie/v1/jobs?filter=user%3Dbansalm&offset=1&len=50 (body JSON; contains URL)
+    HTTP/1.1 200 OK
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 46
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 16:10:25 GMT
+  */
+
+  /* GET /oozie/v1/job/0000000-130214094519989-oozie-oozi-W
+    HTTP/1.1 200 OK
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 2611
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 17:39:36 GMT
+  */
+
+  /* http://192.168.56.101:11000/oozie/v1/job/0000000-130214094519989-oozie-oozi-W?action=start&user.name=sandbox
+    HTTP/1.1 200 OK
+    Date: Thu, 14 Feb 2013 17:52:13 GMT
+    Content-Length: 0
+    Server: Apache-Coyote/1.1
+    Set-Cookie: hadoop.auth="u=sandbox&p=sandbox&t=simple&e=1360900333149&s=AU/GeHDNBuK9RBRaBJfrqatjfz8="; Version=1; Path=/
+  */
+
+  /* PUT /oozie/v1/job/job-3?action=rerun (request body XML, contains URL)
+    HTTP/1.1 200 OK
+    Date: Thu, 14 Feb 2013 18:07:45 GMT
+    Content-Length: 0
+    Server: Apache-Coyote/1.1
+    Set-Cookie: hadoop.auth="u=sandbox&p=sandbox&t=simple&e=1360901264892&s=DCOczPqn9mcisCeOb5x2C7LIRc8="; Version=1; Path=/
+  */
+
+  /* GET /oozie/v1/job/0000000-130214094519989-oozie-oozi-W?show=info (body JSON, contains URL)
+    HTTP/1.1 200 OK
+    Content-Type: application/json;charset=UTF-8
+    Content-Length: 2611
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 17:45:23 GMT
+  */
+  private String oozieQueryJobStatus( String user, String password, String id, int status ) throws Exception {
+    driver.getMock( "OOZIE" )
+        .expect()
+        .method( "GET" )
+        .pathInfo( "/v1/job/" + id )
+        .respond()
+        .status( HttpStatus.SC_OK )
+        .content( driver.getResourceBytes( "oozie-job-show-info.json" ) )
+        .contentType( "application/json" );
+
+    //NOTE:  For some reason REST-assured doesn't like this and ends up failing with Content-Length issues.
+    URL url = new URL( driver.getUrl( "OOZIE" ) + "/v1/job/" + id + ( driver.isUseGateway() ? "" : "?user.name=" + user ) );
+    HttpHost targetHost = new HttpHost( url.getHost(), url.getPort(), url.getProtocol() );
+    DefaultHttpClient client = new DefaultHttpClient();
+    client.getCredentialsProvider().setCredentials(
+        new AuthScope( targetHost ),
+        new UsernamePasswordCredentials( user, password ) );
+
+    // Create AuthCache instance
+    AuthCache authCache = new BasicAuthCache();
+    // Generate BASIC scheme object and add it to the local auth cache
+    BasicScheme basicAuth = new BasicScheme();
+    authCache.put( targetHost, basicAuth );
+    // Add AuthCache to the execution context
+    BasicHttpContext localContext = new BasicHttpContext();
+    localContext.setAttribute( ClientContext.AUTH_CACHE, authCache );
+
+    HttpGet request = new HttpGet( url.toURI() );
+    request.setHeader("X-XSRF-Header", "ksdhfjkhdsjkf");
+    HttpResponse response = client.execute( targetHost, request, localContext );
+    assertThat( response.getStatusLine().getStatusCode(), Matchers.is(status) );
+    String json = EntityUtils.toString( response.getEntity() );
+    String jobStatus = JsonPath.from(json).getString( "status" );
+    return jobStatus;
+  }
+
+  /* GET /oozie/v1/job/0000000-130214094519989-oozie-oozi-W?show=definition
+    HTTP/1.1 200 OK
+    Content-Type: application/xml;charset=UTF-8
+    Content-Length: 1494
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 17:43:30 GMT
+  */
+
+  /* GET /oozie/v1/job/0000000-130214094519989-oozie-oozi-W?show=log
+    HTTP/1.1 200 OK
+    Transfer-Encoding: Identity
+    Content-Type: text/plain;charset=UTF-8
+    Server: Apache-Coyote/1.1
+    Date: Thu, 14 Feb 2013 17:41:43 GMT
+  */
 
 }

