knox-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kmin...@apache.org
Subject [2/5] knox git commit: KNOX-620: Jenkins Knox-master-verify failing since #725 due to JDK version issues. Re-enabled remainder of func tests.
Date Mon, 09 Nov 2015 21:10:50 GMT
http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
index 81223ac..3676fdc 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
@@ -52,280 +52,280 @@ import static org.junit.Assert.assertThat;
 
 public class KnoxCliSysBindTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//
-//  private static Class RESOURCE_BASE_CLASS = KnoxCliSysBindTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( KnoxCliSysBindTest.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayTestConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
-//  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
-//  private static final String uuid = UUID.randomUUID().toString();
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    System.setOut(new PrintStream(outContent));
-//    System.setErr(new PrintStream(errContent));
-//    setupLdap();
-//    setupGateway();
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    ldap.stop( true );
-//
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static void setupLdap( ) throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  public static void setupGateway() throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    writeTopology(topoDir, "test-cluster-1.xml", "guest", "guest-password", true);
-//    writeTopology(topoDir, "test-cluster-2.xml", "sam", "sam-password", true);
-//    writeTopology(topoDir, "test-cluster-3.xml", "admin", "admin-password", true);
-//    writeTopology(topoDir, "test-cluster-4.xml", "", "", false);
-//
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//  }
-//
-//  private static void writeTopology(File topoDir, String name, String user, String pass, boolean goodTopology) throws Exception {
-//    File descriptor = new File(topoDir, name);
-//
-//    if(descriptor.exists()){
-//      descriptor.delete();
-//      descriptor = new File(topoDir, name);
-//    }
-//
-//    FileOutputStream stream = new FileOutputStream( descriptor, false );
-//
-//    if(goodTopology) {
-//      createTopology(user, pass).toStream( stream );
-//    } else {
-//      createBadTopology().toStream( stream );
-//    }
-//
-//    stream.close();
-//
-//  }
-//
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  private static XMLTag createBadTopology(){
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag( "gateway" )
-//        .addTag("provider")
-//        .addTag("role").addText("authentication")
-//        .addTag("name").addText("ShiroProvider")
-//        .addTag("enabled").addText("true")
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
-//        .addTag("value").addText("true").gotoParent()
-//        .addTag("param")
-//        .addTag( "name").addText( "urls./**")
-//        .addTag("value").addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service")
-//        .addTag("role").addText( "KNOX" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static XMLTag createTopology(String username, String password) {
-//
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag("gateway")
-//        .addTag("provider")
-//        .addTag("role").addText("authentication")
-//        .addTag("name").addText("ShiroProvider")
-//        .addTag("enabled").addText("true")
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapGroupContextFactory")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.searchBase")
-//        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.groupObjectClass")
-//        .addTag("value").addText("groupOfNames").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapRealm.memberAttribute")
-//        .addTag("value").addText("member").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
-//        .addTag("value").addText("true").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
-//        .addTag("value").addText("uid=" + username + ",ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
-//        .addTag( "value").addText(password).gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag("name" ).addText("urls./**")
-//        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
-//        .addTag("provider" )
-//        .addTag("role").addText( "identity-assertion" )
-//        .addTag( "enabled").addText( "true" )
-//        .addTag("name").addText( "Default" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testLDAPAuth() throws Exception {
-//    LOG_ENTER();
-//
-////    Test 1: Make sure authentication is successful
-//    outContent.reset();
-//    String args[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-1", "--d" };
-//    KnoxCLI cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args);
-//    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
-//
-//    //    Test 2: Make sure authentication fails
-//    outContent.reset();
-//    String args2[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-2", "--d" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args2);
-//    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
-//
-//
-//    //    Test 3: Make sure authentication is successful
-//    outContent.reset();
-//    String args3[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-3", "--d" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args3);
-//    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
-//    assertThat(outContent.toString(), containsString("Unable to successfully bind to LDAP server with topology credentials"));
-//
-//    //    Test 4: Assert that we get a username/password not present error is printed
-//    outContent.reset();
-//    String args4[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-4" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args4);
-//    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemUsername is not present"));
-//    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemPassword is not present"));
-//
-//
-//    //    Test 5: Assert that we get a username/password not present error is printed
-//    outContent.reset();
-//    String args5[] = { "system-user-auth-test", "--master", "knox", "--cluster", "not-a-cluster" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args5);
-//    assertThat(outContent.toString(), containsString("Topology not-a-cluster does not exist"));
-//
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+
+  private static Class RESOURCE_BASE_CLASS = KnoxCliSysBindTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( KnoxCliSysBindTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayTestConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
+  private static final String uuid = UUID.randomUUID().toString();
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    System.setOut(new PrintStream(outContent));
+    System.setErr(new PrintStream(errContent));
+    setupLdap();
+    setupGateway();
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    ldap.stop( true );
+
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static void setupLdap( ) throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    writeTopology(topoDir, "test-cluster-1.xml", "guest", "guest-password", true);
+    writeTopology(topoDir, "test-cluster-2.xml", "sam", "sam-password", true);
+    writeTopology(topoDir, "test-cluster-3.xml", "admin", "admin-password", true);
+    writeTopology(topoDir, "test-cluster-4.xml", "", "", false);
+
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+  }
+
+  private static void writeTopology(File topoDir, String name, String user, String pass, boolean goodTopology) throws Exception {
+    File descriptor = new File(topoDir, name);
+
+    if(descriptor.exists()){
+      descriptor.delete();
+      descriptor = new File(topoDir, name);
+    }
+
+    FileOutputStream stream = new FileOutputStream( descriptor, false );
+
+    if(goodTopology) {
+      createTopology(user, pass).toStream( stream );
+    } else {
+      createBadTopology().toStream( stream );
+    }
+
+    stream.close();
+
+  }
+
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  private static XMLTag createBadTopology(){
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag( "gateway" )
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag( "name").addText( "urls./**")
+        .addTag("value").addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service")
+        .addTag("role").addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static XMLTag createTopology(String username, String password) {
+
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway")
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapGroupContextFactory")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.searchBase")
+        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.groupObjectClass")
+        .addTag("value").addText("groupOfNames").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapRealm.memberAttribute")
+        .addTag("value").addText("member").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
+        .addTag("value").addText("uid=" + username + ",ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
+        .addTag( "value").addText(password).gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name" ).addText("urls./**")
+        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider" )
+        .addTag("role").addText( "identity-assertion" )
+        .addTag( "enabled").addText( "true" )
+        .addTag("name").addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testLDAPAuth() throws Exception {
+    LOG_ENTER();
+
+//    Test 1: Make sure authentication is successful
+    outContent.reset();
+    String args[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-1", "--d" };
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args);
+    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
+
+    //    Test 2: Make sure authentication fails
+    outContent.reset();
+    String args2[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-2", "--d" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args2);
+    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
+
+
+    //    Test 3: Make sure authentication is successful
+    outContent.reset();
+    String args3[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-3", "--d" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args3);
+    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
+    assertThat(outContent.toString(), containsString("Unable to successfully bind to LDAP server with topology credentials"));
+
+    //    Test 4: Assert that we get a username/password not present error is printed
+    outContent.reset();
+    String args4[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-4" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args4);
+    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemUsername is not present"));
+    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemPassword is not present"));
+
+
+    //    Test 5: Assert that we get a username/password not present error is printed
+    outContent.reset();
+    String args5[] = { "system-user-auth-test", "--master", "knox", "--cluster", "not-a-cluster" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args5);
+    assertThat(outContent.toString(), containsString("Topology not-a-cluster does not exist"));
+
+    LOG_EXIT();
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
index 039ae1c..b2f982b 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
@@ -47,166 +47,166 @@ import static org.hamcrest.xml.HasXPath.hasXPath;
 
 public class OozieServiceDefinitionTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testOozieRewriteRulesForLiteralTemplateValuesBugKnox394() throws Exception {
-//    LOG_ENTER();
-//
-//    // This is a unique part of this test.
-//    String testResource = "oozie-request-with-var.xml";
-//
-//    // Mock out the service url registry which is required for several url rewrite functions to work.
-//    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
-//    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
-//
-//    // Mock out the gateway services registry which is required for several url rewrite functions to work.
-//    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
-//    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
-//
-//    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
-//
-//    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
-//    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
-//
-//    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-//    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
-//    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
-//
-//    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
-//    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
-//    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
-//
-//    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
-//
-//    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
-//
-//    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
-//    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
-//    rulesReader.close();
-//
-//    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
-//
-//    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
-//
-//    InputStream stream = rewriteRequest.getInputStream();
-//
-//    Document document = TestUtils.parseXml( stream );
-//
-//    assertThat( document,
-//        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
-//            equalTo( "${appPath}/workflow.xml" ) ) );
-//
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testOozieRewriteRulesForLiteralComplexTemplateValuesBugKnox394() throws Exception {
-//    LOG_ENTER();
-//
-//    // This is a unique part of this test.
-//    String testResource = "oozie-request-with-complex-var.xml";
-//
-//    // Mock out the service url registry which is required for several url rewrite functions to work.
-//    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
-//    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
-//
-//    // Mock out the gateway services registry which is required for several url rewrite functions to work.
-//    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
-//    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
-//
-//    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
-//
-//    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
-//    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
-//
-//    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-//    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
-//    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
-//
-//    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
-//    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
-//    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
-//
-//    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
-//
-//    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
-//
-//    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
-//    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
-//    rulesReader.close();
-//
-//    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
-//
-//    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
-//
-//    InputStream stream = rewriteRequest.getInputStream();
-//
-//    Document document = TestUtils.parseXml( stream );
-//
-//    assertThat( document,
-//        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
-//            equalTo( "${nameNode}/user/${user.name}/${examplesRoot}/apps/hive" ) ) );
-//
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testOozieRewriteRulesForValuesRelativeToServiceRegistry() throws Exception {
-//    LOG_ENTER();
-//
-//    // This is a unique part of this test.
-//    String testResource = "oozie-request-relative.xml";
-//
-//    // Mock out the service url registry which is required for several url rewrite functions to work.
-//    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
-//    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
-//
-//    // Mock out the gateway services registry which is required for several url rewrite functions to work.
-//    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
-//    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
-//
-//    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
-//
-//    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
-//    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
-//
-//    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-//    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
-//    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
-//
-//    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
-//    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
-//    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
-//
-//    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
-//
-//    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
-//
-//    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
-//    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
-//    rulesReader.close();
-//
-//    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
-//
-//    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
-//
-//    InputStream stream = rewriteRequest.getInputStream();
-//
-//    Document document = TestUtils.parseXml( stream );
-//
-//    assertThat( document,
-//        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
-//            equalTo( "test-scheme://test-host:42/workflow.xml" ) ) );
-//
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testOozieRewriteRulesForLiteralTemplateValuesBugKnox394() throws Exception {
+    LOG_ENTER();
+
+    // This is a unique part of this test.
+    String testResource = "oozie-request-with-var.xml";
+
+    // Mock out the service url registry which is required for several url rewrite functions to work.
+    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
+    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
+
+    // Mock out the gateway services registry which is required for several url rewrite functions to work.
+    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
+    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
+
+    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
+
+    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
+    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
+
+    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
+    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
+    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
+
+    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
+    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
+    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
+
+    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
+
+    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
+
+    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
+    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
+    rulesReader.close();
+
+    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
+
+    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
+
+    InputStream stream = rewriteRequest.getInputStream();
+
+    Document document = TestUtils.parseXml( stream );
+
+    assertThat( document,
+        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
+            equalTo( "${appPath}/workflow.xml" ) ) );
+
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testOozieRewriteRulesForLiteralComplexTemplateValuesBugKnox394() throws Exception {
+    LOG_ENTER();
+
+    // This is a unique part of this test.
+    String testResource = "oozie-request-with-complex-var.xml";
+
+    // Mock out the service url registry which is required for several url rewrite functions to work.
+    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
+    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
+
+    // Mock out the gateway services registry which is required for several url rewrite functions to work.
+    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
+    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
+
+    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
+
+    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
+    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
+
+    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
+    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
+    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
+
+    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
+    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
+    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
+
+    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
+
+    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
+
+    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
+    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
+    rulesReader.close();
+
+    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
+
+    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
+
+    InputStream stream = rewriteRequest.getInputStream();
+
+    Document document = TestUtils.parseXml( stream );
+
+    assertThat( document,
+        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
+            equalTo( "${nameNode}/user/${user.name}/${examplesRoot}/apps/hive" ) ) );
+
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testOozieRewriteRulesForValuesRelativeToServiceRegistry() throws Exception {
+    LOG_ENTER();
+
+    // This is a unique part of this test.
+    String testResource = "oozie-request-relative.xml";
+
+    // Mock out the service url registry which is required for several url rewrite functions to work.
+    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
+    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
+
+    // Mock out the gateway services registry which is required for several url rewrite functions to work.
+    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
+    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
+
+    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
+
+    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
+    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
+
+    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
+    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
+    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
+
+    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
+    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
+    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
+
+    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
+
+    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
+
+    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
+    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
+    rulesReader.close();
+
+    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
+
+    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
+
+    InputStream stream = rewriteRequest.getInputStream();
+
+    Document document = TestUtils.parseXml( stream );
+
+    assertThat( document,
+        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
+            equalTo( "test-scheme://test-host:42/workflow.xml" ) ) );
+
+    LOG_EXIT();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
index e0af24e..7823978 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
@@ -42,438 +42,438 @@ import static org.hamcrest.CoreMatchers.is;
 @Category({FunctionalTests.class, MediumTests.class})
 public class WebHdfsHaFuncTest {
 
-//   private static final long SHORT_TIMEOUT = 1000L;
-//   private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
-//
-//   // Specifies if the test requests should go through the gateway or directly to the services.
-//   // This is frequently used to verify the behavior of the test both with and without the gateway.
-//   private static final boolean USE_GATEWAY = true;
-//
-//   // Specifies if the test requests should be sent to mock services or the real services.
-//   // This is frequently used to verify the behavior of the test both with and without mock services.
-//   private static final boolean USE_MOCK_SERVICES = true;
-//
-//   private static GatewayFuncTestDriver driver = new GatewayFuncTestDriver();
-//
-//   private static MockServer masterServer;
-//
-//   private static MockServer standbyServer;
-//
-//   private static int findFreePort() throws IOException {
-//      ServerSocket socket = new ServerSocket(0);
-//      int port = socket.getLocalPort();
-//      socket.close();
-//      return port;
-//   }
-//
-//   /**
-//    * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
-//    * registry of sorts for all of the services that will be used by the test methods.
-//    * The createTopology method is used to create the topology file that would normally be read from disk.
-//    * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
-//    * <p/>
-//    * This would normally be done once for this suite but the failure tests start affecting each other depending
-//    * on the state the last 'active' url
-//    *
-//    * @throws Exception Thrown if any failure occurs.
-//    */
-//   @Before
-//   public void setup() throws Exception {
-//      LOG_ENTER();
-//      //Log.setLog(new NoOpLogger());
-//      masterServer = new MockServer("master", true);
-//      standbyServer = new MockServer("standby", true);
-//      GatewayTestConfig config = new GatewayTestConfig();
-//      config.setGatewayPath("gateway");
-//      driver.setResourceBase(WebHdfsHaFuncTest.class);
-//      driver.setupLdap(findFreePort());
-//      driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
-//      driver.setupGateway(config, "cluster", createTopology(), USE_GATEWAY);
-//      LOG_EXIT();
-//   }
-//
-//   @After
-//   public void cleanup() throws Exception {
-//      LOG_ENTER();
-//      driver.cleanup();
-//      driver.reset();
-//      masterServer.reset();
-//      standbyServer.reset();
-//      LOG_EXIT();
-//   }
-//
-//   /**
-//    * Creates a topology that is deployed to the gateway instance for the test suite.
-//    * Note that this topology is shared by all of the test methods in this suite.
-//    *
-//    * @return A populated XML structure for a topology file.
-//    */
-//   private static XMLTag createTopology() {
-//      XMLTag xml = XMLDoc.newDocument(true)
-//            .addRoot("topology")
-//            .addTag("gateway")
-//            .addTag("provider")
-//            .addTag("role").addText("webappsec")
-//            .addTag("name").addText("WebAppSec")
-//            .addTag("enabled").addText("true")
-//            .addTag("param")
-//            .addTag("name").addText("csrf.enabled")
-//            .addTag("value").addText("true").gotoParent().gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("authentication")
-//            .addTag("name").addText("ShiroProvider")
-//            .addTag("enabled").addText("true")
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm")
-//            .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//            .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//            .addTag("value").addText(driver.getLdapUrl()).gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//            .addTag("value").addText("simple").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("urls./**")
-//            .addTag("value").addText("authcBasic").gotoParent().gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("identity-assertion")
-//            .addTag("enabled").addText("true")
-//            .addTag("name").addText("Default").gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("authorization")
-//            .addTag("enabled").addText("true")
-//            .addTag("name").addText("AclsAuthz").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("webhdfs-acl")
-//            .addTag("value").addText("hdfs;*;*").gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("ha")
-//            .addTag("enabled").addText("true")
-//            .addTag("name").addText("HaProvider")
-//            .addTag("param")
-//            .addTag("name").addText("WEBHDFS")
-//            .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
-//            .gotoRoot()
-//            .addTag("service")
-//            .addTag("role").addText("WEBHDFS")
-//            .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs")
-//            .addTag("url").addText("http://localhost:" + standbyServer.getPort() + "/webhdfs").gotoParent()
-//            .gotoRoot();
-////     System.out.println( "GATEWAY=" + xml.toString() );
-//      return xml;
-//   }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testBasicListOperation() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-//            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   @Ignore( "KNOX-446" )
-//   public void testFailoverListOperation() throws Exception {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //Shutdown master and expect standby to serve the list response
-//      masterServer.stop();
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-//            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      standbyServer.isEmpty();
-//      masterServer.start();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testFailoverLimit() throws Exception {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //Shutdown master and expect standby to serve the list response
-//      masterServer.stop();
-//      standbyServer.stop();
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-////            .log().ifError()
-//            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      standbyServer.start();
-//      masterServer.start();
-//      LOG_EXIT();
-//   }
-//
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   @Ignore( "KNOX-446" )
-//   public void testServerInStandby() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //make master the server that is in standby
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      //standby server is 'active' in this test case and serves the list response
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
+   private static final long SHORT_TIMEOUT = 1000L;
+   private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
+
+   // Specifies if the test requests should go through the gateway or directly to the services.
+   // This is frequently used to verify the behavior of the test both with and without the gateway.
+   private static final boolean USE_GATEWAY = true;
+
+   // Specifies if the test requests should be sent to mock services or the real services.
+   // This is frequently used to verify the behavior of the test both with and without mock services.
+   private static final boolean USE_MOCK_SERVICES = true;
+
+   private static GatewayFuncTestDriver driver = new GatewayFuncTestDriver();
+
+   private static MockServer masterServer;
+
+   private static MockServer standbyServer;
+
+   private static int findFreePort() throws IOException {
+      ServerSocket socket = new ServerSocket(0);
+      int port = socket.getLocalPort();
+      socket.close();
+      return port;
+   }
+
+   /**
+    * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
+    * registry of sorts for all of the services that will be used by the test methods.
+    * The createTopology method is used to create the topology file that would normally be read from disk.
+    * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
+    * <p/>
+    * This would normally be done once for this suite, but the failure tests start affecting each other depending
+    * on the state of the last 'active' URL.
+    *
+    * @throws Exception Thrown if any failure occurs.
+    */
+   @Before
+   public void setup() throws Exception {
+      LOG_ENTER();
+      //Log.setLog(new NoOpLogger());
+      masterServer = new MockServer("master", true);
+      standbyServer = new MockServer("standby", true);
+      GatewayTestConfig config = new GatewayTestConfig();
+      config.setGatewayPath("gateway");
+      driver.setResourceBase(WebHdfsHaFuncTest.class);
+      driver.setupLdap(findFreePort());
+      driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
+      driver.setupGateway(config, "cluster", createTopology(), USE_GATEWAY);
+      LOG_EXIT();
+   }
+
+   @After
+   public void cleanup() throws Exception {
+      LOG_ENTER();
+      driver.cleanup();
+      driver.reset();
+      masterServer.reset();
+      standbyServer.reset();
+      LOG_EXIT();
+   }
+
+   /**
+    * Creates a topology that is deployed to the gateway instance for the test suite.
+    * Note that this topology is shared by all of the test methods in this suite.
+    *
+    * @return A populated XML structure for a topology file.
+    */
+   private static XMLTag createTopology() {
+      XMLTag xml = XMLDoc.newDocument(true)
+            .addRoot("topology")
+            .addTag("gateway")
+            .addTag("provider")
+            .addTag("role").addText("webappsec")
+            .addTag("name").addText("WebAppSec")
+            .addTag("enabled").addText("true")
+            .addTag("param")
+            .addTag("name").addText("csrf.enabled")
+            .addTag("value").addText("true").gotoParent().gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("authentication")
+            .addTag("name").addText("ShiroProvider")
+            .addTag("enabled").addText("true")
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm")
+            .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.userDnTemplate")
+            .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.contextFactory.url")
+            .addTag("value").addText(driver.getLdapUrl()).gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+            .addTag("value").addText("simple").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("urls./**")
+            .addTag("value").addText("authcBasic").gotoParent().gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("identity-assertion")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("Default").gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("authorization")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("AclsAuthz").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("webhdfs-acl")
+            .addTag("value").addText("hdfs;*;*").gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("ha")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("HaProvider")
+            .addTag("param")
+            .addTag("name").addText("WEBHDFS")
+            .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
+            .gotoRoot()
+            .addTag("service")
+            .addTag("role").addText("WEBHDFS")
+            .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs")
+            .addTag("url").addText("http://localhost:" + standbyServer.getPort() + "/webhdfs").gotoParent()
+            .gotoRoot();
+//     System.out.println( "GATEWAY=" + xml.toString() );
+      return xml;
+   }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+   public void testBasicListOperation() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   @Ignore( "KNOX-446" )
+   public void testFailoverListOperation() throws Exception {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //Shutdown master and expect standby to serve the list response
+      masterServer.stop();
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      standbyServer.isEmpty();
+      masterServer.start();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testFailoverLimit() throws Exception {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //Shutdown both master and standby so the request exhausts the failover limit and fails
+      masterServer.stop();
+      standbyServer.stop();
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
 //            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      masterServer.isEmpty();
-//      standbyServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInStandbyFailoverLimit() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //make master the server that is in standby
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-////            .log().ifError()
-//            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      masterServer.isEmpty();
-//      standbyServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInSafeMode() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //master is in safe mode
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .expect()
+            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      standbyServer.start();
+      masterServer.start();
+      LOG_EXIT();
+   }
+
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   @Ignore( "KNOX-446" )
+   public void testServerInStandby() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //make master the server that is in standby
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      //standby server is 'active' in this test case and serves the list response
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+      standbyServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInStandbyFailoverLimit() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //make master the server that is in standby
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
 //            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("boolean", is(true))
-//            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInSafeModeRetriableException() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //master is in safe mode
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/new")
-//            .queryParam("op", "MKDIRS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-mkdirs-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/new")
-//            .queryParam("op", "MKDIRS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "MKDIRS")
-//            .expect()
+            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+      standbyServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInSafeMode() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //master is in safe mode
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("boolean", is(true))
+            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInSafeModeRetriableException() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //master is in safe mode
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/new")
+            .queryParam("op", "MKDIRS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-mkdirs-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/new")
+            .queryParam("op", "MKDIRS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "MKDIRS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("boolean", is(true))
+            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/new");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInSafeModeRetryLimit() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //master is in safe mode
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .expect()
 //            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("boolean", is(true))
-//            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/new");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInSafeModeRetryLimit() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //master is in safe mode
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .expect()
-////            .log().ifError()
-//            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
-//            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
+            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
+            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
 }


Mime
View raw message