knox-commits mailing list archives

From kmin...@apache.org
Subject [1/4] KNOX-88: Support HDFS HA
Date Fri, 15 Aug 2014 20:41:29 GMT
Repository: knox
Updated Branches:
  refs/heads/master 93b56251f -> 9aaeeed17


http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
new file mode 100644
index 0000000..3d0a55d
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
@@ -0,0 +1,286 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.hadoop.test.category.FunctionalTests;
+import org.apache.hadoop.test.category.MediumTests;
+import org.apache.hadoop.test.log.NoOpLogger;
+import org.apache.hadoop.test.mock.MockServer;
+import org.apache.http.HttpStatus;
+import org.eclipse.jetty.util.log.Log;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+
+import static com.jayway.restassured.RestAssured.given;
+import static org.hamcrest.CoreMatchers.is;
+
+@Category({FunctionalTests.class, MediumTests.class})
+public class WebHdfsHaFuncTest {
+
+   // Specifies if the test requests should go through the gateway or directly to the services.
+   // This is frequently used to verify the behavior of the test both with and without the gateway.
+   private static final boolean USE_GATEWAY = true;
+
+   // Specifies if the test requests should be sent to mock services or the real services.
+   // This is frequently used to verify the behavior of the test both with and without mock services.
+   private static final boolean USE_MOCK_SERVICES = true;
+
+   private static GatewayFuncTestDriver driver = new GatewayFuncTestDriver();
+
+   private static MockServer masterServer;
+
+   private static MockServer standbyServer;
+
+   private static int findFreePort() throws IOException {
+      ServerSocket socket = new ServerSocket(0);
+      int port = socket.getLocalPort();
+      socket.close();
+      return port;
+   }
+
+   /**
+    * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
+    * registry of sorts for all of the services that will be used by the test methods.
+    * The createTopology method is used to create the topology file that would normally be read from disk.
+    * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
+    * <p/>
+    * This would normally be done once for this suite, but the failure tests start affecting each other depending
+    * on the state of the last 'active' URL.
+    *
+    * @throws Exception Thrown if any failure occurs.
+    */
+   @Before
+   public void setup() throws Exception {
+      Log.setLog(new NoOpLogger());
+      masterServer = new MockServer("master", true);
+      standbyServer = new MockServer("standby", true);
+      GatewayTestConfig config = new GatewayTestConfig();
+      config.setGatewayPath("gateway");
+      driver.setResourceBase(WebHdfsHaFuncTest.class);
+      driver.setupLdap(findFreePort());
+      driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
+      driver.setupGateway(config, "cluster", createTopology(), USE_GATEWAY);
+   }
+
+   @After
+   public void cleanup() throws Exception {
+      driver.cleanup();
+      driver.reset();
+      masterServer.reset();
+      standbyServer.reset();
+   }
+
+   /**
+    * Creates a topology that is deployed to the gateway instance for the test suite.
+    * Note that this topology is shared by all of the test methods in this suite.
+    *
+    * @return A populated XML structure for a topology file.
+    */
+   private static XMLTag createTopology() {
+      XMLTag xml = XMLDoc.newDocument(true)
+            .addRoot("topology")
+            .addTag("gateway")
+            .addTag("provider")
+            .addTag("role").addText("webappsec")
+            .addTag("name").addText("WebAppSec")
+            .addTag("enabled").addText("true")
+            .addTag("param")
+            .addTag("name").addText("csrf.enabled")
+            .addTag("value").addText("true").gotoParent().gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("authentication")
+            .addTag("name").addText("ShiroProvider")
+            .addTag("enabled").addText("true")
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm")
+            .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.userDnTemplate")
+            .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.contextFactory.url")
+            .addTag("value").addText(driver.getLdapUrl()).gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+            .addTag("value").addText("simple").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("urls./**")
+            .addTag("value").addText("authcBasic").gotoParent().gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("identity-assertion")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("Pseudo").gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("authorization")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("AclsAuthz").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("webhdfs-acl")
+            .addTag("value").addText("hdfs;*;*").gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("ha")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("HaProvider")
+            .addTag("param")
+            .addTag("name").addText("WEBHDFS")
+            .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
+            .gotoRoot()
+            .addTag("service")
+            .addTag("role").addText("WEBHDFS")
+            .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs")
+            .addTag("url").addText("http://localhost:" + standbyServer.getPort() + "/webhdfs").gotoParent()
+            .gotoRoot();
+//     System.out.println( "GATEWAY=" + xml.toString() );
+      return xml;
+   }
+
+   @Test
+   public void testBasicListOperation() throws IOException {
+      String username = "hdfs";
+      String password = "hdfs-password";
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+   }
+
+   @Test
+   public void testFailoverListOperation() throws Exception {
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //Shutdown master and expect standby to serve the list response
+      masterServer.stop();
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      standbyServer.isEmpty();
+      masterServer.start();
+   }
+
+   @Test
+   public void testServerInStandby() throws IOException {
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //make master the server that is in standby
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      //standby server is 'active' in this test case and serves the list response
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+      standbyServer.isEmpty();
+   }
+
+   @Test
+   public void testServerInSafeMode() throws IOException {
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //master is in safe mode
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("boolean", is(true))
+            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
+      masterServer.isEmpty();
+   }
+}
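
A note on the HaProvider parameter built in createTopology above: the WEBHDFS settings are packed into a single semicolon-delimited string, "maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true". As a rough sketch of how such a string could be decoded into typed values — the class and method names below are hypothetical, for illustration only, not Knox's actual parser:

import java.util.HashMap;
import java.util.Map;

// Hypothetical helper: splits an HaProvider param string such as
// "maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true"
// into a key/value map. An illustrative sketch, not Knox's implementation.
public class HaParamSketch {

   public static Map<String, String> parse(String param) {
      Map<String, String> settings = new HashMap<String, String>();
      for (String pair : param.split(";")) {
         String[] kv = pair.split("=", 2);
         if (kv.length == 2) {
            settings.put(kv[0].trim(), kv[1].trim());
         }
      }
      return settings;
   }

   public static void main(String[] args) {
      Map<String, String> s = parse(
            "maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true");
      int maxFailoverAttempts = Integer.parseInt(s.get("maxFailoverAttempts"));
      long failoverSleep = Long.parseLong(s.get("failoverSleep"));
      System.out.println(maxFailoverAttempts + " failover attempts, " + failoverSleep + "ms between them");
   }
}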

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
index c9e8a76..a93eb33 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
@@ -82,7 +82,7 @@ public class DeploymentFactoryFuncTest {
     topology.setName( "test-cluster" );
     Service service = new Service();
     service.setRole( "WEBHDFS" );
-    service.setUrl( "http://localhost:50070/test-service-url" );
+    service.addUrl( "http://localhost:50070/test-service-url" );
     topology.addService( service );
 
     Provider provider = new Provider();
@@ -136,7 +136,7 @@ public class DeploymentFactoryFuncTest {
     topology.setName( "test-cluster" );
     Service service = new Service();
     service.setRole( "WEBHDFS" );
-    service.setUrl( "http://localhost:50070/test-service-url" );
+    service.addUrl( "http://localhost:50070/test-service-url" );
     topology.addService( service );
 
     Provider provider = new Provider();
@@ -190,7 +190,7 @@ public class DeploymentFactoryFuncTest {
     topology.setName( "test-cluster" );
     Service service = new Service();
     service.setRole( "WEBHDFS" );
-    service.setUrl( "http://localhost:50070/webhdfs" );
+    service.addUrl( "http://localhost:50070/webhdfs" );
     topology.addService( service );
     Provider provider = new Provider();
     provider.setRole( "authentication" );

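The DeploymentFactoryFuncTest changes above track an API shift in the topology model: a Service no longer holds a single URL but a list of them, which is what lets the HA topology declare both the master and standby WebHDFS endpoints. A minimal sketch of the shape that change implies (field and method names assumed for illustration, not copied from Knox's Service class):

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch of a multi-URL service model; the real Knox class
// differs, but the setUrl -> addUrl change in the diff implies this shape.
class ServiceSketch {
   private String role;
   private final List<String> urls = new ArrayList<String>();

   void setRole(String role) { this.role = role; }

   String getRole() { return role; }

   // New style: a service may register several URLs, e.g. master and standby.
   void addUrl(String url) { urls.add(url); }

   List<String> getUrls() { return urls; }
}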
http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/users.ldif b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/users.ldif
new file mode 100644
index 0000000..e4cc143
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/users.ldif
@@ -0,0 +1,121 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+version: 1
+
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+
+dn: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: CraigWalls
+sn: Walls
+uid: allowedUser
+userPassword:password
+
+dn: uid=hdfs,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: LarryWalls
+sn: Walls
+uid: hdfs
+userPassword:hdfs-password
+
+dn: uid=mapred,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: MapReduceUser
+sn: User
+uid: mapred
+userPassword:mapred-password
+
+dn: uid=hive,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: HiveUser
+sn: User
+uid: hive
+userPassword:hive-password
+
+dn: uid=hcat,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: HCatUser
+sn: User
+uid: hcat
+userPassword:hcat-password
+
+dn: uid=hbase,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: HBaseUser
+sn: User
+uid: hbase
+userPassword:hbase-password
+
+dn: uid=kminder,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: KevinMinder
+sn: Minder
+uid: kminder
+userPassword:kminder-password
+
+dn: uid=deniedUser,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: JohnSmith
+sn: Smith
+uid: deniedUser
+userPassword:password
+
+dn: cn=admin,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:groupOfNames
+cn: admin
+member: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-standby.json
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-standby.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-standby.json
new file mode 100644
index 0000000..541c64d
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-standby.json
@@ -0,0 +1,5 @@
+{"RemoteException": {
+    "exception": "StandbyException",
+    "javaClassName": "org.apache.hadoop.ipc.StandbyException",
+    "message": "Operation category READ is not supported in state standby"
+}}
\ No newline at end of file
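
The standby response body above is what HA dispatch must recognize before failing over: WebHDFS reports a RemoteException whose exception field is "StandbyException". A hedged sketch of that detection using Jackson (the helper below is hypothetical and for illustration; Knox's actual failover detection may work differently):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical check: returns true when a WebHDFS error body names
// StandbyException, i.e. the node we hit is not the active NameNode.
public class StandbyCheckSketch {
   private static final ObjectMapper MAPPER = new ObjectMapper();

   public static boolean isStandby(String body) throws Exception {
      JsonNode remote = MAPPER.readTree(body).path("RemoteException");
      return "StandbyException".equals(remote.path("exception").asText());
   }
}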

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-success.json
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-success.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-success.json
new file mode 100644
index 0000000..7197320
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-liststatus-success.json
@@ -0,0 +1,88 @@
+{"FileStatuses": {
+    "FileStatus": [
+        {
+            "accessTime": 0,
+            "blockSize": 0,
+            "childrenNum": 1,
+            "fileId": 16394,
+            "group": "hadoop",
+            "length": 0,
+            "modificationTime": 1406152272147,
+            "owner": "yarn",
+            "pathSuffix": "app-logs",
+            "permission": "777",
+            "replication": 0,
+            "type": "DIRECTORY"
+        },
+        {
+            "accessTime": 0,
+            "blockSize": 0,
+            "childrenNum": 4,
+            "fileId": 16389,
+            "group": "hdfs",
+            "length": 0,
+            "modificationTime": 1406152166119,
+            "owner": "hdfs",
+            "pathSuffix": "apps",
+            "permission": "755",
+            "replication": 0,
+            "type": "DIRECTORY"
+        },
+        {
+            "accessTime": 0,
+            "blockSize": 0,
+            "childrenNum": 1,
+            "fileId": 16395,
+            "group": "hdfs",
+            "length": 0,
+            "modificationTime": 1406151981789,
+            "owner": "mapred",
+            "pathSuffix": "mapred",
+            "permission": "755",
+            "replication": 0,
+            "type": "DIRECTORY"
+        },
+        {
+            "accessTime": 0,
+            "blockSize": 0,
+            "childrenNum": 2,
+            "fileId": 16397,
+            "group": "hdfs",
+            "length": 0,
+            "modificationTime": 1406151981796,
+            "owner": "hdfs",
+            "pathSuffix": "mr-history",
+            "permission": "755",
+            "replication": 0,
+            "type": "DIRECTORY"
+        },
+        {
+            "accessTime": 0,
+            "blockSize": 0,
+            "childrenNum": 3,
+            "fileId": 16386,
+            "group": "hdfs",
+            "length": 0,
+            "modificationTime": 1406730902050,
+            "owner": "hdfs",
+            "pathSuffix": "tmp",
+            "permission": "777",
+            "replication": 0,
+            "type": "DIRECTORY"
+        },
+        {
+            "accessTime": 0,
+            "blockSize": 0,
+            "childrenNum": 4,
+            "fileId": 16387,
+            "group": "hdfs",
+            "length": 0,
+            "modificationTime": 1406152166125,
+            "owner": "hdfs",
+            "pathSuffix": "user",
+            "permission": "755",
+            "replication": 0,
+            "type": "DIRECTORY"
+        }
+    ]
+}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode-off.json
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode-off.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode-off.json
new file mode 100644
index 0000000..62aa3be
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode-off.json
@@ -0,0 +1 @@
+{"boolean":true}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode.json
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode.json
new file mode 100644
index 0000000..82eb322
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/WebHdfsHaFuncTest/webhdfs-rename-safemode.json
@@ -0,0 +1,5 @@
+{"RemoteException": {
+    "exception": "SafeModeException",
+    "javaClassName": "org.apache.hadoop.hdfs.server.namenode.SafeModeException",
+    "message": "Cannot rename /user/sumit.gupta/shellCommands.sh. Name node is in safe mode.\nIt
was turned on manually. Use \"hdfs dfsadmin -safemode leave\" to turn safe mode off."
+}}
\ No newline at end of file
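
The safe-mode fixture above pairs with testServerInSafeMode: the first RENAME against the master returns 403 and the retry against the same server succeeds, which exercises the retry (as opposed to failover) half of the HA settings. A rough sketch of that loop under the assumed semantics of maxRetryAttempts and retrySleep (the names come from the topology param; the control flow here is illustrative, not Knox's dispatch code):

// Illustrative retry loop: re-issue the request against the SAME endpoint
// up to maxRetryAttempts times, sleeping retrySleep ms between attempts.
// (Failover, by contrast, would move on to the next URL in the list.)
public class RetrySketch {

   interface Request {
      int execute() throws Exception; // returns an HTTP status code
   }

   static int executeWithRetry(Request request, int maxRetryAttempts, long retrySleep)
         throws Exception {
      int status = request.execute();
      int attempts = 0;
      while (status == 403 && attempts < maxRetryAttempts) {
         attempts++;
         Thread.sleep(retrySleep);
         status = request.execute();
      }
      return status;
   }
}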

http://git-wip-us.apache.org/repos/asf/knox/blob/9aaeeed1/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 9e768e0..4ffe2ea 100644
--- a/pom.xml
+++ b/pom.xml
@@ -54,6 +54,7 @@
         <module>gateway-provider-security-authz-acls</module>
         <module>gateway-provider-identity-assertion-pseudo</module>
         <module>gateway-provider-jersey</module>
+        <module>gateway-provider-ha</module>
         <module>gateway-service-admin</module>
         <module>gateway-service-as</module>
         <module>gateway-service-hbase</module>
@@ -435,6 +436,11 @@
             </dependency>
             <dependency>
                 <groupId>${gateway-group}</groupId>
+                <artifactId>gateway-provider-ha</artifactId>
+                <version>${gateway-version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${gateway-group}</groupId>
                 <artifactId>gateway-service-tgs</artifactId>
                 <version>${gateway-version}</version>
             </dependency>

