knox-commits mailing list archives

From dillido...@apache.org
Subject git commit: KNOX-216 : add functional tests to test ldap group lookup and usage
Date Thu, 05 Dec 2013 19:24:55 GMT
Updated Branches:
  refs/heads/master 11b5c64fd -> 841be250a


KNOX-216 : add functional tests to test ldap group lookup and usage


Project: http://git-wip-us.apache.org/repos/asf/incubator-knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-knox/commit/841be250
Tree: http://git-wip-us.apache.org/repos/asf/incubator-knox/tree/841be250
Diff: http://git-wip-us.apache.org/repos/asf/incubator-knox/diff/841be250

Branch: refs/heads/master
Commit: 841be250a382207abbed2a72e3b765b66ae1be0e
Parents: 11b5c64
Author: Dilli Dorai Arumugam <darumugam@hortonworks.com>
Authored: Wed Dec 4 15:02:56 2013 -0800
Committer: Dilli Dorai Arumugam <darumugam@hortonworks.com>
Committed: Thu Dec 5 11:22:37 2013 -0800

----------------------------------------------------------------------
 .../gateway/GatewayLdapGroupFuncTest.java       | 273 +++++++++++++++++++
 .../gateway/GatewayLdapGroupFuncTest/users.ldif |  94 +++++++
 2 files changed, 367 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/841be250/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
new file mode 100644
index 0000000..edfab66
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
@@ -0,0 +1,273 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.hadoop.gateway.services.DefaultGatewayServices;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.http.HttpStatus;
+import org.apache.log4j.Appender;
+import org.hamcrest.MatcherAssert;
+import org.hamcrest.Matchers;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.URL;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import static com.jayway.restassured.RestAssured.given;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Functional test to verify that LDAP groups are looked up from the
+ * directory and used in ACL authorization checks.
+ *
+ */
+public class GatewayLdapGroupFuncTest {
+
+  private static Class RESOURCE_BASE_CLASS = GatewayLdapGroupFuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapGroupFuncTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    //appenders = NoOpAppender.setUp();
+    int port = setupLdap();
+    setupGateway(port);
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    gateway.stop();
+    ldap.stop( true );
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+  }
+
+  public static int setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+    return port;
+  }
+
+  public static void setupGateway(int ldapPort) throws IOException {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+    testConfig.setDeploymentDir( "clusters" );
+
+    File deployDir = new File( gatewayDir, testConfig.getDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( deployDir, "test-cluster.xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    createTopology(ldapPort).toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/test-cluster";
+  }
+
+  private static XMLTag createTopology(int ldapPort) {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+        
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm"
)
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory"
)
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
+        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism"
)
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:"  + ldapPort)
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
+        .addTag( "value" ).addText( "true" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism"
)
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
+        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
+        .addTag( "value" ).addText( "groupofnames" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.uniqueMemberAttribute" )
+        .addTag( "value" ).addText( "member" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.uniqueMemberAttributeValueTemplate" )
+        .addTag( "value" ).addText( "cn={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
+        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
+        .addTag( "value" ).addText( "guest-password" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" )
+        
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "name" ).addText( "AclsAuthz" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "test-service-role.acl" ) // FIXME[dilli]
+        .addTag( "value" ).addText( "*;analyst;*" )
+        
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Pseudo" ).gotoParent()
+        
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+         // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue()
);
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  @Ignore
+  // @Test
+  public void waitForManualTesting() throws IOException {
+    System.in.read();
+  }
+
+  @Test
+  public void testGroupMember() throws ClassNotFoundException {
+
+    String username = "sam";
+    String password = "sam-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_OK )
+        .contentType( "text/plain" )
+        .body( is( "test-service-response" ) )
+        .when().get( serviceUrl );
+  }
+  
+  @Test
+  public void testNonGroupMember() throws ClassNotFoundException {
+
+    String username = "guest";
+    String password = "guest-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_UNAUTHORIZED )
+        .when().get( serviceUrl );
+  }
+
+}
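
For orientation, the realm parameters wired up in createTopology() above (searchBase, groupObjectClass, uniqueMemberAttribute and the system credentials) describe an LDAP group search. The following is only a rough standalone sketch of that search using plain JNDI; it is not part of this commit, the class name LdapGroupLookupSketch is made up for illustration, and the port must be taken from the "LDAP port = ..." line that setupLdap() logs, since the test picks a free port at runtime.

import java.util.Hashtable;

import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;

public class LdapGroupLookupSketch {

  public static void main( String[] args ) throws Exception {
    // Placeholder: pass the port logged by setupLdap() as the first argument.
    int ldapPort = Integer.parseInt( args[ 0 ] );

    // Bind with the same system credentials configured via
    // main.ldapRealm.contextFactory.systemUsername / systemPassword.
    Hashtable<String,String> env = new Hashtable<String,String>();
    env.put( Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory" );
    env.put( Context.PROVIDER_URL, "ldap://localhost:" + ldapPort );
    env.put( Context.SECURITY_AUTHENTICATION, "simple" );
    env.put( Context.SECURITY_PRINCIPAL, "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" );
    env.put( Context.SECURITY_CREDENTIALS, "guest-password" );

    InitialDirContext ctx = new InitialDirContext( env );
    try {
      SearchControls controls = new SearchControls();
      controls.setSearchScope( SearchControls.SUBTREE_SCOPE );

      // Find every groupofnames entry under the configured searchBase whose
      // member attribute contains sam's DN, mirroring
      // main.ldapRealm.searchBase / groupObjectClass / uniqueMemberAttribute.
      NamingEnumeration<SearchResult> results = ctx.search(
          "ou=groups,dc=hadoop,dc=apache,dc=org",
          "(&(objectclass=groupofnames)(member=cn=sam,ou=people,dc=hadoop,dc=apache,dc=org))",
          controls );
      while( results.hasMore() ) {
        System.out.println( "group: " + results.next().getNameInNamespace() );
      }
    } finally {
      ctx.close();
    }
  }
}

Run against the template users.ldif added below, a search for sam should return cn=analyst and cn=scientist, which is what lets testGroupMember() satisfy the "*;analyst;*" ACL, while guest belongs to no group and is rejected in testNonGroupMember().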

http://git-wip-us.apache.org/repos/asf/incubator-knox/blob/841be250/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest/users.ldif
b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest/users.ldif
new file mode 100644
index 0000000..f10851a
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest/users.ldif
@@ -0,0 +1,94 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This LDIF file is provided as a template to illustrate
+# the use of LDAP groups.
+
+version: 1
+
+# Please replace with site specific values
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+# entry for a sample people container
+# please replace with site specific values
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+# entry for a sample end user
+# please replace with site specific values
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: User
+uid: guest
+userPassword:guest-password
+
+# entry for sample user sam
+dn: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: sam
+sn: sam
+uid: sam
+userPassword:sam-password
+
+# entry for sample user tom
+dn: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: tom
+sn: tom
+uid: tom
+userPassword:tom-password
+
+# create the first-level groups branch
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+description: generic groups branch
+
+# create the analyst group under groups
+dn: cn=analyst,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass: groupofnames
+cn: analyst
+description: analyst group
+member: cn=sam,ou=people,dc=hadoop,dc=apache,dc=org
+member: cn=tom,ou=people,dc=hadoop,dc=apache,dc=org
+
+
+# create the scientist group under groups
+dn: cn=scientist,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass: groupofnames
+cn: scientist
+description: scientist group
+member: cn=sam,ou=people,dc=hadoop,dc=apache,dc=org
+
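
One subtlety in this template: the user entries above have uid-based DNs, while the analyst and scientist groups list their members with cn-based DNs. The topology built in GatewayLdapGroupFuncTest bridges the two with main.ldapRealm.userDnTemplate and main.ldapRealm.uniqueMemberAttributeValueTemplate. The sketch below only illustrates that substitution; LdapTemplateSketch is a made-up name, and the real expansion happens inside the realm implementation rather than with String.replace.

public class LdapTemplateSketch {

  public static void main( String[] args ) {
    String principal = "sam";

    // Values copied from createTopology() in GatewayLdapGroupFuncTest above.
    String userDnTemplate = "uid={0},ou=people,dc=hadoop,dc=apache,dc=org";
    String memberTemplate = "cn={0},ou=people,dc=hadoop,dc=apache,dc=org";

    // The login name is substituted into both templates.
    String bindDn   = userDnTemplate.replace( "{0}", principal );
    String memberDn = memberTemplate.replace( "{0}", principal );

    // bindDn   -> uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
    //             (the user entry defined in this file)
    // memberDn -> cn=sam,ou=people,dc=hadoop,dc=apache,dc=org
    //             (the value listed in the member attribute of
    //              cn=analyst and cn=scientist above)
    System.out.println( "bind DN:          " + bindDn );
    System.out.println( "group member key: " + memberDn );
  }
}

Both DNs must line up with the entries in this file for the group lookup, and therefore the ACL check, to succeed.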

