knox-commits mailing list archives

From: su...@apache.org
Subject: knox git commit: KNOX-524 Added template for shiro config and made cache key include credentials
Date: Thu, 26 Mar 2015 19:50:51 GMT
Repository: knox
Updated Branches:
  refs/heads/master 30e5b9fd7 -> 32feae7fe


KNOX-524 Added template for shiro config and made cache key include credentials


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/32feae7f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/32feae7f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/32feae7f

Branch: refs/heads/master
Commit: 32feae7fedfe20b3bf8d328a63957b695b74b3a9
Parents: 30e5b9f
Author: Sumit Gupta <sumit@apache.org>
Authored: Thu Mar 26 15:47:29 2015 -0400
Committer: Sumit Gupta <sumit@apache.org>
Committed: Thu Mar 26 15:47:29 2015 -0400

----------------------------------------------------------------------
 .../filter/ShiroSubjectIdentityAdapter.java     |  19 +-
 .../gateway/shirorealm/KnoxLdapRealm.java       |  30 ++-
 .../templates/sandbox.knoxrealm.cacheon.xml     | 219 +++++++++++++++++++
 3 files changed, 256 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/32feae7f/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
index 2f0de73..11a0780 100644
--- a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
@@ -103,17 +103,20 @@ public class ShiroSubjectIdentityAdapter implements Filter {
       auditService.getContext().setUsername( principal ); //KM: Audit Fix
       String sourceUri = (String)request.getAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME );
       auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS );
-      
+
+      Set<String> userGroups = null;
       // map ldap groups saved in session to Java Subject GroupPrincipal(s)
       if (SecurityUtils.getSubject().getSession().getAttribute(SUBJECT_USER_GROUPS) != null) {
-        Set<String> userRoles = (Set<String>)SecurityUtils.getSubject().getSession().getAttribute(SUBJECT_USER_GROUPS);
-        for (String userRole : userRoles) {
-          Principal gp = new GroupPrincipal(userRole);
-          principals.add(gp);
-        }
-        auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS, "Groups: " + userRoles );
+        userGroups = (Set<String>)SecurityUtils.getSubject().getSession().getAttribute(SUBJECT_USER_GROUPS);
+      } else {
+        userGroups = new HashSet<String>(shiroSubject.getPrincipals().asSet());
+        userGroups.remove(principal);
       }
-      
+      for (String userGroup : userGroups) {
+        Principal gp = new GroupPrincipal(userGroup);
+        principals.add(gp);
+      }
+      auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS, "Groups: " + userGroups );
       
 //      The newly constructed Sets check whether this Subject has been set read-only 
 //      before permitting subsequent modifications. The newly created Sets also prevent 

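The net effect of the change above: when the SUBJECT_USER_GROUPS session attribute is absent (for example because the realm answered from its cache and skipped the LDAP lookup that populates the session), the filter now falls back to deriving the groups from the Shiro principal collection itself, minus the primary (user) principal. A minimal, self-contained sketch of that fallback follows; it uses only Shiro's SimplePrincipalCollection, and the realm name, user, and group values are invented for illustration rather than taken from this commit.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.shiro.subject.SimplePrincipalCollection;

public class GroupFallbackSketch {
  public static void main(String[] args) {
    // Hypothetical principal collection standing in for what the realm would supply:
    // the primary principal is the user name, the extra principals are group names.
    SimplePrincipalCollection principals = new SimplePrincipalCollection("guest", "ldapRealm");
    principals.addAll(Arrays.asList("analyst", "scientist"), "ldapRealm");

    // Same technique as the fallback branch above: take all principals and drop the user.
    String principal = (String) principals.getPrimaryPrincipal();
    Set<String> userGroups = new HashSet<String>(principals.asSet());
    userGroups.remove(principal);

    System.out.println(userGroups); // prints something like [analyst, scientist]
  }
}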
http://git-wip-us.apache.org/repos/asf/knox/blob/32feae7f/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
index 00c0d68..96f7752 100644
--- a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
@@ -42,11 +42,17 @@ import javax.naming.ldap.LdapName;
 import org.apache.hadoop.gateway.GatewayMessages;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.shiro.SecurityUtils;
+import org.apache.shiro.authc.AuthenticationToken;
+import org.apache.shiro.authc.UsernamePasswordToken;
 import org.apache.shiro.authz.AuthorizationInfo;
 import org.apache.shiro.authz.SimpleAuthorizationInfo;
+import org.apache.shiro.crypto.hash.DefaultHashService;
+import org.apache.shiro.crypto.hash.HashRequest;
+import org.apache.shiro.crypto.hash.HashService;
 import org.apache.shiro.realm.ldap.JndiLdapRealm;
 import org.apache.shiro.realm.ldap.LdapContextFactory;
 import org.apache.shiro.realm.ldap.LdapUtils;
+import org.apache.shiro.subject.MutablePrincipalCollection;
 import org.apache.shiro.subject.PrincipalCollection;
 import org.apache.shiro.util.StringUtils;
 
@@ -139,10 +145,11 @@ public class KnoxLdapRealm extends JndiLdapRealm {
     private String userSearchAttributeName;
     private String userObjectClass = "person";
 
+    private HashService hashService = new DefaultHashService();
 
     public KnoxLdapRealm() {
     }
-    
+
     /**
      * Get groups from LDAP.
      * 
@@ -169,14 +176,14 @@ public class KnoxLdapRealm extends JndiLdapRealm {
         return simpleAuthorizationInfo;
     }
 
-    private Set<String> getRoles(final PrincipalCollection principals, 
+    private Set<String> getRoles(PrincipalCollection principals,
         final LdapContextFactory ldapContextFactory) throws NamingException {
         final String username = (String) getAvailablePrincipal(principals);
 
         LdapContext systemLdapCtx = null;
         try {
             systemLdapCtx = ldapContextFactory.getSystemLdapContext();
-            return rolesFor(username, systemLdapCtx, ldapContextFactory);
+            return rolesFor(principals, username, systemLdapCtx, ldapContextFactory);
         } catch (AuthenticationException e) {
           LOG.failedToGetSystemLdapConnection(e);
           return Collections.emptySet();
@@ -185,7 +192,7 @@ public class KnoxLdapRealm extends JndiLdapRealm {
         }
     }
 
-    private Set<String> rolesFor(final String userName, final LdapContext ldapCtx,
+    private Set<String> rolesFor(PrincipalCollection principals, final String userName, final LdapContext ldapCtx,
         final LdapContextFactory ldapContextFactory) throws NamingException {
         final Set<String> roleNames = new HashSet();
         final Set<String> groupNames = new HashSet();
@@ -213,6 +220,9 @@ public class KnoxLdapRealm extends JndiLdapRealm {
          // save role names and group names in session so that they can be easily looked up outside of this object
           SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_ROLES, roleNames);
           SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_GROUPS, groupNames);
+          if (!groupNames.isEmpty() && (principals instanceof MutablePrincipalCollection)) {
+            ((MutablePrincipalCollection)principals).addAll(groupNames, getName());
+          }
           LOG.lookedUpUserRoles(roleNames, userName);
         }
         finally {
@@ -548,4 +558,16 @@ public class KnoxLdapRealm extends JndiLdapRealm {
         }
       }
     }
+
+    @Override
+    protected Object getAuthenticationCacheKey(AuthenticationToken token) {
+      if (token instanceof UsernamePasswordToken) {
+        HashRequest.Builder builder = new HashRequest.Builder();
+        StringBuilder key = new StringBuilder();
+        key.append(hashService.computeHash(builder.setSource(((UsernamePasswordToken) token).getUsername()).build()).toHex());
+        key.append(hashService.computeHash(builder.setSource(((UsernamePasswordToken) token).getPassword()).build()).toHex());
+        return key.toString();
+      }
+      return super.getAuthenticationCacheKey(token);
+    }
 }
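The getAuthenticationCacheKey override added above exists because authentication caching is being enabled in the new template: if the cache key were the username alone, a wrong or changed password could otherwise be accepted from a previously cached successful authentication, since the LDAP bind that normally verifies the password is skipped on a cache hit. Hashing both the username and the password into the key avoids that. A small standalone sketch of the same idea, using Shiro's DefaultHashService with made-up credentials:

import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.crypto.hash.DefaultHashService;
import org.apache.shiro.crypto.hash.HashRequest;
import org.apache.shiro.crypto.hash.HashService;

public class CacheKeySketch {

  // DefaultHashService is deterministic here because no salt is configured,
  // so equal inputs always produce equal keys.
  private static final HashService HASH_SERVICE = new DefaultHashService();

  static String cacheKey(UsernamePasswordToken token) {
    HashRequest.Builder builder = new HashRequest.Builder();
    return HASH_SERVICE.computeHash(builder.setSource(token.getUsername()).build()).toHex()
         + HASH_SERVICE.computeHash(builder.setSource(token.getPassword()).build()).toHex();
  }

  public static void main(String[] args) {
    String key1 = cacheKey(new UsernamePasswordToken("guest", "guest-password"));
    String key2 = cacheKey(new UsernamePasswordToken("guest", "wrong-password"));
    System.out.println(key1.equals(key2)); // false: same user, different credentials
  }
}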

http://git-wip-us.apache.org/repos/asf/knox/blob/32feae7f/gateway-release/home/templates/sandbox.knoxrealm.cacheon.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/sandbox.knoxrealm.cacheon.xml b/gateway-release/home/templates/sandbox.knoxrealm.cacheon.xml
new file mode 100644
index 0000000..8fd4811
--- /dev/null
+++ b/gateway-release/home/templates/sandbox.knoxrealm.cacheon.xml
@@ -0,0 +1,219 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<topology>
+
+    <gateway>
+
+        <provider>
+            <role>authentication</role>
+            <name>ShiroProvider</name>
+            <enabled>true</enabled>
+            <!--
+            Session timeout in minutes. This is really an idle timeout and
+            defaults to 30 minutes if the property value is not defined.
+            The current client authentication will expire if the client idles continuously for more than this value.
+            -->
+            <!-- defaults to: 30 minutes
+            <param>
+                <name>sessionTimeout</name>
+                <value>30</value>
+            </param>
+            -->
+
+            <!--
+              Use single KnoxLdapRealm to do authentication and ldap group look up
+            -->
+            <param>
+              <name>main.ldapRealm</name>
+              <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+            </param>
+            <param>
+              <name>main.ldapGroupContextFactory</name>
+              <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+            </param>
+            <param>
+              <name>main.ldapRealm.contextFactory</name>
+              <value>$ldapGroupContextFactory</value>
+            </param>
+            <!-- defaults to: simple
+            <param>
+              <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+              <value>simple</value>
+            </param>
+            -->
+            <param>
+              <name>main.ldapRealm.contextFactory.url</name>
+              <value>ldap://localhost:33389</value>
+            </param>
+            <param>
+              <name>main.ldapRealm.userDnTemplate</name>
+              <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+            </param>
+
+            <param>
+              <name>main.ldapRealm.authorizationEnabled</name>
+              <!-- defaults to: false -->
+              <value>true</value>
+            </param>
+            <!-- defaults to: simple
+            <param>
+              <name>main.ldapRealm.contextFactory.systemAuthenticationMechanism</name>
+              <value>simple</value>
+            </param>
+            -->
+            <param>
+              <name>main.ldapRealm.searchBase</name>
+              <value>ou=groups,dc=hadoop,dc=apache,dc=org</value>
+            </param>
+            <!-- defaults to: groupOfNames
+            <param>
+              <name>main.ldapRealm.groupObjectClass</name>
+              <value>groupOfNames</value>
+            </param>
+            -->
+            <!-- defaults to: member
+            <param>
+              <name>main.ldapRealm.memberAttribute</name>
+              <value>member</value>
+            </param>
+            -->
+            <!-- 
+            If this topology requires authorization then the group lookup can be optimized
+            with the following configuration.
+            Uncomment the following if you know that all of the clients for the services
+            for which you have routing services defined will be sending JSESSIONID.
+            
+            It may make sense to isolate those services that you can be sure will be sending
+            JSESSIONID - such as ODBC/JDBC drivers for access to HiveServer2.
+            
+            NOTE: including the following config for clients that DO NOT send JSESSIONID
+            will result in authorization failures because group lookup will not be done.
+             -->
+            <param>
+              <name>main.cacheManager</name>
+              <value>org.apache.shiro.cache.MemoryConstrainedCacheManager</value>
+            </param>
+            <param>
+              <name>main.securityManager.cacheManager</name>
+              <value>$cacheManager</value>
+            </param>
+            <param>
+                <name>main.ldapRealm.authenticationCachingEnabled</name>
+                <value>true</value>
+            </param>
+            <param>
+              <name>main.ldapRealm.memberAttributeValueTemplate</name>
+              <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+            </param>
+            <param>
+              <name>main.ldapRealm.contextFactory.systemUsername</name>
+              <value>uid=guest,ou=people,dc=hadoop,dc=apache,dc=org</value>
+            </param>
+            <param>
+              <name>main.ldapRealm.contextFactory.systemPassword</name>
+              <value>guest-password</value>
+            </param>
+
+            <param>
+              <name>urls./**</name> 
+              <value>authcBasic</value>
+            </param>
+
+        </provider>
+
+        <provider>
+            <role>identity-assertion</role>
+            <name>Default</name>
+            <enabled>true</enabled>
+            <param>
+                <name>group.principal.mapping</name>
+                <value>*=users</value>
+            </param>
+        </provider>
+
+        <provider>
+          <role>authorization</role>
+          <name>AclsAuthz</name>
+          <enabled>true</enabled>
+          <param>
+            <name>webhdfs.acl</name>
+            <value>*;analyst;*</value>
+          </param>
+        </provider>
+
+        <!--
+        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+        For example, a Hadoop service running in AWS may return a response that includes URLs containing
+        some AWS-internal host name.  If the client needs to make a subsequent request to the host identified
+        in those URLs, they need to be mapped to external host names that the client can use to connect through Knox.
+
+        If the external and internal host names are the same, turn off this provider by setting the value of
+        the enabled parameter to false.
+
+        The name parameter specifies the external host names in a comma-separated list.
+        The value parameter specifies the corresponding internal host names in a comma-separated list.
+
+        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
+        out-of-the-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
+        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
+        -->
+        <provider>
+            <role>hostmap</role>
+            <name>static</name>
+            <enabled>true</enabled>
+            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+        </provider>
+
+    </gateway>
+
+    <service>
+        <role>NAMENODE</role>
+        <url>hdfs://localhost:8020</url>
+    </service>
+
+    <service>
+        <role>JOBTRACKER</role>
+        <url>rpc://localhost:8050</url>
+    </service>
+
+    <service>
+        <role>WEBHDFS</role>
+        <url>http://localhost:50070/webhdfs</url>
+    </service>
+
+    <service>
+        <role>WEBHCAT</role>
+        <url>http://localhost:50111/templeton</url>
+    </service>
+
+    <service>
+        <role>OOZIE</role>
+        <url>http://localhost:11000/oozie</url>
+    </service>
+
+    <service>
+        <role>WEBHBASE</role>
+        <url>http://localhost:60080</url>
+    </service>
+
+    <service>
+        <role>HIVE</role>
+        <url>http://localhost:10000</url>
+    </service>
+
+</topology>
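The caching configuration in this template only pays off, as the comment in the ShiroProvider section notes, when clients return the JSESSIONID cookie on subsequent requests; that comment also warns that enabling it for clients that do not send JSESSIONID leads to authorization failures because the group lookup is skipped. A rough, hypothetical client sketch of the cookie-reuse pattern using only JDK classes is below. The gateway URL and credentials are placeholders, and a real HTTPS call would also require the gateway's TLS certificate to be trusted by the client JVM.

import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Base64;

public class JSessionIdClientSketch {
  public static void main(String[] args) throws Exception {
    // JVM-wide in-memory cookie store: the JSESSIONID set by the first response is
    // replayed automatically on the second request, letting the gateway reuse the
    // established session instead of re-authenticating and re-resolving groups.
    CookieHandler.setDefault(new CookieManager());

    // Placeholder gateway URL and credentials -- adjust for a real deployment.
    String url = "https://localhost:8443/gateway/sandbox/webhdfs/v1/?op=LISTSTATUS";
    String basicAuth = Base64.getEncoder()
        .encodeToString("guest:guest-password".getBytes("UTF-8"));

    for (int attempt = 1; attempt <= 2; attempt++) {
      HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
      conn.setRequestProperty("Authorization", "Basic " + basicAuth);
      System.out.println("attempt " + attempt + " -> HTTP " + conn.getResponseCode());
      conn.disconnect();
    }
  }
}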

