knox-commits mailing list archives

From kris...@apache.org
Subject [knox] branch master updated: KNOX-1859 - Improve alias lookup for HadoopAuthProvider (#89)
Date Wed, 15 May 2019 20:12:52 GMT
This is an automated email from the ASF dual-hosted git repository.

krisden pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/knox.git


The following commit(s) were added to refs/heads/master by this push:
     new 5444a2a  KNOX-1859 - Improve alias lookup for HadoopAuthProvider (#89)
5444a2a is described below

commit 5444a2ae2a5f7270f4639e513db7396f822eb81c
Author: Kevin Risden <risdenk@users.noreply.github.com>
AuthorDate: Wed May 15 15:12:47 2019 -0500

    KNOX-1859 - Improve alias lookup for HadoopAuthProvider (#89)
    
    Signed-off-by: Kevin Risden <krisden@apache.org>
---
 gateway-provider-security-hadoopauth/pom.xml       |   9 +-
 .../gateway/hadoopauth/HadoopAuthMessages.java     |   7 --
 .../deploy/HadoopAuthDeploymentContributor.java    |  42 +--------
 .../hadoopauth/filter/HadoopAuthFilter.java        |  30 ++++++
 .../HadoopAuthDeploymentContributorTest.java       |  32 +------
 .../hadoopauth/filter/HadoopAuthFilterTest.java    | 104 +++++++++++++++++++++
 6 files changed, 147 insertions(+), 77 deletions(-)
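
In short: alias resolution moves out of deploy time (HadoopAuthDeploymentContributor) and into the filter itself. A provider param whose value is written as ${ALIAS=some.alias} is now left untouched at deployment and resolved by HadoopAuthFilter against the topology's cluster-scoped AliasService when the filter builds its configuration. Below is a minimal standalone sketch of that resolution rule, based only on what the diff shows (the ${ALIAS=...} token format and AliasService.getPasswordFromAliasForCluster); the class and method names in the sketch are illustrative, not part of the commit.

    import org.apache.knox.gateway.services.security.AliasService;
    import org.apache.knox.gateway.services.security.AliasServiceException;

    // Illustrative sketch only -- mirrors the token handling added to
    // HadoopAuthFilter.getConfiguration() in the diff below.
    final class AliasTokenSketch {
      private static final String PREFIX = "${ALIAS=";
      private static final String SUFFIX = "}";

      // Returns the cluster-scoped secret if value is an ${ALIAS=...} token,
      // otherwise returns the value unchanged.
      static String resolve(AliasService aliasService, String clusterName, String value)
          throws AliasServiceException {
        if (value != null && value.startsWith(PREFIX) && value.endsWith(SUFFIX)) {
          String alias = value.substring(PREFIX.length(), value.length() - SUFFIX.length());
          return String.valueOf(aliasService.getPasswordFromAliasForCluster(clusterName, alias));
        }
        return value;
      }
    }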

diff --git a/gateway-provider-security-hadoopauth/pom.xml b/gateway-provider-security-hadoopauth/pom.xml
index 299ea58..b61d070 100755
--- a/gateway-provider-security-hadoopauth/pom.xml
+++ b/gateway-provider-security-hadoopauth/pom.xml
@@ -35,6 +35,10 @@
         </dependency>
         <dependency>
             <groupId>org.apache.knox</groupId>
+            <artifactId>gateway-server</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.knox</groupId>
             <artifactId>gateway-spi</artifactId>
         </dependency>
         <dependency>
@@ -62,10 +66,5 @@
             <artifactId>gateway-test-utils</artifactId>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.knox</groupId>
-            <artifactId>gateway-server</artifactId>
-            <scope>test</scope>
-        </dependency>
     </dependencies>
 </project>
diff --git a/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/HadoopAuthMessages.java b/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/HadoopAuthMessages.java
index 333bf4a..19b8c6c 100755
--- a/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/HadoopAuthMessages.java
+++ b/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/HadoopAuthMessages.java
@@ -20,16 +20,9 @@ package org.apache.knox.gateway.hadoopauth;
 import org.apache.knox.gateway.i18n.messages.Message;
 import org.apache.knox.gateway.i18n.messages.MessageLevel;
 import org.apache.knox.gateway.i18n.messages.Messages;
-import org.apache.knox.gateway.i18n.messages.StackTrace;
 
 @Messages(logger="org.apache.knox.gateway.provider.global.hadoopauth")
 public interface HadoopAuthMessages {
   @Message( level = MessageLevel.DEBUG, text = "Hadoop Authentication Asserted Principal: {0}" )
   void hadoopAuthAssertedPrincipal(String name);
-
-  @Message( level = MessageLevel.ERROR, text = "Alias service exception: {0}" )
-  void aliasServiceException(@StackTrace( level = MessageLevel.DEBUG ) Exception e);
-
-  @Message( level = MessageLevel.ERROR, text = "Unable to get password for {0}: {1}" )
-  void unableToGetPassword(String name, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
 }
diff --git a/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/deploy/HadoopAuthDeploymentContributor.java b/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/deploy/HadoopAuthDeploymentContributor.java
index 4cc4925..4945306 100755
--- a/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/deploy/HadoopAuthDeploymentContributor.java
+++ b/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/deploy/HadoopAuthDeploymentContributor.java
@@ -21,12 +21,8 @@ import org.apache.knox.gateway.deploy.DeploymentContext;
 import org.apache.knox.gateway.deploy.ProviderDeploymentContributorBase;
 import org.apache.knox.gateway.descriptor.FilterParamDescriptor;
 import org.apache.knox.gateway.descriptor.ResourceDescriptor;
-import org.apache.knox.gateway.hadoopauth.HadoopAuthMessages;
 import org.apache.knox.gateway.hadoopauth.filter.HadoopAuthFilter;
 import org.apache.knox.gateway.hadoopauth.filter.HadoopAuthPostFilter;
-import org.apache.knox.gateway.i18n.messages.MessagesFactory;
-import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.knox.gateway.services.security.AliasServiceException;
 import org.apache.knox.gateway.topology.Provider;
 import org.apache.knox.gateway.topology.Service;
 
@@ -37,15 +33,11 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 public class HadoopAuthDeploymentContributor extends ProviderDeploymentContributorBase {
-
-  private static HadoopAuthMessages log = MessagesFactory.get( HadoopAuthMessages.class );
-
   private static final String HADOOPAUTH_FILTER_CLASSNAME = HadoopAuthFilter.class.getCanonicalName();
   private static final String HADOOPAUTH_POSTFILTER_CLASSNAME = HadoopAuthPostFilter.class.getCanonicalName();
 
   public static final String ROLE = "authentication";
   public static final String NAME = "HadoopAuth";
-  private AliasService as;
 
   @Override
   public String getRole() {
@@ -57,27 +49,9 @@ public class HadoopAuthDeploymentContributor extends ProviderDeploymentContributorBase {
     return NAME;
   }
 
-  public void setAliasService(AliasService as) {
-    this.as = as;
-  }
-
-  @Override
-  public void initializeContribution(DeploymentContext context) {
-    super.initializeContribution(context);
-  }
-
   @Override
   public void contributeFilter(DeploymentContext context, Provider provider, Service service,
       ResourceDescriptor resource, List<FilterParamDescriptor> params) {
-    String clusterName = context.getTopology().getName();
-
-    List<String> aliases = new ArrayList<>();
-    try {
-      aliases = this.as.getAliasesForCluster(clusterName);
-    } catch (AliasServiceException e) {
-      log.aliasServiceException(e);
-    }
-
     // blindly add all the provider params as filter init params
     if (params == null) {
       params = new ArrayList<>();
@@ -85,20 +59,12 @@ public class HadoopAuthDeploymentContributor extends ProviderDeploymentContributorBase {
     Map<String, String> providerParams = provider.getParams();
     for(Entry<String, String> entry : providerParams.entrySet()) {
       String key = entry.getKey().toLowerCase(Locale.ROOT);
-      String value = null;
-      if(aliases.contains(key)) {
-        try {
-          value = String.valueOf(this.as.getPasswordFromAliasForCluster(clusterName, key));
-        } catch (AliasServiceException e) {
-          log.unableToGetPassword(key, e);
-        }
-      } else {
-        value = entry.getValue();
-      }
-
-      params.add( resource.createFilterParam().name( key ).value( value ) );
+      params.add(resource.createFilterParam().name(key).value(entry.getValue()));
     }
 
+    String clusterName = context.getTopology().getName();
+    params.add(resource.createFilterParam().name("clusterName").value(clusterName));
+
     resource.addFilter().name( getName() ).role( getRole() ).impl(HADOOPAUTH_FILTER_CLASSNAME).params( params );
     resource.addFilter().name( "Post" + getName() ).role( getRole() ).impl(HADOOPAUTH_POSTFILTER_CLASSNAME).params( params );
   }
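
With the alias lookup gone from deploy time, contributeFilter() now simply copies every provider param through verbatim (lower-casing the key) and appends the topology name under a clusterName init param so the filter can do the cluster-scoped lookup later. A hedged sketch of the filter init params this produces for a provider with two params on a topology named "sandbox" (the param values and the topology name are illustrative):

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustrative only: values pass through untouched at deploy time;
    // ${ALIAS=...} tokens are resolved later, inside HadoopAuthFilter.
    class ContributedParamsSketch {
      static Map<String, String> expectedFilterInitParams() {
        Map<String, String> params = new LinkedHashMap<>();
        params.put("type", "simple");                                // copied as-is
        params.put("signature.secret", "${ALIAS=signature.secret}"); // still a token here
        params.put("clusterName", "sandbox");                        // added by the contributor
        return params;
      }
    }
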
diff --git a/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/filter/HadoopAuthFilter.java b/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/filter/HadoopAuthFilter.java
index a81855b..8d41dc2 100755
--- a/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/filter/HadoopAuthFilter.java
+++ b/gateway-provider-security-hadoopauth/src/main/java/org/apache/knox/gateway/hadoopauth/filter/HadoopAuthFilter.java
@@ -17,6 +17,12 @@
  */
 package org.apache.knox.gateway.hadoopauth.filter;
 
+import org.apache.knox.gateway.GatewayServer;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.ServiceType;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.AliasServiceException;
+
 import java.util.Enumeration;
 import java.util.Properties;
 
@@ -47,12 +53,36 @@ public class HadoopAuthFilter extends
 
   @Override
   protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
+    GatewayServices services = GatewayServer.getGatewayServices();
+    AliasService aliasService = services.getService(ServiceType.ALIAS_SERVICE);
+
+    return getConfiguration(aliasService, configPrefix, filterConfig);
+  }
+
+  // Visible for testing
+  Properties getConfiguration(AliasService aliasService, String configPrefix,
+                                        FilterConfig filterConfig) throws ServletException {
+
+    String clusterName = filterConfig.getInitParameter("clusterName");
+
     Properties props = new Properties();
     Enumeration<String> names = filterConfig.getInitParameterNames();
     while (names.hasMoreElements()) {
       String name = names.nextElement();
       if (name.startsWith(configPrefix)) {
         String value = filterConfig.getInitParameter(name);
+
+        // Handle the case value is an alias
+        if (value.startsWith("${ALIAS=") && value.endsWith("}")) {
+          String alias = value.substring("${ALIAS=".length(), value.length() - 1);
+          try {
+            value = String.valueOf(
+                aliasService.getPasswordFromAliasForCluster(clusterName, alias));
+          } catch (AliasServiceException e) {
+            throw new ServletException("Unable to retrieve alias for config: " + name, e);
+          }
+        }
+
         props.put(name.substring(configPrefix.length()), value);
       }
     }
diff --git a/gateway-provider-security-hadoopauth/src/test/java/org/apache/knox/gateway/hadoopauth/HadoopAuthDeploymentContributorTest.java b/gateway-provider-security-hadoopauth/src/test/java/org/apache/knox/gateway/hadoopauth/HadoopAuthDeploymentContributorTest.java
index 652509f..1c64874 100644
--- a/gateway-provider-security-hadoopauth/src/test/java/org/apache/knox/gateway/hadoopauth/HadoopAuthDeploymentContributorTest.java
+++ b/gateway-provider-security-hadoopauth/src/test/java/org/apache/knox/gateway/hadoopauth/HadoopAuthDeploymentContributorTest.java
@@ -25,10 +25,6 @@ import org.apache.knox.gateway.descriptor.GatewayDescriptor;
 import org.apache.knox.gateway.descriptor.ResourceDescriptor;
 import org.apache.knox.gateway.descriptor.impl.GatewayDescriptorImpl;
 import org.apache.knox.gateway.hadoopauth.deploy.HadoopAuthDeploymentContributor;
-import org.apache.knox.gateway.services.ServiceType;
-import org.apache.knox.gateway.services.GatewayServices;
-import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
 import org.apache.knox.gateway.topology.Provider;
 import org.apache.knox.gateway.topology.Topology;
 import org.easymock.EasyMock;
@@ -36,7 +32,6 @@ import org.jboss.shrinkwrap.api.ShrinkWrap;
 import org.jboss.shrinkwrap.api.spec.WebArchive;
 import org.junit.Test;
 
-import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -67,20 +62,20 @@ public class HadoopAuthDeploymentContributorTest {
   }
 
   @Test
-  public void testDeployment() throws Exception {
+  public void testDeployment() {
     String aliasKey = "signature.secret";
-    String aliasValue = "password";
+    String aliasValue = "${ALIAS=signature.secret}";
     String normalKey = "type";
     String normalValue = "simple";
 
-    WebArchive webArchive = ShrinkWrap.create( WebArchive.class, "test-acrhive" );
+    WebArchive webArchive = ShrinkWrap.create( WebArchive.class, "test-archive" );
 
     Provider provider = new Provider();
     provider.setEnabled( true );
     provider.setName( HadoopAuthDeploymentContributor.NAME );
     // Keep order of params in map for testing
     Map<String, String> params = new TreeMap<>();
-    params.put(aliasKey, aliasKey);
+    params.put(aliasKey, aliasValue);
     params.put(normalKey, normalValue);
     provider.setParams(params);
 
@@ -95,30 +90,13 @@ public class HadoopAuthDeploymentContributorTest {
     GatewayDescriptor gatewayDescriptor = new GatewayDescriptorImpl();
     ResourceDescriptor resource = gatewayDescriptor.createResource();
 
-    AliasService as = EasyMock.createNiceMock( AliasService.class );
-    EasyMock.expect(as.getAliasesForCluster(context.getTopology().getName()))
-        .andReturn(Collections.singletonList(aliasKey)).anyTimes();
-    EasyMock.expect(as.getPasswordFromAliasForCluster(context.getTopology().getName(), aliasKey))
-        .andReturn(aliasValue.toCharArray()).anyTimes();
-    EasyMock.replay( as );
-    DefaultCryptoService cryptoService = new DefaultCryptoService();
-    cryptoService.setAliasService( as );
-
-    GatewayServices gatewayServices = EasyMock.createNiceMock( GatewayServices.class );
-    EasyMock.expect( gatewayServices.getService( ServiceType.CRYPTO_SERVICE ) ).andReturn( cryptoService ).anyTimes();
-
     HadoopAuthDeploymentContributor contributor = new HadoopAuthDeploymentContributor();
-    contributor.setAliasService(as);
 
     assertThat( contributor.getRole(), is( HadoopAuthDeploymentContributor.ROLE ) );
     assertThat( contributor.getName(), is( HadoopAuthDeploymentContributor.NAME ) );
 
-    // Just make sure it doesn't blow up.
     contributor.initializeContribution( context );
-
     contributor.contributeFilter(context, provider, null, resource, null);
-
-    // Just make sure it doesn't blow up.
     contributor.finalizeContribution( context );
 
     // Check that the params are properly setup
@@ -126,7 +104,7 @@ public class HadoopAuthDeploymentContributorTest {
     assertNotNull(hadoopAuthFilterDescriptor);
     assertEquals(HadoopAuthDeploymentContributor.NAME, hadoopAuthFilterDescriptor.name());
     List<FilterParamDescriptor> hadoopAuthFilterParams = hadoopAuthFilterDescriptor.params();
-    assertEquals(2, hadoopAuthFilterParams.size());
+    assertEquals(3, hadoopAuthFilterParams.size());
 
     FilterParamDescriptor paramDescriptor = hadoopAuthFilterParams.get(0);
     assertEquals(aliasKey, paramDescriptor.name());
diff --git a/gateway-provider-security-hadoopauth/src/test/java/org/apache/knox/gateway/hadoopauth/filter/HadoopAuthFilterTest.java b/gateway-provider-security-hadoopauth/src/test/java/org/apache/knox/gateway/hadoopauth/filter/HadoopAuthFilterTest.java
new file mode 100644
index 0000000..20b924d
--- /dev/null
+++ b/gateway-provider-security-hadoopauth/src/test/java/org/apache/knox/gateway/hadoopauth/filter/HadoopAuthFilterTest.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.hadoopauth.filter;
+
+import org.apache.knox.gateway.deploy.DeploymentContext;
+import org.apache.knox.gateway.services.ServiceType;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
+import org.apache.knox.gateway.topology.Topology;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import java.util.Enumeration;
+import java.util.Properties;
+
+import static org.junit.Assert.assertEquals;
+
+public class HadoopAuthFilterTest {
+  @Test
+  public void testHadoopAuthFilterAliases() throws Exception {
+    String aliasKey = "signature.secret";
+    String aliasConfigKey = "${ALIAS=" + aliasKey + "}";
+    String aliasValue = "password";
+
+    Topology topology = new Topology();
+    topology.setName("Sample");
+
+    DeploymentContext context = EasyMock.createNiceMock(DeploymentContext.class);
+    EasyMock.expect(context.getTopology()).andReturn(topology).anyTimes();
+    EasyMock.replay(context);
+
+    String clusterName = context.getTopology().getName();
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getPasswordFromAliasForCluster(clusterName, aliasKey))
+        .andReturn(aliasValue.toCharArray()).anyTimes();
+    EasyMock.replay(as);
+    DefaultCryptoService cryptoService = new DefaultCryptoService();
+    cryptoService.setAliasService(as);
+
+    GatewayServices gatewayServices = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(gatewayServices.getService(ServiceType.CRYPTO_SERVICE)).andReturn(cryptoService).anyTimes();
+
+    HadoopAuthFilter hadoopAuthFilter = new HadoopAuthFilter();
+
+    String configPrefix = "hadoop.auth.config.";
+
+    Properties props = new Properties();
+    props.put("clusterName", clusterName);
+    props.put(configPrefix + "signature.secret", aliasConfigKey);
+    props.put(configPrefix + "test", "abc");
+
+    FilterConfig filterConfig = new HadoopAuthTestFilterConfig(props);
+    Properties configuration = hadoopAuthFilter.getConfiguration(as, configPrefix, filterConfig);
+    assertEquals(aliasValue, configuration.getProperty(aliasKey));
+    assertEquals("abc", configuration.getProperty("test"));
+  }
+
+  private static class HadoopAuthTestFilterConfig implements FilterConfig {
+    Properties props;
+
+    HadoopAuthTestFilterConfig(Properties props) {
+      this.props = props;
+    }
+
+    @Override
+    public String getFilterName() {
+      return null;
+    }
+
+    @Override
+    public ServletContext getServletContext() {
+      return null;
+    }
+
+    @Override
+    public String getInitParameter(String name) {
+      return props.getProperty(name, null);
+    }
+
+    @Override
+    public Enumeration<String> getInitParameterNames() {
+      return (Enumeration<String>)props.propertyNames();
+    }
+  }
+}

