storm-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kabh...@apache.org
Subject [1/3] storm git commit: STORM-2555 handle impersonation properly for HBase delegation token
Date Sat, 24 Jun 2017 04:33:07 GMT
Repository: storm
Updated Branches:
  refs/heads/master 27e3f6a8d -> c2253fbdb


STORM-2555 handle impersonation properly for HBase delegation token


Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/b03118ae
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/b03118ae
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/b03118ae

Branch: refs/heads/master
Commit: b03118aecbb44545574ec649b554d2033d2efa20
Parents: 63e2078
Author: Jungtaek Lim <kabhwan@gmail.com>
Authored: Thu Jun 15 11:12:02 2017 +0900
Committer: Jungtaek Lim <kabhwan@gmail.com>
Committed: Thu Jun 22 15:26:50 2017 +0900

----------------------------------------------------------------------
 .../storm/common/AbstractHadoopAutoCreds.java   |  4 +
 .../storm/hbase/security/AutoHBaseNimbus.java   | 21 +++++-
 .../storm/hbase/security/HBaseSecurityUtil.java |  2 +-
 .../apache/storm/hbase/common/HBaseClient.java  | 10 +--
 .../org/apache/storm/hbase/common/Utils.java    | 36 +++++++++
 .../hbase/trident/state/HBaseMapState.java      | 78 +++++++++++++-------
 6 files changed, 112 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/storm/blob/b03118ae/external/storm-autocreds/src/main/java/org/apache/storm/common/AbstractHadoopAutoCreds.java
----------------------------------------------------------------------
diff --git a/external/storm-autocreds/src/main/java/org/apache/storm/common/AbstractHadoopAutoCreds.java b/external/storm-autocreds/src/main/java/org/apache/storm/common/AbstractHadoopAutoCreds.java
index 2ef0cf3..3c69b19 100644
--- a/external/storm-autocreds/src/main/java/org/apache/storm/common/AbstractHadoopAutoCreds.java
+++ b/external/storm-autocreds/src/main/java/org/apache/storm/common/AbstractHadoopAutoCreds.java
@@ -113,6 +113,10 @@ public abstract class AbstractHadoopAutoCreds implements IAutoCredentials, Crede
                     if (allTokens != null) {
                         for (Token<? extends TokenIdentifier> token : allTokens) {
                             try {
+                                LOG.debug("Current user: {}", UserGroupInformation.getCurrentUser());
+                                LOG.debug("Token from credential: {} / {}", token.toString(),
+                                        token.decodeIdentifier().getUser());
+                                
                                 UserGroupInformation.getCurrentUser().addToken(token);
                                 LOG.info("Added delegation tokens to UGI.");
                             } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/storm/blob/b03118ae/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/AutoHBaseNimbus.java
----------------------------------------------------------------------
diff --git a/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/AutoHBaseNimbus.java b/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/AutoHBaseNimbus.java
index a1cf674..ec85135 100644
--- a/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/AutoHBaseNimbus.java
+++ b/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/AutoHBaseNimbus.java
@@ -20,11 +20,15 @@ package org.apache.storm.hbase.security;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.token.TokenUtil;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.storm.Config;
 import org.apache.storm.common.AbstractHadoopNimbusPluginAutoCreds;
 import org.slf4j.Logger;
@@ -87,18 +91,27 @@ public class AutoHBaseNimbus extends AbstractHadoopNimbusPluginAutoCreds {
                 UserProvider provider = UserProvider.instantiate(hbaseConf);
                 provider.login(HBASE_KEYTAB_FILE_KEY, HBASE_PRINCIPAL_KEY, InetAddress.getLocalHost().getCanonicalHostName());
 
-                LOG.info("Logged into Hbase as principal = " + conf.get(HBASE_PRINCIPAL_KEY));
+                LOG.info("Logged into Hbase as principal = " + hbaseConf.get(HBASE_PRINCIPAL_KEY));
 
                 UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
 
                 final UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(topologySubmitterUser, ugi);
 
-                if(User.isHBaseSecurityEnabled(hbaseConf)) {
-                    TokenUtil.obtainAndCacheToken(hbaseConf, proxyUser);
+                User user = User.create(proxyUser);
+
+                if(user.isHBaseSecurityEnabled(hbaseConf)) {
+                    final Connection connection = ConnectionFactory.createConnection(hbaseConf, user);
+                    TokenUtil.obtainAndCacheToken(connection, user);
 
                     LOG.info("Obtained HBase tokens, adding to user credentials.");
 
-                    Credentials credential= proxyUser.getCredentials();
+                    Credentials credential = proxyUser.getCredentials();
+
+                    for (Token<? extends TokenIdentifier> tokenForLog : credential.getAllTokens()) {
+                        LOG.debug("Obtained token info in credential: {} / {}",
+                                tokenForLog.toString(), tokenForLog.decodeIdentifier().getUser());
+                    }
+
                     ByteArrayOutputStream bao = new ByteArrayOutputStream();
                     ObjectOutputStream out = new ObjectOutputStream(bao);
                     credential.write(out);

http://git-wip-us.apache.org/repos/asf/storm/blob/b03118ae/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
----------------------------------------------------------------------
diff --git a/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java b/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
index 935aa89..65dcd6f 100644
--- a/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
+++ b/external/storm-autocreds/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
@@ -75,7 +75,7 @@ public class HBaseSecurityUtil {
             }
             return legacyProvider;
         } else {
-            return UserProvider.instantiate(hbaseConfig);
+            return null;
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/storm/blob/b03118ae/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/HBaseClient.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/HBaseClient.java b/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/HBaseClient.java
index f6f97b1..1acbc29 100644
--- a/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/HBaseClient.java
+++ b/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/HBaseClient.java
@@ -21,6 +21,9 @@ import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.*;
 import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.storm.hbase.bolt.mapper.HBaseProjectionCriteria;
 import org.apache.storm.hbase.security.HBaseSecurityUtil;
 import org.slf4j.Logger;
@@ -40,12 +43,7 @@ public class HBaseClient implements Closeable{
     public HBaseClient(Map<String, Object> map , final Configuration configuration, final String tableName) {
         try {
             UserProvider provider = HBaseSecurityUtil.login(map, configuration);
-            this.table = provider.getCurrent().getUGI().doAs(new PrivilegedExceptionAction<HTable>() {
-                @Override
-                public HTable run() throws IOException {
-                    return new HTable(configuration, tableName);
-                }
-            });
+            this.table = Utils.getTable(provider, configuration, tableName);
         } catch(Exception e) {
             throw new RuntimeException("HBase bolt preparation failed: " + e.getMessage(), e);
         }

http://git-wip-us.apache.org/repos/asf/storm/blob/b03118ae/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/Utils.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/Utils.java b/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/Utils.java
index 8efe098..ab1ec37 100644
--- a/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/Utils.java
+++ b/external/storm-hbase/src/main/java/org/apache/storm/hbase/common/Utils.java
@@ -17,17 +17,53 @@
  */
 package org.apache.storm.hbase.common;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
 import java.math.BigDecimal;
+import java.security.PrivilegedExceptionAction;
 
 public class Utils {
     private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
 
     private Utils(){}
 
+    public static HTable getTable(UserProvider provider, Configuration config, String tableName)
+            throws IOException, InterruptedException {
+        UserGroupInformation ugi;
+        if (provider != null) {
+            ugi = provider.getCurrent().getUGI();
+            LOG.debug("Current USER for provider: {}", ugi.getUserName());
+        } else {
+            // autocreds puts delegation token into current user UGI
+            ugi = UserGroupInformation.getCurrentUser();
+
+            LOG.debug("UGI for current USER : {}", ugi.getUserName());
+            for (Token<? extends TokenIdentifier> token : ugi.getTokens()) {
+                LOG.debug("Token in UGI (delegation token): {} / {}", token.toString(),
+                        token.decodeIdentifier().getUser());
+
+                // use UGI from token
+                ugi = token.decodeIdentifier().getUser();
+                ugi.addToken(token);
+            }
+        }
+
+        return ugi.doAs(new PrivilegedExceptionAction<HTable>() {
+            @Override public HTable run() throws IOException {
+                return new HTable(config, tableName);
+            }
+        });
+    }
+
     public static long toLong(Object obj){
         long l = 0;
         if(obj != null){

http://git-wip-us.apache.org/repos/asf/storm/blob/b03118ae/external/storm-hbase/src/main/java/org/apache/storm/hbase/trident/state/HBaseMapState.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/main/java/org/apache/storm/hbase/trident/state/HBaseMapState.java b/external/storm-hbase/src/main/java/org/apache/storm/hbase/trident/state/HBaseMapState.java
index 43c5bf0..36077fc 100644
--- a/external/storm-hbase/src/main/java/org/apache/storm/hbase/trident/state/HBaseMapState.java
+++ b/external/storm-hbase/src/main/java/org/apache/storm/hbase/trident/state/HBaseMapState.java
@@ -15,31 +15,50 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.storm.hbase.trident.state;
 
-import org.apache.storm.task.IMetricsContext;
-import org.apache.storm.topology.FailedException;
-import org.apache.storm.tuple.Values;
 import com.google.common.collect.Maps;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.storm.hbase.security.HBaseSecurityUtil;
-import org.apache.storm.hbase.trident.mapper.TridentHBaseMapMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.storm.trident.state.*;
-import org.apache.storm.trident.state.map.*;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.io.Serializable;
-import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
+import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.storm.hbase.common.Utils;
+import org.apache.storm.hbase.security.HBaseSecurityUtil;
+import org.apache.storm.hbase.trident.mapper.TridentHBaseMapMapper;
+import org.apache.storm.task.IMetricsContext;
+import org.apache.storm.topology.FailedException;
+import org.apache.storm.trident.state.JSONNonTransactionalSerializer;
+import org.apache.storm.trident.state.JSONOpaqueSerializer;
+import org.apache.storm.trident.state.JSONTransactionalSerializer;
+import org.apache.storm.trident.state.OpaqueValue;
+import org.apache.storm.trident.state.Serializer;
+import org.apache.storm.trident.state.State;
+import org.apache.storm.trident.state.StateFactory;
+import org.apache.storm.trident.state.StateType;
+import org.apache.storm.trident.state.TransactionalValue;
+import org.apache.storm.trident.state.map.CachedMap;
+import org.apache.storm.trident.state.map.IBackingMap;
+import org.apache.storm.trident.state.map.MapState;
+import org.apache.storm.trident.state.map.NonTransactionalMap;
+import org.apache.storm.trident.state.map.OpaqueMap;
+import org.apache.storm.trident.state.map.SnapshottableMap;
+import org.apache.storm.trident.state.map.TransactionalMap;
+import org.apache.storm.tuple.Values;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HBaseMapState<T> implements IBackingMap<T> {
     private static Logger LOG = LoggerFactory.getLogger(HBaseMapState.class);
@@ -60,6 +79,13 @@ public class HBaseMapState<T> implements IBackingMap<T> {
     private Serializer<T> serializer;
     private HTable table;
 
+    /**
+     * Constructor.
+     *
+     * @param options HBase State options.
+     * @param map topology config map.
+     * @param partitionNum the number of partition.
+     */
     public HBaseMapState(final Options<T> options, Map map, int partitionNum) {
         this.options = options;
         this.serializer = options.serializer;
@@ -67,7 +93,7 @@ public class HBaseMapState<T> implements IBackingMap<T> {
 
         final Configuration hbConfig = HBaseConfiguration.create();
         Map<String, Object> conf = (Map<String, Object>)map.get(options.configKey);
-        if(conf == null){
+        if (conf == null) {
             LOG.info("HBase configuration not found using key '" + options.configKey + "'");
             LOG.info("Using HBase config from first hbase-site.xml found on classpath.");
         } else {
@@ -79,15 +105,10 @@ public class HBaseMapState<T> implements IBackingMap<T> {
             }
         }
 
-        try{
+        try {
             UserProvider provider = HBaseSecurityUtil.login(map, hbConfig);
-            this.table = provider.getCurrent().getUGI().doAs(new PrivilegedExceptionAction<HTable>() {
-                @Override
-                public HTable run() throws IOException {
-                    return new HTable(hbConfig, options.tableName);
-                }
-            });
-        } catch(Exception e){
+            this.table = Utils.getTable(provider, hbConfig, options.tableName);
+        } catch (Exception e) {
             throw new RuntimeException("HBase bolt preparation failed: " + e.getMessage(), e);
         }
 
@@ -166,7 +187,7 @@ public class HBaseMapState<T> implements IBackingMap<T> {
             LOG.info("Preparing HBase State for partition {} of {}.", partitionIndex + 1, numPartitions);
             IBackingMap state = new HBaseMapState(options, conf, partitionIndex);
 
-            if(options.cacheSize > 0) {
+            if (options.cacheSize > 0) {
                 state = new CachedMap(state, options.cacheSize);
             }
 
@@ -192,7 +213,7 @@ public class HBaseMapState<T> implements IBackingMap<T> {
     @Override
     public List<T> multiGet(List<List<Object>> keys) {
         List<Get> gets = new ArrayList<Get>();
-        for(List<Object> key : keys){
+        for (List<Object> key : keys) {
             byte[] hbaseKey = this.options.mapMapper.rowKey(key);
             String qualifier = this.options.mapMapper.qualifier(key);
 
@@ -209,13 +230,13 @@ public class HBaseMapState<T> implements IBackingMap<T> {
                 String qualifier = this.options.mapMapper.qualifier(keys.get(i));
                 Result result = results[i];
                 byte[] value = result.getValue(this.options.columnFamily.getBytes(), qualifier.getBytes());
-                if(value != null) {
+                if (value != null) {
                     retval.add(this.serializer.deserialize(value));
                 } else {
                     retval.add(null);
                 }
             }
-        } catch(IOException e){
+        } catch (IOException e) {
             throw new FailedException("IOException while reading from HBase.", e);
         }
         return retval;
@@ -227,7 +248,8 @@ public class HBaseMapState<T> implements IBackingMap<T> {
         for (int i = 0; i < keys.size(); i++) {
             byte[] hbaseKey = this.options.mapMapper.rowKey(keys.get(i));
             String qualifier = this.options.mapMapper.qualifier(keys.get(i));
-            LOG.info("Partiton: {}, Key: {}, Value: {}", new Object[]{this.partitionNum, new String(hbaseKey), new String(this.serializer.serialize(values.get(i)))});
+            LOG.info("Partiton: {}, Key: {}, Value: {}",
+                    new Object[]{this.partitionNum, new String(hbaseKey), new String(this.serializer.serialize(values.get(i)))});
             Put put = new Put(hbaseKey);
             T val = values.get(i);
             put.add(this.options.columnFamily.getBytes(),


Mime
View raw message