sentry-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From pras...@apache.org
Subject [09/15] SENTRY-432: Synchronization of HDFS permissions with Sentry permissions. First refresh (Arun Suresh via Prasad Mujumdar)
Date Mon, 13 Oct 2014 23:26:05 GMT
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
new file mode 100644
index 0000000..165892d
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReadWriteLock;
+
+import org.apache.sentry.hdfs.service.thrift.TPathChanges;
+import org.apache.sentry.hdfs.service.thrift.TPathsDump;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UpdateableAuthzPaths implements AuthzPaths, Updateable<PathsUpdate> {
+  private volatile HMSPaths paths;
+  private final AtomicLong seqNum = new AtomicLong(0);
+
+  private static Logger LOG = LoggerFactory.getLogger(UpdateableAuthzPaths.class);
+  
+  public UpdateableAuthzPaths(String[] pathPrefixes) {
+    this.paths = new HMSPaths(pathPrefixes);
+  }
+
+  UpdateableAuthzPaths(HMSPaths paths) {
+    this.paths = paths;
+  }
+
+  @Override
+  public boolean isUnderPrefix(String[] pathElements) {
+    return paths.isUnderPrefix(pathElements);
+  }
+
+  @Override
+  public String findAuthzObject(String[] pathElements) {
+    return  paths.findAuthzObject(pathElements);
+  }
+
+  @Override
+  public String findAuthzObjectExactMatch(String[] pathElements) {
+    return  paths.findAuthzObjectExactMatch(pathElements);
+  }
+
+  @Override
+  public UpdateableAuthzPaths updateFull(PathsUpdate update) {
+    UpdateableAuthzPaths other = getPathsDump().initializeFromDump(
+        update.getThriftObject().getPathsDump());
+    other.seqNum.set(update.getSeqNum());
+    return other;
+  }
+
+  @Override
+  public void updatePartial(Iterable<PathsUpdate> updates, ReadWriteLock lock) {
+    lock.writeLock().lock();
+    try {
+      int counter = 0;
+      for (PathsUpdate update : updates) {
+        applyPartialUpdate(update);
+        if (++counter > 99) {
+          counter = 0;
+          lock.writeLock().unlock();
+          lock.writeLock().lock();
+        }
+        seqNum.set(update.getSeqNum());
+        LOG.warn("##### Updated paths seq Num [" + seqNum.get() + "]");
+      }
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  private void applyPartialUpdate(PathsUpdate update) {
+    for (TPathChanges pathChanges : update.getPathChanges()) {
+      paths.addPathsToAuthzObject(pathChanges.getAuthzObj(), pathChanges
+          .getAddPaths(), true);
+      List<List<String>> delPaths = pathChanges.getDelPaths();
+      if ((delPaths.size() == 1) && (delPaths.get(0).size() == 1)
+          && (delPaths.get(0).get(0).equals(PathsUpdate.ALL_PATHS))) {
+        // Remove all paths.. eg. drop table
+        paths.deleteAuthzObject(pathChanges.getAuthzObj());
+      } else {
+        paths.deletePathsFromAuthzObject(pathChanges.getAuthzObj(), pathChanges
+            .getDelPaths());
+      }
+    }
+  }
+
+  @Override
+  public long getLastUpdatedSeqNum() {
+    return seqNum.get();
+  }
+
+  @Override
+  public PathsUpdate createFullImageUpdate(long currSeqNum) {
+    PathsUpdate pathsUpdate = new PathsUpdate(currSeqNum, true);
+    pathsUpdate.getThriftObject().setPathsDump(getPathsDump().createPathsDump());
+    return pathsUpdate;
+  }
+
+  @Override
+  public AuthzPathsDumper<UpdateableAuthzPaths> getPathsDump() {
+    return new AuthzPathsDumper<UpdateableAuthzPaths>() {
+
+      @Override
+      public TPathsDump createPathsDump() {
+        return UpdateableAuthzPaths.this.paths.getPathsDump().createPathsDump();
+      }
+
+      @Override
+      public UpdateableAuthzPaths initializeFromDump(TPathsDump pathsDump) {
+        return new UpdateableAuthzPaths(new HMSPaths().getPathsDump().initializeFromDump(
+            pathsDump));
+      }
+    };
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-common/src/main/resources/sentry_hdfs_service.thrift
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/main/resources/sentry_hdfs_service.thrift b/sentry-hdfs/sentry-hdfs-common/src/main/resources/sentry_hdfs_service.thrift
new file mode 100644
index 0000000..9212b64
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-common/src/main/resources/sentry_hdfs_service.thrift
@@ -0,0 +1,87 @@
+#!/usr/local/bin/thrift -java
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#
+# Thrift definitions for the Sentry HDFS synchronization service:
+# propagates HMS path updates and Sentry permission updates to HDFS.
+#
+
+include "share/fb303/if/fb303.thrift"
+
+namespace java org.apache.sentry.hdfs.service.thrift
+namespace php sentry.hdfs.thrift
+namespace cpp Apache.Sentry.HDFS.Thrift
+
# Path changes for a single authorizable object (database/table):
# each path is pre-split into its elements (list<string>).
struct TPathChanges {
1: required string authzObj;
2: required list<list<string>> addPaths;
3: required list<list<string>> delPaths;
}

# One node of a serialized path tree; children reference other
# entries in TPathsDump.nodeMap by id.
struct TPathEntry {
1: required byte type;
2: required string pathElement;
3: optional string authzObj;
4: required set<i32> children;
}

# Full dump of the path tree, flattened into an id -> entry map
# rooted at rootId.
struct TPathsDump {
1: required i32 rootId;
2: required map<i32,TPathEntry> nodeMap;
}

# A paths update: either a full image (pathsDump set) or a batch of
# incremental pathChanges, tagged with a sequence number.
struct TPathsUpdate {
1: required bool hasFullImage;
2: optional TPathsDump pathsDump;
3: required i64 seqNum;
4: required list<TPathChanges> pathChanges;
}

# Privilege add/delete deltas for one authorizable object.
struct TPrivilegeChanges {
1: required string authzObj;
2: required map<string, string> addPrivileges;
3: required map<string, string> delPrivileges;
}

# Group membership add/delete deltas for one role.
struct TRoleChanges {
1: required string role;
2: required list<string> addGroups;
3: required list<string> delGroups;
}

# A permissions update, full image or incremental, tagged with a
# sequence number.
struct TPermissionsUpdate {
# NOTE(review): field name casing is inconsistent with
# TPathsUpdate.hasFullImage; renaming would change the generated
# accessors, so it is flagged here rather than fixed.
1: required bool hasfullImage;
2: required i64 seqNum;
3: required map<string, TPrivilegeChanges> privilegeChanges;
4: required map<string, TRoleChanges> roleChanges;
}

# Response carrying all path and permission updates newer than the
# sequence numbers the caller supplied.
struct TAuthzUpdateResponse {
1: optional list<TPathsUpdate> authzPathUpdate,
2: optional list<TPermissionsUpdate> authzPermUpdate,
}

service SentryHDFSService
{
  # HMS Path cache
  # Pushes an HMS notification (path update) to the service.
  void handle_hms_notification(1:TPathsUpdate pathsUpdate);

  # Returns all permission/path updates newer than the given sequence numbers.
  TAuthzUpdateResponse get_all_authz_updates_from(1:i64 permSeqNum, 2:i64 pathSeqNum);
  # Returns the paths associated with the given path, keyed by authz object.
  map<string, list<string>> get_all_related_paths(1:string path, 2:bool exactMatch);
}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java
new file mode 100644
index 0000000..29868ae
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java
@@ -0,0 +1,357 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class TestHMSPaths {
+
+  @Test
+  public void testGetPathElements() {
+    List<String> as2 = HMSPaths.getPathElements(new String("/a/b"));
+    List<String> as1 = HMSPaths.getPathElements(new String("/a/b"));
+    Assert.assertEquals(as1, as2);
+
+    List<String> as = HMSPaths.getPathElements(new String("/a/b"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("//a/b"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("/a//b"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("/a/b/"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("//a//b//"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+  }
+
+  @Test
+  public void testEntryType() {
+    Assert.assertTrue(HMSPaths.EntryType.DIR.isRemoveIfDangling());
+    Assert.assertFalse(HMSPaths.EntryType.PREFIX.isRemoveIfDangling());
+    Assert.assertFalse(
+        HMSPaths.EntryType.AUTHZ_OBJECT.isRemoveIfDangling());
+  }
+  
+  @Test
+  public void testRootEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    root.toString();
+    Assert.assertNull(root.getParent());
+    Assert.assertEquals(HMSPaths.EntryType.DIR, root.getType());
+    Assert.assertNull(root.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR, root.getFullPath());
+    Assert.assertTrue(root.getChildren().isEmpty());
+    root.delete();
+    try {
+      root.find(null, true);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    try {
+      root.find(new String[0], true);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    try {
+      root.find(null, false);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    try {
+      root.find(new String[0], false);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    Assert.assertNull(root.find(new String[]{"a"}, true));
+    Assert.assertNull(root.find(new String[]{"a"}, false));
+    Assert.assertNull(root.findPrefixEntry(Lists.newArrayList("a")));
+
+    root.delete();
+  }
+
+  @Test
+  public void testRootPrefixEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(true);
+    root.toString();
+
+    Assert.assertNull(root.find(new String[]{"a"}, true));
+    Assert.assertNull(root.find(new String[]{"a"}, false));
+    Assert.assertEquals(root, root.findPrefixEntry(Lists.newArrayList("a")));
+    Assert.assertEquals(root, root.findPrefixEntry(Lists.newArrayList("a", "b")));
+
+    try {
+      root.createPrefix(Lists.newArrayList("a"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+  }
+
+  @Test
+  public void testImmediatePrefixEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry entry = root.createPrefix(Lists.newArrayList("a"));
+    entry.toString();
+    
+    Assert.assertEquals(1, root.getChildren().size());
+
+    Assert.assertEquals(root, entry.getParent());
+    Assert.assertEquals(HMSPaths.EntryType.PREFIX, entry.getType());
+    Assert.assertEquals("a", entry.getPathElement());
+    Assert.assertNull(entry.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a", entry.getFullPath());
+    Assert.assertTrue(entry.getChildren().isEmpty());
+
+    Assert.assertEquals(entry, root.findPrefixEntry(Lists.newArrayList("a")));
+    Assert.assertEquals(entry, root.findPrefixEntry(Lists.newArrayList("a", "b")));
+
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+
+    Assert.assertNull(root.find(new String[]{"b"}, false));
+    Assert.assertNull(root.findPrefixEntry(Lists.newArrayList("b")));
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    entry.delete();
+    Assert.assertTrue(root.getChildren().isEmpty());
+  }
+
+  @Test
+  public void testFurtherPrefixEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry entry = root.createPrefix(Lists.newArrayList("a", "b"));
+    entry.toString();
+
+    Assert.assertEquals(1, root.getChildren().size());
+
+    Assert.assertEquals(root, entry.getParent().getParent());
+    Assert.assertEquals(HMSPaths.EntryType.PREFIX, entry.getType());
+    Assert.assertEquals(HMSPaths.EntryType.DIR, 
+        entry.getParent().getType());
+    Assert.assertEquals("b", entry.getPathElement());
+    Assert.assertEquals("a", entry.getParent().getPathElement());
+    Assert.assertNull(entry.getAuthzObj());
+    Assert.assertNull(entry.getParent().getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b", 
+        entry.getFullPath());
+    Assert.assertEquals(Path.SEPARATOR + "a", entry.getParent().getFullPath());
+    Assert.assertTrue(entry.getChildren().isEmpty());
+    Assert.assertEquals(1, entry.getParent().getChildren().size());
+
+    Assert.assertEquals(entry, root.findPrefixEntry(Lists.newArrayList("a", "b")));
+    Assert.assertNull(root.findPrefixEntry(Lists.newArrayList("a")));
+
+    Assert.assertNull(root.find(new String[]{"a", "b", "c"}, false));
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    entry.delete();
+    Assert.assertTrue(root.getChildren().isEmpty());
+  }
+
+  @Test
+  public void testImmediateAuthzEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry prefix = root.createPrefix(Lists.newArrayList("a", "b"));
+
+    HMSPaths.Entry entry = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "p1"), "A");
+    Assert.assertEquals(prefix, entry.getParent());
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, entry.getType());
+    Assert.assertEquals("p1", entry.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b" +
+        Path.SEPARATOR + "p1", entry.getFullPath());
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "p1", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1"}, true));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1"}, false));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1", "c"}, 
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "p1", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "p1")));
+
+    root.find(new String[]{"a", "b", "p1"}, true).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "p1"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "p1")));
+
+  }
+
+  @Test
+  public void testFurtherAuthzEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry prefix = root.createPrefix(Lists.newArrayList("a", "b"));
+
+    HMSPaths.Entry entry = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p1"), "A");
+    Assert.assertEquals(prefix, entry.getParent().getParent());
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, entry.getType());
+    Assert.assertEquals("p1", entry.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b" +
+        Path.SEPARATOR + "t" + Path.SEPARATOR + "p1", entry.getFullPath());
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "p1", "t", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    HMSPaths.Entry ep2 = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p1", "p2"), "A");
+
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, entry.getType());
+    Assert.assertEquals("p1", entry.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, ep2.getType());
+    Assert.assertEquals("p2", ep2.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1"},
+        true));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1"},
+        false));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2"},
+        true));
+    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2"},
+        false));
+    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "p2", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1", "p2")));
+
+    root.find(new String[]{"a", "b", "t", "p1"}, false).delete();
+
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"},
+        true));
+    Assert.assertEquals(HMSPaths.EntryType.DIR, entry.getType());
+    Assert.assertNull(entry.getAuthzObj());
+
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+    Assert.assertNotNull(root.find(new String[]{"a", "b", "t", "p1", "p2"}, false));
+    root.find(new String[]{"a", "b", "t", "p1", "p2"}, false).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+  }
+
+  @Test
+  public void testMultipleAuthzEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry prefix = root.createPrefix(Lists.newArrayList("a", "b"));
+
+    HMSPaths.Entry e1 = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p1"), "A");
+    HMSPaths.Entry e2 = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p2"), "A");
+
+
+    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1"}, true));
+    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1"}, 
+        false));
+    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2"}, true));
+    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2"}, 
+        false));
+    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p2", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p2")));
+
+    root.find(new String[]{"a", "b", "t", "p1"}, true).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
+
+    root.find(new String[]{"a", "b", "t", "p2"}, true).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p2"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
+
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p3")));
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
new file mode 100644
index 0000000..dcd70c1
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import junit.framework.Assert;
+
+import org.apache.sentry.hdfs.service.thrift.TPathsDump;
+import org.apache.thrift.TDeserializer;
+import org.apache.thrift.TException;
+import org.apache.thrift.TSerializer;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class TestHMSPathsFullDump {
+
+  @Test
+  public void testDumpAndInitialize() {
+    HMSPaths hmsPaths = new HMSPaths(new String[] {"/user/hive/warehouse"});
+    hmsPaths._addAuthzObject("db1", Lists.newArrayList("/user/hive/warehouse/db1"));
+    hmsPaths._addAuthzObject("db1.tbl11", Lists.newArrayList("/user/hive/warehouse/db1/tbl11"));
+    hmsPaths._addPathsToAuthzObject("db1.tbl11", Lists.newArrayList(
+        "/user/hive/warehouse/db1/tbl11/part111",
+        "/user/hive/warehouse/db1/tbl11/part112",
+        "/user/hive/warehouse/db1/tbl11/p1=1/p2=x"));
+    
+    Assert.assertEquals("db1", hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11", "part111"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11", "part112"}, false));
+
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11", "p1=1", "p2=x"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11", "p1=1"}, true));
+
+    HMSPathsSerDe serDe = hmsPaths.getPathsDump();
+    TPathsDump pathsDump = serDe.createPathsDump();
+    HMSPaths hmsPaths2 = serDe.initializeFromDump(pathsDump);
+
+    Assert.assertEquals("db1", hmsPaths2.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths2.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths2.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11", "part111"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths2.findAuthzObject(new String[]{"user", "hive", "warehouse", "db1", "tbl11", "part112"}, false));
+  }
+
+  @Test
+  public void testThrftSerialization() throws TException {
+    HMSPaths hmsPaths = new HMSPaths(new String[] {"/"});
+    String prefix = "/user/hive/warehouse/";
+    for (int dbNum = 0; dbNum < 1; dbNum++) {
+      String dbName = "db" + dbNum;
+      hmsPaths._addAuthzObject(dbName, Lists.newArrayList(prefix + dbName));
+      for (int tblNum = 0; tblNum < 1000000; tblNum++) {
+        String tblName = "tbl" + tblNum;
+        hmsPaths._addAuthzObject(dbName + "." + tblName, Lists.newArrayList(prefix + dbName + "/" + tblName));
+        for (int partNum = 0; partNum < 1; partNum++) {
+          String partName = "part" + partNum;
+          hmsPaths
+              ._addPathsToAuthzObject(
+                  dbName + "." + tblName,
+                  Lists.newArrayList(prefix + dbName + "/" + tblName + "/"
+                      + partName));
+        }
+      }
+    }
+    HMSPathsSerDe serDe = hmsPaths.getPathsDump();
+    long t1 = System.currentTimeMillis();
+    TPathsDump pathsDump = serDe.createPathsDump();
+    byte[] ser = new TSerializer(new TCompactProtocol.Factory()).serialize(pathsDump);
+    long serTime = System.currentTimeMillis() - t1;
+    System.out.println("Serialization Time: " + serTime + ", " + ser.length);
+
+    t1 = System.currentTimeMillis();
+    TPathsDump tPathsDump = new TPathsDump();
+    new TDeserializer(new TCompactProtocol.Factory()).deserialize(tPathsDump, ser);
+    HMSPaths fromDump = serDe.initializeFromDump(tPathsDump);
+    System.out.println("Deserialization Time: " + (System.currentTimeMillis() - t1));
+    Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive", "warehouse", "db0", "tbl999"}, false));
+    Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive", "warehouse", "db0", "tbl999", "part5"}, false));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java
new file mode 100644
index 0000000..9d0d366
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.sentry.hdfs.service.thrift.TPathChanges;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+// Unit tests for UpdateableAuthzPaths: exercises full-image replacement
+// (updateFull) and incremental add/delete path changes (updatePartial)
+// against an in-memory HMSPaths authorization tree.
+public class TestUpdateableAuthzPaths {
+
+  @Test
+  public void testFullUpdate() {
+    // Baseline image: db1 with table tbl11 and partitions part111/part112
+    // (see createBaseHMSPaths for the naming scheme).
+    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
+    assertEquals("db1", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertEquals("db1.tbl11", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part111"}));
+    assertEquals("db1.tbl11", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part112"}));
+
+    // Serialize the baseline into a full-image update (seqNum 1) and apply
+    // it to a fresh, empty UpdateableAuthzPaths.
+    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
+    PathsUpdate update = new PathsUpdate(1, true);
+    update.getThriftObject().setPathsDump(authzPaths.getPathsDump().createPathsDump());
+
+    UpdateableAuthzPaths authzPaths2 = new UpdateableAuthzPaths(new String[] {"/"});
+    // A full update must return a NEW instance rather than mutate in place.
+    UpdateableAuthzPaths pre = authzPaths2.updateFull(update);
+    assertFalse(pre == authzPaths2);
+    authzPaths2 = pre;
+
+    // The replacement instance must reflect the serialized baseline.
+    assertEquals("db1", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertEquals("db1.tbl11", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part111"}));
+    assertEquals("db1.tbl11", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part112"}));
+
+    // Ensure Full Update wipes old stuff
+    UpdateableAuthzPaths authzPaths3 = new UpdateableAuthzPaths(createBaseHMSPaths(2, 1));
+    update = new PathsUpdate(2, true);
+    update.getThriftObject().setPathsDump(authzPaths3.getPathsDump().createPathsDump());
+    pre = authzPaths2.updateFull(update);
+    assertFalse(pre == authzPaths2);
+    authzPaths2 = pre;
+
+    // db1 entries must be gone after the second full image (which only
+    // contains db2) was applied.
+    assertNull(authzPaths2.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertNull(authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+
+    assertEquals("db2", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2"}));
+    assertEquals("db2.tbl21", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2", "tbl21"}));
+    assertEquals("db2.tbl21", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2", "tbl21", "part211"}));
+    assertEquals("db2.tbl21", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2", "tbl21", "part212"}));
+  }
+
+  @Test
+  public void testPartialUpdateAddPath() {
+    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
+    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
+    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+    // Create table
+    PathsUpdate update = new PathsUpdate(2, false);
+    TPathChanges pathChange = update.newPathChange("db1.tbl12");
+    pathChange.addToAddPaths(PathsUpdate.cleanPath("file:///db1/tbl12"));
+    authzPaths.updatePartial(Lists.newArrayList(update), lock);
+    
+    // Add partition
+    update = new PathsUpdate(3, false);
+    pathChange = update.newPathChange("db1.tbl12");
+    pathChange.addToAddPaths(PathsUpdate.cleanPath("file:///db1/tbl12/part121"));
+    authzPaths.updatePartial(Lists.newArrayList(update), lock);
+
+    // Ensure no change in existing Paths
+    assertEquals("db1", authzPaths.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part111"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part112"}));
+
+    // Verify new Paths
+    assertEquals("db1.tbl12", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl12"}));
+    assertEquals("db1.tbl12", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl12", "part121"}));
+  }
+
+  @Test
+  public void testPartialUpdateDelPath() {
+    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
+    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
+    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part111"}));
+    
+    // Drop partition
+    PathsUpdate update = new PathsUpdate(2, false);
+    TPathChanges pathChange = update.newPathChange("db1.tbl11");
+    pathChange.addToDelPaths(PathsUpdate.cleanPath("file:///db1/tbl11/part111"));
+    authzPaths.updatePartial(Lists.newArrayList(update), lock);
+
+    // Verify Paths deleted
+    assertNull(authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part111"}));
+
+    // Verify rest ok
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part112"}));
+  }
+
+  // Builds an HMSPaths rooted at "/" containing one database ("db<dbNum>"),
+  // one table ("tbl<dbNum><tblNum>") and two partitions of that table
+  // ("part<dbNum><tblNum>1" and "...2").
+  private HMSPaths createBaseHMSPaths(int dbNum, int tblNum) {
+    String db = "db" + dbNum;
+    String tbl = "tbl" + dbNum + "" + tblNum;
+    String fullTbl = db + "." + tbl;
+    String dbPath = "/" + db;
+    String tblPath = "/" + db + "/" + tbl;
+    String partPath = tblPath + "/part" + dbNum + "" + tblNum;
+    HMSPaths hmsPaths = new HMSPaths(new String[] {"/"});
+    hmsPaths._addAuthzObject(db, Lists.newArrayList(dbPath));
+    hmsPaths._addAuthzObject(fullTbl, Lists.newArrayList(tblPath));
+    hmsPaths._addPathsToAuthzObject(fullTbl, Lists.newArrayList(
+        partPath + "1", partPath + "2" ));
+    return hmsPaths;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-common/src/test/resources/hdfs-sentry.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/test/resources/hdfs-sentry.xml b/sentry-hdfs/sentry-hdfs-common/src/test/resources/hdfs-sentry.xml
new file mode 100644
index 0000000..c23a431
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-common/src/test/resources/hdfs-sentry.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+  <!-- dummy file that gets rewritten by testcases in target test classpath -->
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/.gitignore
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/.gitignore b/sentry-hdfs/sentry-hdfs-namenode-plugin/.gitignore
new file mode 100644
index 0000000..91ad75b
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/.gitignore
@@ -0,0 +1,18 @@
+*.class
+target/
+.classpath
+.project
+.settings
+.metadata
+.idea/
+*.iml
+derby.log
+datanucleus.log
+sentry-core/sentry-core-common/src/gen
+**/TempStatsStore/
+# Package Files #
+*.jar
+*.war
+*.ear
+test-output/
+maven-repo/

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml b/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml
new file mode 100644
index 0000000..4e5f6b0
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml
@@ -0,0 +1,73 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.sentry</groupId>
+    <artifactId>sentry-hdfs</artifactId>
+    <version>1.5.0-incubating-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>sentry-hdfs-namenode-plugin</artifactId>
+  <name>Sentry HDFS Namenode Plugin</name>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs-common</artifactId>
+      <version>1.5.0-incubating-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-service-client</artifactId>
+      <version>1.5.0-incubating-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs-service</artifactId>
+      <version>1.5.0-incubating-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
new file mode 100644
index 0000000..9f219ce
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+// Configuration keys and defaults for the Sentry HDFS NameNode
+// authorization plugin. Keys are read from hdfs-sentry.xml (CONFIG_FILE)
+// layered over the NameNode configuration.
+public class SentryAuthorizationConstants {
+
+  // Resource name of the plugin's own config file, loaded from the classpath.
+  public static final String CONFIG_FILE = "hdfs-sentry.xml";
+
+  public static final String CONFIG_PREFIX = "sentry.authorization-provider.";
+
+  // Owner user reported for HDFS paths that map to a Sentry-managed object.
+  public static final String HDFS_USER_KEY = CONFIG_PREFIX + "hdfs-user";
+  public static final String HDFS_USER_DEFAULT = "hive";
+
+  // Owner group reported for HDFS paths that map to a Sentry-managed object.
+  public static final String HDFS_GROUP_KEY = CONFIG_PREFIX + "hdfs-group";
+  public static final String HDFS_GROUP_DEFAULT = "hive";
+
+  // Permission bits (octal) reported for Sentry-managed paths.
+  public static final String HDFS_PERMISSION_KEY = CONFIG_PREFIX + 
+      "hdfs-permission";
+  public static final long HDFS_PERMISSION_DEFAULT = 0770;
+
+  // Path prefixes under which Sentry authorization is enforced; empty by
+  // default, which disables enforcement entirely.
+  public static final String HDFS_PATH_PREFIXES_KEY = CONFIG_PREFIX + 
+      "hdfs-path-prefixes";
+  public static final String[] HDFS_PATH_PREFIXES_DEFAULT = new String[0];
+
+  // How often the cached authz state is refreshed from the Sentry service.
+  public static final String CACHE_REFRESH_INTERVAL_KEY = CONFIG_PREFIX + 
+      "cache-refresh-interval.ms";
+  public static final int CACHE_REFRESH_INTERVAL_DEFAULT = 500;
+
+  // Age after which the cached authz state is considered stale.
+  public static final String CACHE_STALE_THRESHOLD_KEY = CONFIG_PREFIX + 
+      "cache-stale-threshold.ms";
+  public static final int CACHE_STALE_THRESHOLD_DEFAULT = 60 * 1000;
+
+  // Wait before retrying a refresh after a failed update attempt.
+  public static final String CACHE_REFRESH_RETRY_WAIT_KEY = CONFIG_PREFIX +
+      "cache-refresh-retry-wait.ms";
+  public static final int CACHE_REFRESH_RETRY_WAIT_DEFAULT = 30 * 1000;
+
+  // Whether the original HDFS permissions are also exposed as ACL entries
+  // on Sentry-managed paths.
+  public static final String INCLUDE_HDFS_AUTHZ_AS_ACL_KEY = CONFIG_PREFIX + 
+      "include-hdfs-authz-as-acl";
+  public static final boolean INCLUDE_HDFS_AUTHZ_AS_ACL_DEFAULT = true;
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java
new file mode 100644
index 0000000..23e06dd
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sentry.hdfs.SentryHDFSServiceClient.SentryAuthzUpdate;
+import org.apache.sentry.hdfs.Updateable.Update;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+
+// Holds the NameNode-side cache of Sentry authorization state (paths and
+// permissions) and keeps it refreshed from the Sentry service on a
+// single-threaded scheduled executor. Readers (isManaged,
+// doesBelongToAuthzObject, getAclEntries) take the read lock; the refresh
+// thread takes the write lock only when swapping in a full-image update.
+public class SentryAuthorizationInfo implements Runnable {
+  // NOTE(review): conventionally this would be "private static final".
+  private static Logger LOG =
+      LoggerFactory.getLogger(SentryAuthorizationInfo.class);
+
+  private SentryUpdater updater;
+  private volatile UpdateableAuthzPaths authzPaths;
+  private volatile UpdateableAuthzPermissions authzPermissions;
+
+  private int refreshIntervalMillisec;
+  private int staleThresholdMillisec;
+  private int retryWaitMillisec;
+  private ScheduledExecutorService executor;
+  private volatile long lastUpdate;
+  private volatile long waitUntil;
+  private volatile long lastStaleReport;
+  // We don't need a re-entrant lock, but we do need a ReadWriteLock;
+  // unfortunately, ReentrantReadWriteLock is the only concrete
+  // implementation of ReadWriteLock available in the JDK.
+  private final ReadWriteLock lock = new ReentrantReadWriteLock();
+
+  // No-arg constructor for test mocks only; leaves all state uninitialized.
+  @VisibleForTesting
+  SentryAuthorizationInfo() {}
+
+  // Reads plugin configuration. If no path prefixes are configured, the
+  // instance stays inert (authzPaths remains null and start() is a no-op).
+  public SentryAuthorizationInfo(Configuration conf) throws Exception {
+    String[] pathPrefixes = conf.getTrimmedStrings(
+        SentryAuthorizationConstants.HDFS_PATH_PREFIXES_KEY, 
+        SentryAuthorizationConstants.HDFS_PATH_PREFIXES_DEFAULT);
+    if (pathPrefixes.length == 0) {
+      // NOTE(review): log message reads "There are not HDFS path prefixes";
+      // should be "no" — runtime string, left untouched here.
+      LOG.warn("There are not HDFS path prefixes configured in [{}], "
+          + "Sentry authorization won't be enforced on any HDFS location",
+          SentryAuthorizationConstants.HDFS_PATH_PREFIXES_KEY);
+    } else {
+      refreshIntervalMillisec = conf.getInt(
+          SentryAuthorizationConstants.CACHE_REFRESH_INTERVAL_KEY,
+          SentryAuthorizationConstants.CACHE_REFRESH_INTERVAL_DEFAULT);
+      staleThresholdMillisec = conf.getInt(
+          SentryAuthorizationConstants.CACHE_STALE_THRESHOLD_KEY,
+          SentryAuthorizationConstants.CACHE_STALE_THRESHOLD_DEFAULT);
+      retryWaitMillisec = conf.getInt(
+          SentryAuthorizationConstants.CACHE_REFRESH_RETRY_WAIT_KEY,
+          SentryAuthorizationConstants.CACHE_REFRESH_RETRY_WAIT_DEFAULT);
+
+      LOG.debug("Sentry authorization will enforced in the following HDFS " +
+          "locations: [{}]", StringUtils.arrayToString(pathPrefixes));
+      LOG.debug("Refresh interval [{}]ms, retry wait [{}], stale threshold " +
+              "[{}]ms", new Object[] 
+          {refreshIntervalMillisec, retryWaitMillisec, staleThresholdMillisec});
+
+      authzPaths = new UpdateableAuthzPaths(pathPrefixes);
+      authzPermissions = new UpdateableAuthzPermissions();
+      waitUntil = System.currentTimeMillis();
+      lastStaleReport = 0;
+      updater = new SentryUpdater(conf, this);
+    }
+  }
+
+  UpdateableAuthzPaths getAuthzPaths() {
+    return authzPaths;
+  }
+
+  UpdateableAuthzPermissions getAuthzPermissions() {
+    return authzPermissions;
+  }
+
+  // Pulls pending updates from the Sentry service and applies them. Full
+  // images produce replacement instances, which are swapped in under the
+  // write lock so readers never observe a half-applied image.
+  private void update() {
+    SentryAuthzUpdate updates = updater.getUpdates();
+    UpdateableAuthzPaths newAuthzPaths = processUpdates(
+        updates.getPathUpdates(), authzPaths);
+    UpdateableAuthzPermissions newAuthzPerms = processUpdates(
+        updates.getPermUpdates(), authzPermissions);
+    // If there were any FULL updates the returned instance would be
+    // different
+    if ((newAuthzPaths != authzPaths)||(newAuthzPerms != authzPermissions)) {
+      lock.writeLock().lock();
+      try {
+        authzPaths = newAuthzPaths;
+        // NOTE(review): "#####" warn-level messages look like leftover debug
+        // output; consider demoting to debug.
+        LOG.warn("##### FULL Updated paths seq Num [" + authzPaths.getLastUpdatedSeqNum() + "]");
+        authzPermissions = newAuthzPerms;
+        LOG.warn("##### FULL Updated perms seq Num [" + authzPermissions.getLastUpdatedSeqNum() + "]");
+      } finally {
+        lock.writeLock().unlock();
+      }
+    }
+
+  }
+
+  // Applies a batch of updates to an Updateable. A full image (always first
+  // in the list, if present) yields a new instance via updateFull; the
+  // remaining partial updates mutate that instance under `lock`.
+  // NOTE(review): the (V) cast below is unchecked — updateFull's declared
+  // return type presumably matches V; confirm against Updateable.
+  private <K extends Update, V extends Updateable<K>> V processUpdates(List<K> updates,
+      V updateable) {
+    // In a list of Updates, if there is a full Update, it will be the first
+    // one in the List.. all the remaining will be partial updates
+    if (updates.size() > 0) {
+      if (updates.get(0).hasFullImage()) {
+        updateable = (V)updateable.updateFull(updates.remove(0));
+      }
+      // Any more elements ?
+      if (!updates.isEmpty()) {
+        updateable.updatePartial(updates, lock);
+      }
+    }
+    return updateable;
+  }
+
+  // Periodic refresh task. On failure, pushes waitUntil forward so the next
+  // scheduled run sleeps out the retry interval before trying again.
+  public void run() {
+    try {
+      // In case of a previous update failure, sleep for the retry wait
+      // interval. This is safe because we run on a single-threaded executor
+      // scheduled with a fixed delay, so runs never overlap.
+      long currTime = System.currentTimeMillis();
+      if (waitUntil > currTime) {
+        Thread.sleep(waitUntil - currTime);
+      }
+      update();
+      // we reset lastUpdate only on successful pulling
+      lastUpdate = System.currentTimeMillis();
+      waitUntil = lastUpdate;
+    } catch (Exception ex) {
+      LOG.warn("Failed to update, will retry in [{}]ms, error: ", 
+          new Object[]{ retryWaitMillisec, ex.getMessage(), ex});
+      waitUntil = System.currentTimeMillis() + retryWaitMillisec;
+    }
+  }
+
+  // Performs one synchronous initial update, then schedules this Runnable
+  // at a fixed delay on a daemon single-thread executor. No-op when no path
+  // prefixes were configured (authzPaths == null).
+  public void start() {
+    if (authzPaths != null) {
+      try {
+        update();
+      } catch (Exception ex) {
+        LOG.warn("Failed to do initial update, will retry in [{}]ms, error: ",
+            new Object[]{retryWaitMillisec, ex.getMessage(), ex});
+        waitUntil = System.currentTimeMillis() + retryWaitMillisec;
+      }
+      executor = Executors.newSingleThreadScheduledExecutor(
+          new ThreadFactory() {
+            @Override
+            public Thread newThread(Runnable r) {
+              Thread t = new Thread(r, SentryAuthorizationInfo.class.getName() +
+                  "-refresher");
+              t.setDaemon(true);
+              return t;
+            }
+          }
+      );
+      executor.scheduleWithFixedDelay(this, refreshIntervalMillisec, 
+          refreshIntervalMillisec, TimeUnit.MILLISECONDS);
+    }
+  }
+
+  public void stop() {
+    if (authzPaths != null) {
+      executor.shutdownNow();
+    }
+  }
+
+  // True when the cache has not been refreshed within the stale threshold.
+  // Also rate-limits the staleness warning to one report every 30s.
+  public boolean isStale() {
+    long now = System.currentTimeMillis();
+    boolean stale = now - lastUpdate > staleThresholdMillisec;
+    if (stale && now - lastStaleReport > 30 * 1000) {
+      LOG.warn("Authorization information has been stale for [{}]s", 
+          (now - lastUpdate) / 1000);
+      lastStaleReport = now;
+    }
+    return stale;
+  }
+
+  // True when the path falls under one of the configured managed prefixes.
+  public boolean isManaged(String[] pathElements) {
+    lock.readLock().lock();
+    try {
+      return authzPaths.isUnderPrefix(pathElements);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  // True when the path maps to a Sentry authorization object (db/table/...).
+  public boolean doesBelongToAuthzObject(String[] pathElements) {
+    lock.readLock().lock();
+    try {
+      return authzPaths.findAuthzObject(pathElements) != null;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  // Returns the Sentry-derived ACL entries for the path, or an empty list
+  // when the path does not map to an authorization object.
+  @SuppressWarnings("unchecked")
+  public List<AclEntry> getAclEntries(String[] pathElements) {
+    lock.readLock().lock();
+    try {
+      String authzObj = authzPaths.findAuthzObject(pathElements);
+      return (authzObj != null) ? authzPermissions.getAcls(authzObj) 
+          : Collections.EMPTY_LIST;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
new file mode 100644
index 0000000..3edd5fa
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
@@ -0,0 +1,370 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.server.namenode.AclFeature;
+import org.apache.hadoop.hdfs.server.namenode.AuthorizationProvider;
+import org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider;
+import org.apache.hadoop.security.AccessControlException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+
+public class SentryAuthorizationProvider 
+    extends AuthorizationProvider implements Configurable {
+  
+  // Marker subclass so Sentry-synthesized ACL features can be told apart
+  // from ACLs set directly by users on the NameNode.
+  static class SentryAclFeature extends AclFeature {
+    public SentryAclFeature(ImmutableList<AclEntry> entries) {
+      super(entries);
+    }
+  }
+
+  private static Logger LOG = 
+      LoggerFactory.getLogger(SentryAuthorizationProvider.class);
+
+  private boolean started;
+  private Configuration conf;
+  // Fallback provider used for paths not managed by Sentry.
+  private AuthorizationProvider defaultAuthzProvider;
+  // Synthetic owner/group/permission reported for Sentry-managed paths.
+  private String user;
+  private String group;
+  private FsPermission permission;
+  private boolean originalAuthzAsAcl;
+  private SentryAuthorizationInfo authzInfo;
+
+  public SentryAuthorizationProvider() {
+    this(null);
+  }
+
+  // Test hook: inject a pre-built SentryAuthorizationInfo; when null, one is
+  // created from configuration in start().
+  @VisibleForTesting
+  SentryAuthorizationProvider(SentryAuthorizationInfo authzInfo) {
+    this.authzInfo = authzInfo;
+  }
+  
+  // Configurable implementation: the NN hands us its configuration here,
+  // before start() is invoked.
+  @Override
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+
+  /**
+   * Starts the provider: requires HDFS ACLs to be enabled, starts the
+   * default provider used for unmanaged paths, reads plugin settings from
+   * hdfs-sentry.xml layered over the NN configuration, and starts the
+   * Sentry authorization-info refresher. Any failure is rethrown as a
+   * RuntimeException, which aborts NameNode startup.
+   */
+  @Override
+  public synchronized void start() {
+    if (started) {
+      throw new IllegalStateException("Provider already started");
+    }
+    started = true;
+    try {
+      if (!conf.getBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, false)) {
+        throw new RuntimeException("HDFS ACLs must be enabled");
+      }
+
+      defaultAuthzProvider = new DefaultAuthorizationProvider();
+      defaultAuthzProvider.start();
+      // Configuration is read from hdfs-sentry.xml and NN configuration, in
+      // that order of precedence.
+      Configuration conf = new Configuration(this.conf);
+      conf.addResource(SentryAuthorizationConstants.CONFIG_FILE);
+      user = conf.get(SentryAuthorizationConstants.HDFS_USER_KEY,
+          SentryAuthorizationConstants.HDFS_USER_DEFAULT);
+      group = conf.get(SentryAuthorizationConstants.HDFS_GROUP_KEY,
+          SentryAuthorizationConstants.HDFS_GROUP_DEFAULT);
+      permission = FsPermission.createImmutable(
+          (short) conf.getLong(SentryAuthorizationConstants.HDFS_PERMISSION_KEY,
+              SentryAuthorizationConstants.HDFS_PERMISSION_DEFAULT)
+      );
+      originalAuthzAsAcl = conf.getBoolean(
+          SentryAuthorizationConstants.INCLUDE_HDFS_AUTHZ_AS_ACL_KEY,
+          SentryAuthorizationConstants.INCLUDE_HDFS_AUTHZ_AS_ACL_DEFAULT);
+
+      LOG.info("Starting");
+      LOG.info("Config: hdfs-user[{}] hdfs-group[{}] hdfs-permission[{}] " +
+          "include-hdfs-authz-as-acl[{}]", new Object[]
+          {user, group, permission, originalAuthzAsAcl});
+
+      if (authzInfo == null) {
+        authzInfo = new SentryAuthorizationInfo(conf);
+      }
+      authzInfo.start();
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  // Stops the refresher and the wrapped default provider; the default
+  // provider reference is cleared so further delegation would fail fast.
+  @Override
+  public synchronized void stop() {
+    LOG.debug("Stopping");
+    authzInfo.stop();
+    defaultAuthzProvider.stop();
+    defaultAuthzProvider = null;
+  }
+
+  // Snapshot bookkeeping and permission checking are delegated unchanged to
+  // the default HDFS provider; Sentry only overrides ownership/permission
+  // reporting for managed paths. ("setSnaphottableDirs" spelling comes from
+  // the AuthorizationProvider interface and cannot be renamed here.)
+  @Override
+  public void setSnaphottableDirs(Map<INodeAuthorizationInfo, Integer>
+      snapshotableDirs) {
+    defaultAuthzProvider.setSnaphottableDirs(snapshotableDirs);
+  }
+
+  @Override
+  public void addSnapshottable(INodeAuthorizationInfo dir) {
+    defaultAuthzProvider.addSnapshottable(dir);
+  }
+
+  @Override
+  public void removeSnapshottable(INodeAuthorizationInfo dir) {
+    defaultAuthzProvider.removeSnapshottable(dir);
+  }
+
+  @Override
+  public void createSnapshot(INodeAuthorizationInfo dir, int snapshotId)
+      throws IOException{
+    defaultAuthzProvider.createSnapshot(dir, snapshotId);
+  }
+
+  @Override
+  public void removeSnapshot(INodeAuthorizationInfo dir, int snapshotId)
+      throws IOException {
+    defaultAuthzProvider.removeSnapshot(dir, snapshotId);
+  }
+
+  @Override
+  public void checkPermission(String user, Set<String> groups,
+      INodeAuthorizationInfo[] inodes, int snapshotId,
+      boolean doCheckOwner, FsAction ancestorAccess, FsAction parentAccess,
+      FsAction access, FsAction subAccess, boolean ignoreEmptyDir)
+      throws AccessControlException, UnresolvedLinkException {
+    defaultAuthzProvider.checkPermission(user, groups, inodes, snapshotId,
+        doCheckOwner, ancestorAccess, parentAccess, access, subAccess,
+        ignoreEmptyDir);
+  }
+
+  private static final String[] EMPTY_STRING_ARRAY = new String[0];
+  
+  // Returns the path components of the inode from the root down, e.g.
+  // /a/b/c -> ["a", "b", "c"].
+  private String[] getPathElements(INodeAuthorizationInfo node) {
+    return getPathElements(node, 0);
+  }
+
+  // Recursive helper: idx counts components below `node`; the array is
+  // allocated once at the root and filled right-to-left on the way back
+  // down the recursion. The root itself contributes no component.
+  private String[] getPathElements(INodeAuthorizationInfo node, int idx) {
+    String[] paths;
+    INodeAuthorizationInfo parent = node.getParent();
+    if (parent == null) {
+      paths = (idx > 0) ? new String[idx] : EMPTY_STRING_ARRAY;
+    } else {
+      paths = getPathElements(parent, idx + 1);
+      paths[paths.length - 1 - idx] = node.getLocalName();
+    }
+    return paths;
+  }
+
+  // Writes always go to the default provider; Sentry only changes what is
+  // reported on reads.
+  @Override
+  public void setUser(INodeAuthorizationInfo node, String user) {
+    defaultAuthzProvider.setUser(node, user);
+  }
+
+  // Reports the configured Sentry user for paths that belong to a managed
+  // authz object; falls back to the default provider otherwise. When the
+  // authz cache is stale, managed-prefix paths conservatively report the
+  // Sentry user.
+  @Override
+  public String getUser(INodeAuthorizationInfo node, int snapshotId) {
+    String user;
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      user = defaultAuthzProvider.getUser(node, snapshotId);
+    } else {
+      if (!authzInfo.isStale()) {
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          user = this.user;
+        } else {
+          user = defaultAuthzProvider.getUser(node, snapshotId);
+        }
+      } else {
+        user = this.user;
+      }
+    }
+    return user;
+  }
+
  /** Group changes always go to the default provider; Sentry never stores them. */
  @Override
  public void setGroup(INodeAuthorizationInfo node, String group) {
    defaultAuthzProvider.setGroup(node, group);
  }
+
+  @Override
+  public String getGroup(INodeAuthorizationInfo node, int snapshotId) {
+    String group;
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      group = defaultAuthzProvider.getGroup(node, snapshotId);
+    } else {
+      if (!authzInfo.isStale()) {
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          group = this.group;
+        } else {
+          group = defaultAuthzProvider.getGroup(node, snapshotId);
+        }
+      } else {
+        group = this.group;
+      }
+    }
+    return group;
+  }
+
  /** Permission changes always go to the default provider; Sentry never stores them. */
  @Override
  public void setPermission(INodeAuthorizationInfo node,
      FsPermission permission) {
    defaultAuthzProvider.setPermission(node, permission);
  }
+
+  @Override
+  public FsPermission getFsPermission(
+      INodeAuthorizationInfo node, int snapshotId) {
+    FsPermission permission;
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      permission = defaultAuthzProvider.getFsPermission(node, snapshotId);
+    } else {
+      if (!authzInfo.isStale()) {
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          permission = this.permission;
+        } else {
+          permission = defaultAuthzProvider.getFsPermission(node, snapshotId);
+        }
+      } else {
+        permission = this.permission;
+      }
+    }
+    return permission;
+  }
+
+  private List<AclEntry> createAclEntries(String user, String group,
+      FsPermission permission) {
+    List<AclEntry> list = new ArrayList<AclEntry>();
+    AclEntry.Builder builder = new AclEntry.Builder();
+    FsPermission fsPerm = new FsPermission(permission);
+    builder.setName(user);
+    builder.setType(AclEntryType.USER);
+    builder.setScope(AclEntryScope.ACCESS);
+    builder.setPermission(fsPerm.getUserAction());
+    list.add(builder.build());
+    builder.setName(group);
+    builder.setType(AclEntryType.GROUP);
+    builder.setScope(AclEntryScope.ACCESS);
+    builder.setPermission(fsPerm.getGroupAction());
+    list.add(builder.build());
+    builder.setName(null);
+    builder.setType(AclEntryType.OTHER);
+    builder.setScope(AclEntryScope.ACCESS);
+    builder.setPermission(fsPerm.getOtherAction());
+    list.add(builder.build());
+    return list;
+  }
+
+  @Override
+  public AclFeature getAclFeature(INodeAuthorizationInfo node, int snapshotId) {
+    AclFeature f = null;
+    String[] pathElements = getPathElements(node);
+    String p = Arrays.toString(pathElements);
+    boolean isManaged = false;
+    boolean isStale = false;
+    boolean hasAuthzObj = false;
+    if (!authzInfo.isManaged(pathElements)) {
+      isManaged = false;
+      f = defaultAuthzProvider.getAclFeature(node, snapshotId);
+    } else {
+      isManaged = true;
+      List<AclEntry> list = new ArrayList<AclEntry>();
+      if (originalAuthzAsAcl) {
+        String user = defaultAuthzProvider.getUser(node, snapshotId);
+        String group = defaultAuthzProvider.getGroup(node, snapshotId);
+        INodeAuthorizationInfo pNode = node.getParent();
+        while  (group == null || pNode != null) {
+          group = defaultAuthzProvider.getGroup(pNode, snapshotId);
+          pNode = pNode.getParent();
+        }
+        FsPermission perm = defaultAuthzProvider.getFsPermission(node, snapshotId);
+        list.addAll(createAclEntries(user, group, perm));
+      }
+      if (!authzInfo.isStale()) { 
+        isStale = false;
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          hasAuthzObj = true;
+          list.addAll(authzInfo.getAclEntries(pathElements));
+          f = new SentryAclFeature(ImmutableList.copyOf(list));
+        } else {
+          hasAuthzObj = false;
+          f = defaultAuthzProvider.getAclFeature(node, snapshotId);
+        }
+      } else {
+        isStale = true;
+        f = new SentryAclFeature(ImmutableList.copyOf(list));
+      }
+    }
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("### getAclEntry [" + (p == null ? "null" : p) + "] : ["
+          + "isManaged=" + isManaged
+          + ",isStale=" + isStale
+          + ",hasAuthzObj=" + hasAuthzObj
+          + ",origAtuhzAsAcl=" + originalAuthzAsAcl + "]"
+          + "[" + (f == null ? "null" : f.getEntries()) + "]");
+    }
+    return f;
+  }
+
  /**
   * Removes the node's ACL feature via the default provider, but only when
   * the feature was NOT synthesized by Sentry — a SentryAclFeature exists
   * only in this provider's view and has nothing to remove underneath.
   *
   * NOTE(review): getAclFeature(CURRENT_STATE_ID) is dereferenced without a
   * null check — this assumes HDFS only calls removeAclFeature on nodes
   * that currently have an ACL feature; confirm against the NN caller.
   */
  @Override
  public void removeAclFeature(INodeAuthorizationInfo node) {
    AclFeature aclFeature = node.getAclFeature(CURRENT_STATE_ID);
    if (aclFeature.getClass() != SentryAclFeature.class) {
      defaultAuthzProvider.removeAclFeature(node);
    }
  }
+
  /**
   * Stores an ACL feature via the default provider for unmanaged paths only.
   * For Sentry-managed paths the ACL is intentionally dropped: their ACLs
   * are derived from Sentry permissions, not persisted by the NameNode.
   */
  @Override
  public void addAclFeature(INodeAuthorizationInfo node, AclFeature f) {
    String[] pathElements = getPathElements(node);
    if (!authzInfo.isManaged(pathElements)) {
      defaultAuthzProvider.addAclFeature(node, f);
    }
  }
+
+//  @Override 
+//  public boolean doesAllowChanges(INodeAuthorizationInfo node) {
+//    String[] pathElements = getPathElements(node);
+//    if (!authzInfo.isManaged(pathElements)) {
+//      return defaultAuthzProvider.doesAllowChanges(node);
+//    }
+//    return !authzInfo.doesBelongToAuthzObject(getPathElements(node));
+//  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
new file mode 100644
index 0000000..7461f89
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+
+import com.google.common.collect.Lists;
+
/**
 * In-memory model of the Sentry permissions relevant to HDFS: a map of
 * authorizable object name to per-role {@link FsAction}s, plus a map of
 * role name to member groups. {@link #getAcls(String)} joins the two to
 * produce the HDFS group ACL entries for one authz object.
 *
 * NOTE(review): instances are not internally synchronized, and several
 * accessors (getAllPermissions, getAllGroups, getAllPrivileges, getAllRoles,
 * getPrivilegeInfo, getRoleInfo) hand out internal mutable collections that
 * callers mutate in place — presumably callers serialize access externally;
 * verify against UpdateableAuthzPermissions before changing this.
 */
public class SentryPermissions implements AuthzPermissions {
  
  /** Per-authz-object record of which role holds which FsAction. */
  public static class PrivilegeInfo {
    private final String authzObj;
    // role name -> accumulated FsAction granted to that role on authzObj
    private final Map<String, FsAction> roleToPermission = new HashMap<String, FsAction>();
    public PrivilegeInfo(String authzObj) {
      this.authzObj = authzObj;
    }
    /** Sets (replaces) the role's permission; returns this for chaining. */
    public PrivilegeInfo setPermission(String role, FsAction perm) {
      roleToPermission.put(role, perm);
      return this;
    }
    /** Drops the role's permission; returns this for chaining. */
    public PrivilegeInfo removePermission(String role) {
      roleToPermission.remove(role);
      return this;
    }
    /** Returns the role's permission, or null if the role has none here. */
    public FsAction getPermission(String role) {
      return roleToPermission.get(role);
    }
    // Returns the live internal map (callers mutate it in place).
    public Map<String, FsAction> getAllPermissions() {
      return roleToPermission;
    }
    public String getAuthzObj() {
      return authzObj;
    }
  }

  /** A role and the set of groups that belong to it. */
  public static class RoleInfo {
    private final String role;
    private final Set<String> groups = new HashSet<String>();
    public RoleInfo(String role) {
      this.role = role;
    }
    /** Adds a member group; returns this for chaining. */
    public RoleInfo addGroup(String group) {
      groups.add(group);
      return this;
    }
    /** Removes a member group; returns this for chaining. */
    public RoleInfo delGroup(String group) {
      groups.remove(group);
      return this;
    }
    public String getRole() {
      return role;
    }
    // Returns the live internal set (callers mutate it in place).
    public Set<String> getAllGroups() {
      return groups;
    }
  }

  // authz object name -> its privilege record
  private final Map<String, PrivilegeInfo> privileges = new HashMap<String, PrivilegeInfo>();
  // role name -> its group membership record
  private final Map<String, RoleInfo> roles = new HashMap<String, RoleInfo>();

  /**
   * Computes the group ACL entries for one authz object by resolving each
   * role's permission through its member groups (OR-merging permissions when
   * several roles reach the same group). Unknown objects yield an empty list.
   */
  @Override
  public List<AclEntry> getAcls(String authzObj) {
    PrivilegeInfo privilegeInfo = privileges.get(authzObj);
    Map<String, FsAction> groupPerms = new HashMap<String, FsAction>();
    if (privilegeInfo != null) {
      for (Map.Entry<String, FsAction> privs : privilegeInfo
          .getAllPermissions().entrySet()) {
        constructAclEntry(privs.getKey(), privs.getValue(), groupPerms);
      }
    }
    List<AclEntry> retList = new LinkedList<AclEntry>();
    for (Map.Entry<String, FsAction> groupPerm : groupPerms.entrySet()) {
      AclEntry.Builder builder = new AclEntry.Builder();
      builder.setName(groupPerm.getKey());
      builder.setType(AclEntryType.GROUP);
      builder.setScope(AclEntryScope.ACCESS);
      FsAction action = groupPerm.getValue(); 
      // Any read or write access on a directory also needs EXECUTE to
      // traverse it, so promote R/W/RW to include X.
      if ((action == FsAction.READ) || (action == FsAction.WRITE)
          || (action == FsAction.READ_WRITE)) {
        action = action.or(FsAction.EXECUTE);
      }
      builder.setPermission(action);
      retList.add(builder.build());
    }
    return retList;
  }

  /**
   * Fans one role's permission out to all of its member groups, OR-merging
   * into any permission a group already accumulated. Unknown roles are
   * silently skipped.
   */
  private void constructAclEntry(String role, FsAction permission,
      Map<String, FsAction> groupPerms) {
    RoleInfo roleInfo = roles.get(role);
    if (roleInfo != null) {
      for (String group : roleInfo.groups) {
        FsAction fsAction = groupPerms.get(group);
        if (fsAction == null) {
          fsAction = FsAction.NONE;
        }
        groupPerms.put(group, fsAction.or(permission));
      }
    }
  }

  /** Returns the live privilege record for an authz object, or null. */
  public PrivilegeInfo getPrivilegeInfo(String authzObj) {
    return privileges.get(authzObj);
  }

  // Live view over all privilege records (package-private for updaters).
  Collection<PrivilegeInfo> getAllPrivileges() {
    return privileges.values();
  }

  // Live view over all role records (package-private for updaters).
  Collection<RoleInfo> getAllRoles() {
    return roles.values();
  }

  public void delPrivilegeInfo(String authzObj) {
    privileges.remove(authzObj);
  }

  public void addPrivilegeInfo(PrivilegeInfo privilegeInfo) {
    privileges.put(privilegeInfo.authzObj, privilegeInfo);
  }

  /** Returns the live role record, or null if the role is unknown. */
  public RoleInfo getRoleInfo(String role) {
    return roles.get(role);
  }

  public void delRoleInfo(String role) {
    roles.remove(role);
  }

  public void addRoleInfo(RoleInfo roleInfo) {
    roles.put(roleInfo.role, roleInfo);
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java
new file mode 100644
index 0000000..905553e
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sentry.hdfs.SentryHDFSServiceClient.SentryAuthzUpdate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class SentryUpdater {
+
+  private SentryHDFSServiceClient sentryClient;
+  private final Configuration conf;
+  private final SentryAuthorizationInfo authzInfo;
+
+  private static Logger LOG = LoggerFactory.getLogger(SentryUpdater.class);
+
+  public SentryUpdater(Configuration conf, SentryAuthorizationInfo authzInfo) throws Exception {
+    this.conf = conf;
+    this.authzInfo = authzInfo;
+  }
+
+  public SentryAuthzUpdate getUpdates() {
+    if (sentryClient == null) {
+      try {
+        sentryClient = new SentryHDFSServiceClient(conf);
+      } catch (Exception e) {
+        LOG.error("Error connecting to Sentry ['{}'] !!",
+            e.getMessage());
+        return null;
+      }
+    }
+    try {
+      SentryAuthzUpdate sentryUpdates = sentryClient.getAllUpdatesFrom(
+          authzInfo.getAuthzPermissions().getLastUpdatedSeqNum() + 1,
+          authzInfo.getAuthzPaths().getLastUpdatedSeqNum() + 1);
+      return sentryUpdates;
+    } catch (Exception e)  {
+      sentryClient = null;
+      LOG.error("Error receiving updates from Sentry !!", e);
+      return null;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
new file mode 100644
index 0000000..90192dc
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
@@ -0,0 +1,198 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReadWriteLock;
+
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.sentry.core.model.db.AccessConstants;
+import org.apache.sentry.hdfs.SentryPermissions.PrivilegeInfo;
+import org.apache.sentry.hdfs.SentryPermissions.RoleInfo;
+import org.apache.sentry.hdfs.service.thrift.TPrivilegeChanges;
+import org.apache.sentry.hdfs.service.thrift.TRoleChanges;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UpdateableAuthzPermissions implements AuthzPermissions, Updateable<PermissionsUpdate> {
+  private volatile SentryPermissions perms = new SentryPermissions();
+  private final AtomicLong seqNum = new AtomicLong(0);
+  
+  private static Logger LOG = LoggerFactory.getLogger(UpdateableAuthzPermissions.class);
+
+  public static Map<String, FsAction> ACTION_MAPPING = new HashMap<String, FsAction>();
+  static {
+    ACTION_MAPPING.put("ALL", FsAction.ALL);
+    ACTION_MAPPING.put("*", FsAction.ALL);
+    ACTION_MAPPING.put(AccessConstants.SELECT.toUpperCase(), FsAction.READ_EXECUTE);
+    ACTION_MAPPING.put(AccessConstants.INSERT.toUpperCase(), FsAction.WRITE_EXECUTE);
+  }
+
+  @Override
+  public List<AclEntry> getAcls(String authzObj) {
+    return perms.getAcls(authzObj);
+  }
+
+  @Override
+  public UpdateableAuthzPermissions updateFull(PermissionsUpdate update) {
+    UpdateableAuthzPermissions other = new UpdateableAuthzPermissions();
+    other.applyPartialUpdate(update);
+    other.seqNum.set(update.getSeqNum());
+    return other;
+  }
+
+  @Override
+  public void updatePartial(Iterable<PermissionsUpdate> updates, ReadWriteLock lock) {
+    lock.writeLock().lock();
+    try {
+      int counter = 0;
+      for (PermissionsUpdate update : updates) {
+        applyPartialUpdate(update);
+        if (++counter > 99) {
+          counter = 0;
+          lock.writeLock().unlock();
+          lock.writeLock().lock();
+        }
+        seqNum.set(update.getSeqNum());
+        LOG.warn("##### Updated perms seq Num [" + seqNum.get() + "]");
+      }
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+  
+
+  private void applyPartialUpdate(PermissionsUpdate update) {
+    applyPrivilegeUpdates(update);
+    applyRoleUpdates(update);
+  }
+
+  private void applyRoleUpdates(PermissionsUpdate update) {
+    for (TRoleChanges rUpdate : update.getRoleUpdates()) {
+      if (rUpdate.getRole().equals(PermissionsUpdate.ALL_ROLES)) {
+        // Request to remove group from all roles
+        String groupToRemove = rUpdate.getDelGroups().iterator().next();
+        for (RoleInfo rInfo : perms.getAllRoles()) {
+          rInfo.delGroup(groupToRemove);
+        }
+      }
+      RoleInfo rInfo = perms.getRoleInfo(rUpdate.getRole());
+      for (String group : rUpdate.getAddGroups()) {
+        if (rInfo == null) {
+          rInfo = new RoleInfo(rUpdate.getRole());
+        }
+        rInfo.addGroup(group);
+      }
+      if (rInfo != null) {
+        perms.addRoleInfo(rInfo);
+        for (String group : rUpdate.getDelGroups()) {
+          if (group.equals(PermissionsUpdate.ALL_GROUPS)) {
+            perms.delRoleInfo(rInfo.getRole());
+            break;
+          }
+          // If there are no groups to remove, rUpdate.getDelGroups() will
+          // return empty list and this code will not be reached
+          rInfo.delGroup(group);
+        }
+      }
+    }
+  }
+
+  private void applyPrivilegeUpdates(PermissionsUpdate update) {
+    for (TPrivilegeChanges pUpdate : update.getPrivilegeUpdates()) {
+      if (pUpdate.getAuthzObj().equals(PermissionsUpdate.ALL_PRIVS)) {
+        // Request to remove role from all Privileges
+        String roleToRemove = pUpdate.getDelPrivileges().keySet().iterator()
+            .next();
+        for (PrivilegeInfo pInfo : perms.getAllPrivileges()) {
+          pInfo.removePermission(roleToRemove);
+        }
+      }
+      PrivilegeInfo pInfo = perms.getPrivilegeInfo(pUpdate.getAuthzObj());
+      for (Map.Entry<String, String> aMap : pUpdate.getAddPrivileges().entrySet()) {
+        if (pInfo == null) {
+          pInfo = new PrivilegeInfo(pUpdate.getAuthzObj());
+        }
+        FsAction fsAction = pInfo.getPermission(aMap.getKey());
+        if (fsAction == null) {
+          fsAction = getFAction(aMap.getValue());
+        } else {
+          fsAction = fsAction.or(getFAction(aMap.getValue()));
+        }
+        pInfo.setPermission(aMap.getKey(), fsAction);
+      }
+      if (pInfo != null) {
+        perms.addPrivilegeInfo(pInfo);
+        for (Map.Entry<String, String> dMap : pUpdate.getDelPrivileges().entrySet()) {
+          if (dMap.getKey().equals(PermissionsUpdate.ALL_ROLES)) {
+            // Remove all privileges
+            perms.delPrivilegeInfo(pUpdate.getAuthzObj());
+            break;
+          }
+          FsAction fsAction = pInfo.getPermission(dMap.getKey());
+          if (fsAction != null) {
+            fsAction = fsAction.and(getFAction(dMap.getValue()).not());
+            if (FsAction.NONE == fsAction) {
+              pInfo.removePermission(dMap.getKey());
+            } else {
+              pInfo.setPermission(dMap.getKey(), fsAction);
+            }
+          }
+        }
+      }
+    }
+  }
+
+  static FsAction getFAction(String sentryPriv) {
+    String[] strPrivs = sentryPriv.trim().split(",");
+    FsAction retVal = FsAction.NONE;
+    for (String strPriv : strPrivs) {
+      retVal = retVal.or(ACTION_MAPPING.get(strPriv.toUpperCase()));
+    }
+    return retVal;
+  }
+
+  @Override
+  public long getLastUpdatedSeqNum() {
+    return seqNum.get();
+  }
+
+  @Override
+  public PermissionsUpdate createFullImageUpdate(long currSeqNum) {
+    PermissionsUpdate retVal = new PermissionsUpdate(currSeqNum, true);
+    for (PrivilegeInfo pInfo : perms.getAllPrivileges()) {
+      TPrivilegeChanges pUpdate = retVal.addPrivilegeUpdate(pInfo.getAuthzObj());
+      for (Map.Entry<String, FsAction> ent : pInfo.getAllPermissions().entrySet()) {
+        pUpdate.putToAddPrivileges(ent.getKey(), ent.getValue().SYMBOL);
+      }
+    }
+    for (RoleInfo rInfo : perms.getAllRoles()) {
+      TRoleChanges rUpdate = retVal.addRoleUpdate(rInfo.getRole());
+      for (String group : rInfo.getAllGroups()) {
+        rUpdate.addToAddGroups(group);
+      }
+    }
+    return retVal;
+  }
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java
new file mode 100644
index 0000000..2085b52
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
/**
 * Test-only provider: a SentryAuthorizationProvider wired to the
 * {@link SentryAuthorizationInfoX} stub so tests run without a live
 * Sentry service.
 */
public class MockSentryAuthorizationProvider extends
    SentryAuthorizationProvider {

  public MockSentryAuthorizationProvider() {
    super(new SentryAuthorizationInfoX());
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/78787d63/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java
new file mode 100644
index 0000000..7a1539b
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+
+public class SentryAuthorizationInfoX extends SentryAuthorizationInfo {
+
+  public SentryAuthorizationInfoX() {
+    super();
+  }
+
+  @Override
+  public void run() {
+    
+  }
+
+  @Override
+  public void start() {
+
+  }
+
+  @Override
+  public void stop() {
+
+  }
+
+  @Override
+  public boolean isStale() {
+    return false;
+  }
+
+  private static final String[] MANAGED = {"user", "authz"};
+  private static final String[] AUTHZ_OBJ = {"user", "authz", "obj"};
+
+  private boolean hasPrefix(String[] prefix, String[] pathElement) {
+    int i = 0;
+    for (; i < prefix.length && i < pathElement.length; i ++) {
+      if (!prefix[i].equals(pathElement[i])) {
+        return false;
+      }
+    }    
+    return (i == prefix.length);
+  }
+  
+  @Override
+  public boolean isManaged(String[] pathElements) {
+    return hasPrefix(MANAGED, pathElements);
+  }
+
+  @Override
+  public boolean doesBelongToAuthzObject(String[] pathElements) {
+    return hasPrefix(AUTHZ_OBJ, pathElements);
+  }
+
+  @Override
+  public List<AclEntry> getAclEntries(String[] pathElements) {
+    AclEntry acl = new AclEntry.Builder().setType(AclEntryType.USER).
+        setPermission(FsAction.ALL).setName("user-authz").
+        setScope(AclEntryScope.ACCESS).build();
+    return Arrays.asList(acl);
+  }
+}


Mime
View raw message