sentry-commits mailing list archives

From pras...@apache.org
Subject [19/25] SENTRY-432: Synchronization of HDFS permissions with Sentry permissions. Initial patch (Arun Suresh via Prasad Mujumdar)
Date Fri, 10 Oct 2014 03:48:04 GMT
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
new file mode 100644
index 0000000..165892d
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReadWriteLock;
+
+import org.apache.sentry.hdfs.service.thrift.TPathChanges;
+import org.apache.sentry.hdfs.service.thrift.TPathsDump;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UpdateableAuthzPaths implements AuthzPaths, Updateable<PathsUpdate> {
+  private volatile HMSPaths paths;
+  private final AtomicLong seqNum = new AtomicLong(0);
+
+  private static Logger LOG = LoggerFactory.getLogger(UpdateableAuthzPaths.class);
+  
+  public UpdateableAuthzPaths(String[] pathPrefixes) {
+    this.paths = new HMSPaths(pathPrefixes);
+  }
+
+  UpdateableAuthzPaths(HMSPaths paths) {
+    this.paths = paths;
+  }
+
+  @Override
+  public boolean isUnderPrefix(String[] pathElements) {
+    return paths.isUnderPrefix(pathElements);
+  }
+
+  @Override
+  public String findAuthzObject(String[] pathElements) {
+    return  paths.findAuthzObject(pathElements);
+  }
+
+  @Override
+  public String findAuthzObjectExactMatch(String[] pathElements) {
+    return  paths.findAuthzObjectExactMatch(pathElements);
+  }
+
+  @Override
+  public UpdateableAuthzPaths updateFull(PathsUpdate update) {
+    UpdateableAuthzPaths other = getPathsDump().initializeFromDump(
+        update.getThriftObject().getPathsDump());
+    other.seqNum.set(update.getSeqNum());
+    return other;
+  }
+
+  @Override
+  public void updatePartial(Iterable<PathsUpdate> updates, ReadWriteLock lock) {
+    lock.writeLock().lock();
+    try {
+      int counter = 0;
+      for (PathsUpdate update : updates) {
+        applyPartialUpdate(update);
+        if (++counter > 99) {
+          counter = 0;
+          lock.writeLock().unlock();
+          lock.writeLock().lock();
+        }
+        seqNum.set(update.getSeqNum());
+        LOG.warn("##### Updated paths seq Num [" + seqNum.get() + "]");
+      }
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+
+  private void applyPartialUpdate(PathsUpdate update) {
+    for (TPathChanges pathChanges : update.getPathChanges()) {
+      paths.addPathsToAuthzObject(pathChanges.getAuthzObj(), pathChanges
+          .getAddPaths(), true);
+      List<List<String>> delPaths = pathChanges.getDelPaths();
+      if ((delPaths.size() == 1) && (delPaths.get(0).size() == 1)
+          && (delPaths.get(0).get(0).equals(PathsUpdate.ALL_PATHS))) {
+        // Remove all paths.. eg. drop table
+        paths.deleteAuthzObject(pathChanges.getAuthzObj());
+      } else {
+        paths.deletePathsFromAuthzObject(pathChanges.getAuthzObj(), pathChanges
+            .getDelPaths());
+      }
+    }
+  }
+
+  @Override
+  public long getLastUpdatedSeqNum() {
+    return seqNum.get();
+  }
+
+  @Override
+  public PathsUpdate createFullImageUpdate(long currSeqNum) {
+    PathsUpdate pathsUpdate = new PathsUpdate(currSeqNum, true);
+    pathsUpdate.getThriftObject().setPathsDump(getPathsDump().createPathsDump());
+    return pathsUpdate;
+  }
+
+  @Override
+  public AuthzPathsDumper<UpdateableAuthzPaths> getPathsDump() {
+    return new AuthzPathsDumper<UpdateableAuthzPaths>() {
+
+      @Override
+      public TPathsDump createPathsDump() {
+        return UpdateableAuthzPaths.this.paths.getPathsDump().createPathsDump();
+      }
+
+      @Override
+      public UpdateableAuthzPaths initializeFromDump(TPathsDump pathsDump) {
+        return new UpdateableAuthzPaths(new HMSPaths().getPathsDump().initializeFromDump(
+            pathsDump));
+      }
+    };
+  }
+}
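
A minimal usage sketch of the new class, assembled only from calls that appear in this patch and its tests (UpdateableAuthzPaths, PathsUpdate, TPathChanges); the warehouse prefix, authz object name, path, and expected output below are illustrative, not part of the commit:

package org.apache.sentry.hdfs;

import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.sentry.hdfs.service.thrift.TPathChanges;

import com.google.common.collect.Lists;

public class AuthzPathsUsageSketch {
  public static void main(String[] args) {
    // Path cache rooted at an illustrative warehouse prefix.
    UpdateableAuthzPaths authzPaths =
        new UpdateableAuthzPaths(new String[] {"/user/hive/warehouse"});
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    // Partial update (seq num 1, not a full image): map a new table path to its authz object.
    PathsUpdate update = new PathsUpdate(1, false);
    TPathChanges change = update.newPathChange("db1.tbl1");
    change.addToAddPaths(PathsUpdate.cleanPath("file:///user/hive/warehouse/db1/tbl1"));
    authzPaths.updatePartial(Lists.newArrayList(update), lock);

    // The new path should now resolve to its authz object, and the sequence number advances.
    System.out.println(authzPaths.findAuthzObjectExactMatch(
        new String[] {"user", "hive", "warehouse", "db1", "tbl1"}));  // expected: db1.tbl1
    System.out.println(authzPaths.getLastUpdatedSeqNum());            // expected: 1
  }
}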

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/resources/sentry_hdfs_service.thrift
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/resources/sentry_hdfs_service.thrift b/sentry-hdfs/src/main/resources/sentry_hdfs_service.thrift
new file mode 100644
index 0000000..9212b64
--- /dev/null
+++ b/sentry-hdfs/src/main/resources/sentry_hdfs_service.thrift
@@ -0,0 +1,87 @@
+#!/usr/local/bin/thrift -java
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#
+# Thrift Service that the MetaStore is built on
+#
+
+include "share/fb303/if/fb303.thrift"
+
+namespace java org.apache.sentry.hdfs.service.thrift
+namespace php sentry.hdfs.thrift
+namespace cpp Apache.Sentry.HDFS.Thrift
+
+struct TPathChanges {
+1: required string authzObj;
+2: required list<list<string>> addPaths;
+3: required list<list<string>> delPaths;
+}
+
+struct TPathEntry {
+1: required byte type;
+2: required string pathElement;
+3: optional string authzObj;
+4: required set<i32> children;
+}
+
+struct TPathsDump {
+1: required i32 rootId;
+2: required map<i32,TPathEntry> nodeMap;
+}
+
+struct TPathsUpdate {
+1: required bool hasFullImage;
+2: optional TPathsDump pathsDump;
+3: required i64 seqNum;
+4: required list<TPathChanges> pathChanges;
+}
+
+struct TPrivilegeChanges {
+1: required string authzObj;
+2: required map<string, string> addPrivileges;
+3: required map<string, string> delPrivileges;
+}
+
+struct TRoleChanges {
+1: required string role;
+2: required list<string> addGroups;
+3: required list<string> delGroups;
+}
+
+struct TPermissionsUpdate {
+1: required bool hasfullImage;
+2: required i64 seqNum;
+3: required map<string, TPrivilegeChanges> privilegeChanges;
+4: required map<string, TRoleChanges> roleChanges; 
+}
+
+struct TAuthzUpdateResponse {
+1: optional list<TPathsUpdate> authzPathUpdate,
+2: optional list<TPermissionsUpdate> authzPermUpdate,
+}
+
+service SentryHDFSService
+{
+  # HMS Path cache
+  void handle_hms_notification(1:TPathsUpdate pathsUpdate);
+
+  TAuthzUpdateResponse get_all_authz_updates_from(1:i64 permSeqNum, 2:i64 pathSeqNum);
+  map<string, list<string>> get_all_related_paths(1:string path, 2:bool exactMatch);
+}
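
A minimal sketch of driving the generated SentryHDFSService client directly over a plain Thrift socket; the host, port, protocol choice, and sequence numbers are placeholders, and the SentryHDFSService.Client and TAuthzUpdateResponse accessors are assumed from standard Thrift Java codegen rather than shown in this patch:

package org.apache.sentry.hdfs;

import org.apache.sentry.hdfs.service.thrift.SentryHDFSService;
import org.apache.sentry.hdfs.service.thrift.TAuthzUpdateResponse;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class SentryHdfsClientSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder endpoint; the real host/port come from service configuration.
    TTransport transport = new TSocket("sentry-host", 8038);
    transport.open();
    SentryHDFSService.Client client =
        new SentryHDFSService.Client(new TBinaryProtocol(transport));

    // Fetch every path and permission delta newer than the given sequence numbers.
    TAuthzUpdateResponse response = client.get_all_authz_updates_from(0L, 0L);
    int pathUpdates = response.isSetAuthzPathUpdate()
        ? response.getAuthzPathUpdate().size() : 0;
    System.out.println("path updates received: " + pathUpdates);

    transport.close();
  }
}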

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java
new file mode 100644
index 0000000..29868ae
--- /dev/null
+++ b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPaths.java
@@ -0,0 +1,357 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.List;
+
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class TestHMSPaths {
+
+  @Test
+  public void testGetPathElements() {
+    List<String> as2 = HMSPaths.getPathElements(new String("/a/b"));
+    List<String> as1 = HMSPaths.getPathElements(new String("/a/b"));
+    Assert.assertEquals(as1, as2);
+
+    List<String> as = HMSPaths.getPathElements(new String("/a/b"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("//a/b"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("/a//b"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("/a/b/"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+
+    as = HMSPaths.getPathElements(new String("//a//b//"));
+    Assert.assertEquals(Lists.newArrayList("a", "b"), as);
+  }
+
+  @Test
+  public void testEntryType() {
+    Assert.assertTrue(HMSPaths.EntryType.DIR.isRemoveIfDangling());
+    Assert.assertFalse(HMSPaths.EntryType.PREFIX.isRemoveIfDangling());
+    Assert.assertFalse(
+        HMSPaths.EntryType.AUTHZ_OBJECT.isRemoveIfDangling());
+  }
+  
+  @Test
+  public void testRootEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    root.toString();
+    Assert.assertNull(root.getParent());
+    Assert.assertEquals(HMSPaths.EntryType.DIR, root.getType());
+    Assert.assertNull(root.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR, root.getFullPath());
+    Assert.assertTrue(root.getChildren().isEmpty());
+    root.delete();
+    try {
+      root.find(null, true);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    try {
+      root.find(new String[0], true);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    try {
+      root.find(null, false);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    try {
+      root.find(new String[0], false);
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+    Assert.assertNull(root.find(new String[]{"a"}, true));
+    Assert.assertNull(root.find(new String[]{"a"}, false));
+    Assert.assertNull(root.findPrefixEntry(Lists.newArrayList("a")));
+
+    root.delete();
+  }
+
+  @Test
+  public void testRootPrefixEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(true);
+    root.toString();
+
+    Assert.assertNull(root.find(new String[]{"a"}, true));
+    Assert.assertNull(root.find(new String[]{"a"}, false));
+    Assert.assertEquals(root, root.findPrefixEntry(Lists.newArrayList("a")));
+    Assert.assertEquals(root, root.findPrefixEntry(Lists.newArrayList("a", "b")));
+
+    try {
+      root.createPrefix(Lists.newArrayList("a"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+  }
+
+  @Test
+  public void testImmediatePrefixEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry entry = root.createPrefix(Lists.newArrayList("a"));
+    entry.toString();
+    
+    Assert.assertEquals(1, root.getChildren().size());
+
+    Assert.assertEquals(root, entry.getParent());
+    Assert.assertEquals(HMSPaths.EntryType.PREFIX, entry.getType());
+    Assert.assertEquals("a", entry.getPathElement());
+    Assert.assertNull(entry.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a", entry.getFullPath());
+    Assert.assertTrue(entry.getChildren().isEmpty());
+
+    Assert.assertEquals(entry, root.findPrefixEntry(Lists.newArrayList("a")));
+    Assert.assertEquals(entry, root.findPrefixEntry(Lists.newArrayList("a", "b")));
+
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+
+    Assert.assertNull(root.find(new String[]{"b"}, false));
+    Assert.assertNull(root.findPrefixEntry(Lists.newArrayList("b")));
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    entry.delete();
+    Assert.assertTrue(root.getChildren().isEmpty());
+  }
+
+  @Test
+  public void testFurtherPrefixEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry entry = root.createPrefix(Lists.newArrayList("a", "b"));
+    entry.toString();
+
+    Assert.assertEquals(1, root.getChildren().size());
+
+    Assert.assertEquals(root, entry.getParent().getParent());
+    Assert.assertEquals(HMSPaths.EntryType.PREFIX, entry.getType());
+    Assert.assertEquals(HMSPaths.EntryType.DIR, 
+        entry.getParent().getType());
+    Assert.assertEquals("b", entry.getPathElement());
+    Assert.assertEquals("a", entry.getParent().getPathElement());
+    Assert.assertNull(entry.getAuthzObj());
+    Assert.assertNull(entry.getParent().getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b", 
+        entry.getFullPath());
+    Assert.assertEquals(Path.SEPARATOR + "a", entry.getParent().getFullPath());
+    Assert.assertTrue(entry.getChildren().isEmpty());
+    Assert.assertEquals(1, entry.getParent().getChildren().size());
+
+    Assert.assertEquals(entry, root.findPrefixEntry(Lists.newArrayList("a", "b")));
+    Assert.assertNull(root.findPrefixEntry(Lists.newArrayList("a")));
+
+    Assert.assertNull(root.find(new String[]{"a", "b", "c"}, false));
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    entry.delete();
+    Assert.assertTrue(root.getChildren().isEmpty());
+  }
+
+  @Test
+  public void testImmediateAuthzEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry prefix = root.createPrefix(Lists.newArrayList("a", "b"));
+
+    HMSPaths.Entry entry = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "p1"), "A");
+    Assert.assertEquals(prefix, entry.getParent());
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, entry.getType());
+    Assert.assertEquals("p1", entry.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b" +
+        Path.SEPARATOR + "p1", entry.getFullPath());
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "p1", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1"}, true));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1"}, false));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "p1", "c"}, 
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "p1", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "p1")));
+
+    root.find(new String[]{"a", "b", "p1"}, true).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "p1"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "p1")));
+
+  }
+
+  @Test
+  public void testFurtherAuthzEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry prefix = root.createPrefix(Lists.newArrayList("a", "b"));
+
+    HMSPaths.Entry entry = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p1"), "A");
+    Assert.assertEquals(prefix, entry.getParent().getParent());
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, entry.getType());
+    Assert.assertEquals("p1", entry.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+    Assert.assertEquals(Path.SEPARATOR + "a" + Path.SEPARATOR + "b" +
+        Path.SEPARATOR + "t" + Path.SEPARATOR + "p1", entry.getFullPath());
+
+    try {
+      root.createPrefix(Lists.newArrayList("a", "b", "p1", "t", "c"));
+      Assert.fail();
+    } catch (IllegalArgumentException ex) {
+      //NOP
+    }
+
+    HMSPaths.Entry ep2 = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p1", "p2"), "A");
+
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, entry.getType());
+    Assert.assertEquals("p1", entry.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+
+    Assert.assertEquals(HMSPaths.EntryType.AUTHZ_OBJECT, ep2.getType());
+    Assert.assertEquals("p2", ep2.getPathElement());
+    Assert.assertEquals("A", entry.getAuthzObj());
+
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1"},
+        true));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1"},
+        false));
+    Assert.assertEquals(entry, root.find(new String[]{"a", "b", "t", "p1", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2"},
+        true));
+    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2"},
+        false));
+    Assert.assertEquals(ep2, root.find(new String[]{"a", "b", "t", "p1", "p2", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "p2", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1", "p2")));
+
+    root.find(new String[]{"a", "b", "t", "p1"}, false).delete();
+
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"},
+        true));
+    Assert.assertEquals(HMSPaths.EntryType.DIR, entry.getType());
+    Assert.assertNull(entry.getAuthzObj());
+
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+    Assert.assertNotNull(root.find(new String[]{"a", "b", "t", "p1", "p2"}, false));
+    root.find(new String[]{"a", "b", "t", "p1", "p2"}, false).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+  }
+
+  @Test
+  public void testMultipleAuthzEntry() {
+    HMSPaths.Entry root = HMSPaths.Entry.createRoot(false);
+    HMSPaths.Entry prefix = root.createPrefix(Lists.newArrayList("a", "b"));
+
+    HMSPaths.Entry e1 = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p1"), "A");
+    HMSPaths.Entry e2 = root.createAuthzObjPath(
+        Lists.newArrayList("a", "b", "t", "p2"), "A");
+
+
+    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1"}, true));
+    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1"}, 
+        false));
+    Assert.assertEquals(e1, root.find(new String[]{"a", "b", "t", "p1", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p1")));
+
+    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2"}, true));
+    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2"}, 
+        false));
+    Assert.assertEquals(e2, root.find(new String[]{"a", "b", "t", "p2", "c"},
+        true));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p2", "c"}, false));
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p2")));
+
+    root.find(new String[]{"a", "b", "t", "p1"}, true).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p1"}, false));
+
+    root.find(new String[]{"a", "b", "t", "p2"}, true).delete();
+    Assert.assertNull(root.find(new String[]{"a", "b", "t", "p2"}, false));
+    Assert.assertNull(root.find(new String[]{"a", "b", "t"}, false));
+
+    Assert.assertEquals(prefix, root.findPrefixEntry(
+        Lists.newArrayList("a", "b", "t", "p3")));
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
new file mode 100644
index 0000000..dcd70c1
--- /dev/null
+++ b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import junit.framework.Assert;
+
+import org.apache.sentry.hdfs.service.thrift.TPathsDump;
+import org.apache.thrift.TDeserializer;
+import org.apache.thrift.TException;
+import org.apache.thrift.TSerializer;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class TestHMSPathsFullDump {
+
+  @Test
+  public void testDumpAndInitialize() {
+    HMSPaths hmsPaths = new HMSPaths(new String[] {"/user/hive/warehouse"});
+    hmsPaths._addAuthzObject("db1", Lists.newArrayList("/user/hive/warehouse/db1"));
+    hmsPaths._addAuthzObject("db1.tbl11", Lists.newArrayList("/user/hive/warehouse/db1/tbl11"));
+    hmsPaths._addPathsToAuthzObject("db1.tbl11", Lists.newArrayList(
+        "/user/hive/warehouse/db1/tbl11/part111",
+        "/user/hive/warehouse/db1/tbl11/part112",
+        "/user/hive/warehouse/db1/tbl11/p1=1/p2=x"));
+    
+    Assert.assertEquals("db1", hmsPaths.findAuthzObject(new String[]{"user", "hive", "warehouse",
"db1"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11", "part111"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11", "part112"}, false));
+
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11", "p1=1", "p2=x"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11", "p1=1"}, true));
+
+    HMSPathsSerDe serDe = hmsPaths.getPathsDump();
+    TPathsDump pathsDump = serDe.createPathsDump();
+    HMSPaths hmsPaths2 = serDe.initializeFromDump(pathsDump);
+
+    Assert.assertEquals("db1", hmsPaths2.findAuthzObject(new String[]{"user", "hive", "warehouse",
"db1"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths2.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths2.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11", "part111"}, false));
+    Assert.assertEquals("db1.tbl11", hmsPaths2.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db1", "tbl11", "part112"}, false));
+  }
+
+  @Test
+  public void testThrftSerialization() throws TException {
+    HMSPaths hmsPaths = new HMSPaths(new String[] {"/"});
+    String prefix = "/user/hive/warehouse/";
+    for (int dbNum = 0; dbNum < 1; dbNum++) {
+      String dbName = "db" + dbNum;
+      hmsPaths._addAuthzObject(dbName, Lists.newArrayList(prefix + dbName));
+      for (int tblNum = 0; tblNum < 1000000; tblNum++) {
+        String tblName = "tbl" + tblNum;
+        hmsPaths._addAuthzObject(dbName + "." + tblName, Lists.newArrayList(prefix + dbName + "/" + tblName));
+        for (int partNum = 0; partNum < 1; partNum++) {
+          String partName = "part" + partNum;
+          hmsPaths
+              ._addPathsToAuthzObject(
+                  dbName + "." + tblName,
+                  Lists.newArrayList(prefix + dbName + "/" + tblName + "/"
+                      + partName));
+        }
+      }
+    }
+    HMSPathsSerDe serDe = hmsPaths.getPathsDump();
+    long t1 = System.currentTimeMillis();
+    TPathsDump pathsDump = serDe.createPathsDump();
+    byte[] ser = new TSerializer(new TCompactProtocol.Factory()).serialize(pathsDump);
+    long serTime = System.currentTimeMillis() - t1;
+    System.out.println("Serialization Time: " + serTime + ", " + ser.length);
+
+    t1 = System.currentTimeMillis();
+    TPathsDump tPathsDump = new TPathsDump();
+    new TDeserializer(new TCompactProtocol.Factory()).deserialize(tPathsDump, ser);
+    HMSPaths fromDump = serDe.initializeFromDump(tPathsDump);
+    System.out.println("Deserialization Time: " + (System.currentTimeMillis() - t1));
+    Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db0", "tbl999"}, false));
+    Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive",
"warehouse", "db0", "tbl999", "part5"}, false));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java
new file mode 100644
index 0000000..9d0d366
--- /dev/null
+++ b/sentry-hdfs/src/test/java/org/apache/sentry/hdfs/TestUpdateableAuthzPaths.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.sentry.hdfs.service.thrift.TPathChanges;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class TestUpdateableAuthzPaths {
+
+  @Test
+  public void testFullUpdate() {
+    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
+    assertEquals("db1", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertEquals("db1.tbl11", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part111"}));
+    assertEquals("db1.tbl11", hmsPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part112"}));
+
+    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
+    PathsUpdate update = new PathsUpdate(1, true);
+    update.getThriftObject().setPathsDump(authzPaths.getPathsDump().createPathsDump());
+
+    UpdateableAuthzPaths authzPaths2 = new UpdateableAuthzPaths(new String[] {"/"});
+    UpdateableAuthzPaths pre = authzPaths2.updateFull(update);
+    assertFalse(pre == authzPaths2);
+    authzPaths2 = pre;
+
+    assertEquals("db1", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertEquals("db1.tbl11", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part111"}));
+    assertEquals("db1.tbl11", authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part112"}));
+
+    // Ensure Full Update wipes old stuff
+    UpdateableAuthzPaths authzPaths3 = new UpdateableAuthzPaths(createBaseHMSPaths(2, 1));
+    update = new PathsUpdate(2, true);
+    update.getThriftObject().setPathsDump(authzPaths3.getPathsDump().createPathsDump());
+    pre = authzPaths2.updateFull(update);
+    assertFalse(pre == authzPaths2);
+    authzPaths2 = pre;
+
+    assertNull(authzPaths2.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertNull(authzPaths2.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+
+    assertEquals("db2", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2"}));
+    assertEquals("db2.tbl21", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2", "tbl21"}));
+    assertEquals("db2.tbl21", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2", "tbl21",
"part211"}));
+    assertEquals("db2.tbl21", authzPaths2.findAuthzObjectExactMatch(new String[]{"db2", "tbl21",
"part212"}));
+  }
+
+  @Test
+  public void testPartialUpdateAddPath() {
+    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
+    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
+    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+    // Create table
+    PathsUpdate update = new PathsUpdate(2, false);
+    TPathChanges pathChange = update.newPathChange("db1.tbl12");
+    pathChange.addToAddPaths(PathsUpdate.cleanPath("file:///db1/tbl12"));
+    authzPaths.updatePartial(Lists.newArrayList(update), lock);
+    
+    // Add partition
+    update = new PathsUpdate(3, false);
+    pathChange = update.newPathChange("db1.tbl12");
+    pathChange.addToAddPaths(PathsUpdate.cleanPath("file:///db1/tbl12/part121"));
+    authzPaths.updatePartial(Lists.newArrayList(update), lock);
+
+    // Ensure no change in existing Paths
+    assertEquals("db1", authzPaths.findAuthzObjectExactMatch(new String[]{"db1"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part111"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part112"}));
+
+    // Verify new Paths
+    assertEquals("db1.tbl12", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl12"}));
+    assertEquals("db1.tbl12", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl12",
"part121"}));
+  }
+
+  @Test
+  public void testPartialUpdateDelPath() {
+    HMSPaths hmsPaths = createBaseHMSPaths(1, 1);
+    UpdateableAuthzPaths authzPaths = new UpdateableAuthzPaths(hmsPaths);
+    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11"}));
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part111"}));
+    
+    // Drop partition
+    PathsUpdate update = new PathsUpdate(2, false);
+    TPathChanges pathChange = update.newPathChange("db1.tbl11");
+    pathChange.addToDelPaths(PathsUpdate.cleanPath("file:///db1/tbl11/part111"));
+    authzPaths.updatePartial(Lists.newArrayList(update), lock);
+
+    // Verify Paths deleted
+    assertNull(authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11", "part111"}));
+
+    // Verify rest ok
+    assertEquals("db1.tbl11", authzPaths.findAuthzObjectExactMatch(new String[]{"db1", "tbl11",
"part112"}));
+  }
+
+  private HMSPaths createBaseHMSPaths(int dbNum, int tblNum) {
+    String db = "db" + dbNum;
+    String tbl = "tbl" + dbNum + "" + tblNum;
+    String fullTbl = db + "." + tbl;
+    String dbPath = "/" + db;
+    String tblPath = "/" + db + "/" + tbl;
+    String partPath = tblPath + "/part" + dbNum + "" + tblNum;
+    HMSPaths hmsPaths = new HMSPaths(new String[] {"/"});
+    hmsPaths._addAuthzObject(db, Lists.newArrayList(dbPath));
+    hmsPaths._addAuthzObject(fullTbl, Lists.newArrayList(tblPath));
+    hmsPaths._addPathsToAuthzObject(fullTbl, Lists.newArrayList(
+        partPath + "1", partPath + "2" ));
+    return hmsPaths;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/test/resources/hdfs-sentry.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/test/resources/hdfs-sentry.xml b/sentry-hdfs/src/test/resources/hdfs-sentry.xml
new file mode 100644
index 0000000..c23a431
--- /dev/null
+++ b/sentry-hdfs/src/test/resources/hdfs-sentry.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+  <!-- dummy file that gets rewritten by testcases in target test classpath -->
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-provider/sentry-provider-db/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/pom.xml b/sentry-provider/sentry-provider-db/pom.xml
index b4167e4..b8da31b 100644
--- a/sentry-provider/sentry-provider-db/pom.xml
+++ b/sentry-provider/sentry-provider-db/pom.xml
@@ -42,6 +42,11 @@ limitations under the License.
       <scope>provided</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+      <version>2.5.0</version>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
@@ -72,6 +77,10 @@ limitations under the License.
     </dependency>
     <dependency>
       <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-core-common</artifactId>
     </dependency>
     <dependency>
@@ -80,6 +89,10 @@ limitations under the License.
     </dependency>
     <dependency>
       <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-service-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-provider-common</artifactId>
     </dependency>
     <dependency>
@@ -89,6 +102,11 @@ limitations under the License.
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>0.13.1-cdh5.2.0-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-shims</artifactId>
       <scope>provided</scope>
     </dependency>
@@ -109,6 +127,11 @@ limitations under the License.
       <artifactId>ant-contrib</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs-int</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-minikdc</artifactId>
       <scope>test</scope>
@@ -138,6 +161,11 @@ limitations under the License.
       <artifactId>mockito-all</artifactId>
       <scope>test</scope>
     </dependency>
+      <dependency>
+        <groupId>org.apache.hive</groupId>
+        <artifactId>hive-metastore</artifactId>
+        <version>${hive.version}</version>
+      </dependency>
   </dependencies>
 
   <build>
@@ -189,68 +217,5 @@ limitations under the License.
       </plugin>
     </plugins>
   </build>
-  <profiles>
-    <profile>
-      <id>thriftif</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>generate-thrift-sources</id>
-                <phase>generate-sources</phase>
-                <configuration>
-                  <target>
-                    <taskdef name="for" classname="net.sf.antcontrib.logic.ForTask"
-                      classpathref="maven.plugin.classpath" />
-                    <property name="thrift.args" value="-I ${thrift.home} --gen java:beans,hashcode"/>
-                    <property name="thrift.gen.dir" value="${basedir}/src/gen/thrift"/>
-                    <delete dir="${thrift.gen.dir}"/>
-                    <mkdir dir="${thrift.gen.dir}"/>
-                    <for param="thrift.file">
-                      <path>
-                        <fileset dir="${basedir}/src/main/resources/" includes="**/*.thrift" />
-                      </path>
-                      <sequential>
-                        <echo message="Generating Thrift code for @{thrift.file}"/>
-                        <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
-                          <arg line="${thrift.args} -I ${basedir}/src/main/resources/ -o ${thrift.gen.dir} @{thrift.file} " />
-                        </exec>
-                      </sequential>
-                    </for>
-                  </target>
-                </configuration>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-enforcer-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>enforce-property</id>
-                <goals>
-                  <goal>enforce</goal>
-                </goals>
-                <configuration>
-                  <rules>
-                    <requireProperty>
-                      <property>thrift.home</property>
-                    </requireProperty>
-                  </rules>
-                  <fail>true</fail>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
 
 </project>

