sentry-commits mailing list archives

From pras...@apache.org
Subject [20/25] SENTRY-432: Synchronization of HDFS permissions with Sentry permissions. Initial patch (Arun Suresh via Prasad Mujumdar)
Date Fri, 10 Oct 2014 03:48:05 GMT
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TPrivilegeChanges.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TPrivilegeChanges.java b/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TPrivilegeChanges.java
new file mode 100644
index 0000000..76720b9
--- /dev/null
+++ b/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TPrivilegeChanges.java
@@ -0,0 +1,713 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.sentry.hdfs.service.thrift;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TPrivilegeChanges implements org.apache.thrift.TBase<TPrivilegeChanges, TPrivilegeChanges._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPrivilegeChanges");
+
+  private static final org.apache.thrift.protocol.TField AUTHZ_OBJ_FIELD_DESC = new org.apache.thrift.protocol.TField("authzObj", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField ADD_PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("addPrivileges", org.apache.thrift.protocol.TType.MAP, (short)2);
+  private static final org.apache.thrift.protocol.TField DEL_PRIVILEGES_FIELD_DESC = new org.apache.thrift.protocol.TField("delPrivileges", org.apache.thrift.protocol.TType.MAP, (short)3);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new TPrivilegeChangesStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new TPrivilegeChangesTupleSchemeFactory());
+  }
+
+  private String authzObj; // required
+  private Map<String,String> addPrivileges; // required
+  private Map<String,String> delPrivileges; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    AUTHZ_OBJ((short)1, "authzObj"),
+    ADD_PRIVILEGES((short)2, "addPrivileges"),
+    DEL_PRIVILEGES((short)3, "delPrivileges");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // AUTHZ_OBJ
+          return AUTHZ_OBJ;
+        case 2: // ADD_PRIVILEGES
+          return ADD_PRIVILEGES;
+        case 3: // DEL_PRIVILEGES
+          return DEL_PRIVILEGES;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.AUTHZ_OBJ, new org.apache.thrift.meta_data.FieldMetaData("authzObj", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.ADD_PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("addPrivileges", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    tmpMap.put(_Fields.DEL_PRIVILEGES, new org.apache.thrift.meta_data.FieldMetaData("delPrivileges", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TPrivilegeChanges.class, metaDataMap);
+  }
+
+  public TPrivilegeChanges() {
+  }
+
+  public TPrivilegeChanges(
+    String authzObj,
+    Map<String,String> addPrivileges,
+    Map<String,String> delPrivileges)
+  {
+    this();
+    this.authzObj = authzObj;
+    this.addPrivileges = addPrivileges;
+    this.delPrivileges = delPrivileges;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public TPrivilegeChanges(TPrivilegeChanges other) {
+    if (other.isSetAuthzObj()) {
+      this.authzObj = other.authzObj;
+    }
+    if (other.isSetAddPrivileges()) {
+      Map<String,String> __this__addPrivileges = new HashMap<String,String>();
+      for (Map.Entry<String, String> other_element : other.addPrivileges.entrySet()) {
+
+        String other_element_key = other_element.getKey();
+        String other_element_value = other_element.getValue();
+
+        String __this__addPrivileges_copy_key = other_element_key;
+
+        String __this__addPrivileges_copy_value = other_element_value;
+
+        __this__addPrivileges.put(__this__addPrivileges_copy_key, __this__addPrivileges_copy_value);
+      }
+      this.addPrivileges = __this__addPrivileges;
+    }
+    if (other.isSetDelPrivileges()) {
+      Map<String,String> __this__delPrivileges = new HashMap<String,String>();
+      for (Map.Entry<String, String> other_element : other.delPrivileges.entrySet()) {
+
+        String other_element_key = other_element.getKey();
+        String other_element_value = other_element.getValue();
+
+        String __this__delPrivileges_copy_key = other_element_key;
+
+        String __this__delPrivileges_copy_value = other_element_value;
+
+        __this__delPrivileges.put(__this__delPrivileges_copy_key, __this__delPrivileges_copy_value);
+      }
+      this.delPrivileges = __this__delPrivileges;
+    }
+  }
+
+  public TPrivilegeChanges deepCopy() {
+    return new TPrivilegeChanges(this);
+  }
+
+  @Override
+  public void clear() {
+    this.authzObj = null;
+    this.addPrivileges = null;
+    this.delPrivileges = null;
+  }
+
+  public String getAuthzObj() {
+    return this.authzObj;
+  }
+
+  public void setAuthzObj(String authzObj) {
+    this.authzObj = authzObj;
+  }
+
+  public void unsetAuthzObj() {
+    this.authzObj = null;
+  }
+
+  /** Returns true if field authzObj is set (has been assigned a value) and false otherwise */
+  public boolean isSetAuthzObj() {
+    return this.authzObj != null;
+  }
+
+  public void setAuthzObjIsSet(boolean value) {
+    if (!value) {
+      this.authzObj = null;
+    }
+  }
+
+  public int getAddPrivilegesSize() {
+    return (this.addPrivileges == null) ? 0 : this.addPrivileges.size();
+  }
+
+  public void putToAddPrivileges(String key, String val) {
+    if (this.addPrivileges == null) {
+      this.addPrivileges = new HashMap<String,String>();
+    }
+    this.addPrivileges.put(key, val);
+  }
+
+  public Map<String,String> getAddPrivileges() {
+    return this.addPrivileges;
+  }
+
+  public void setAddPrivileges(Map<String,String> addPrivileges) {
+    this.addPrivileges = addPrivileges;
+  }
+
+  public void unsetAddPrivileges() {
+    this.addPrivileges = null;
+  }
+
+  /** Returns true if field addPrivileges is set (has been assigned a value) and false otherwise */
+  public boolean isSetAddPrivileges() {
+    return this.addPrivileges != null;
+  }
+
+  public void setAddPrivilegesIsSet(boolean value) {
+    if (!value) {
+      this.addPrivileges = null;
+    }
+  }
+
+  public int getDelPrivilegesSize() {
+    return (this.delPrivileges == null) ? 0 : this.delPrivileges.size();
+  }
+
+  public void putToDelPrivileges(String key, String val) {
+    if (this.delPrivileges == null) {
+      this.delPrivileges = new HashMap<String,String>();
+    }
+    this.delPrivileges.put(key, val);
+  }
+
+  public Map<String,String> getDelPrivileges() {
+    return this.delPrivileges;
+  }
+
+  public void setDelPrivileges(Map<String,String> delPrivileges) {
+    this.delPrivileges = delPrivileges;
+  }
+
+  public void unsetDelPrivileges() {
+    this.delPrivileges = null;
+  }
+
+  /** Returns true if field delPrivileges is set (has been assigned a value) and false otherwise */
+  public boolean isSetDelPrivileges() {
+    return this.delPrivileges != null;
+  }
+
+  public void setDelPrivilegesIsSet(boolean value) {
+    if (!value) {
+      this.delPrivileges = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case AUTHZ_OBJ:
+      if (value == null) {
+        unsetAuthzObj();
+      } else {
+        setAuthzObj((String)value);
+      }
+      break;
+
+    case ADD_PRIVILEGES:
+      if (value == null) {
+        unsetAddPrivileges();
+      } else {
+        setAddPrivileges((Map<String,String>)value);
+      }
+      break;
+
+    case DEL_PRIVILEGES:
+      if (value == null) {
+        unsetDelPrivileges();
+      } else {
+        setDelPrivileges((Map<String,String>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case AUTHZ_OBJ:
+      return getAuthzObj();
+
+    case ADD_PRIVILEGES:
+      return getAddPrivileges();
+
+    case DEL_PRIVILEGES:
+      return getDelPrivileges();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case AUTHZ_OBJ:
+      return isSetAuthzObj();
+    case ADD_PRIVILEGES:
+      return isSetAddPrivileges();
+    case DEL_PRIVILEGES:
+      return isSetDelPrivileges();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof TPrivilegeChanges)
+      return this.equals((TPrivilegeChanges)that);
+    return false;
+  }
+
+  public boolean equals(TPrivilegeChanges that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_authzObj = true && this.isSetAuthzObj();
+    boolean that_present_authzObj = true && that.isSetAuthzObj();
+    if (this_present_authzObj || that_present_authzObj) {
+      if (!(this_present_authzObj && that_present_authzObj))
+        return false;
+      if (!this.authzObj.equals(that.authzObj))
+        return false;
+    }
+
+    boolean this_present_addPrivileges = true && this.isSetAddPrivileges();
+    boolean that_present_addPrivileges = true && that.isSetAddPrivileges();
+    if (this_present_addPrivileges || that_present_addPrivileges) {
+      if (!(this_present_addPrivileges && that_present_addPrivileges))
+        return false;
+      if (!this.addPrivileges.equals(that.addPrivileges))
+        return false;
+    }
+
+    boolean this_present_delPrivileges = true && this.isSetDelPrivileges();
+    boolean that_present_delPrivileges = true && that.isSetDelPrivileges();
+    if (this_present_delPrivileges || that_present_delPrivileges) {
+      if (!(this_present_delPrivileges && that_present_delPrivileges))
+        return false;
+      if (!this.delPrivileges.equals(that.delPrivileges))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    HashCodeBuilder builder = new HashCodeBuilder();
+
+    boolean present_authzObj = true && (isSetAuthzObj());
+    builder.append(present_authzObj);
+    if (present_authzObj)
+      builder.append(authzObj);
+
+    boolean present_addPrivileges = true && (isSetAddPrivileges());
+    builder.append(present_addPrivileges);
+    if (present_addPrivileges)
+      builder.append(addPrivileges);
+
+    boolean present_delPrivileges = true && (isSetDelPrivileges());
+    builder.append(present_delPrivileges);
+    if (present_delPrivileges)
+      builder.append(delPrivileges);
+
+    return builder.toHashCode();
+  }
+
+  public int compareTo(TPrivilegeChanges other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    TPrivilegeChanges typedOther = (TPrivilegeChanges)other;
+
+    lastComparison = Boolean.valueOf(isSetAuthzObj()).compareTo(typedOther.isSetAuthzObj());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetAuthzObj()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.authzObj, typedOther.authzObj);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetAddPrivileges()).compareTo(typedOther.isSetAddPrivileges());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetAddPrivileges()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.addPrivileges, typedOther.addPrivileges);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetDelPrivileges()).compareTo(typedOther.isSetDelPrivileges());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetDelPrivileges()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.delPrivileges, typedOther.delPrivileges);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("TPrivilegeChanges(");
+    boolean first = true;
+
+    sb.append("authzObj:");
+    if (this.authzObj == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.authzObj);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("addPrivileges:");
+    if (this.addPrivileges == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.addPrivileges);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("delPrivileges:");
+    if (this.delPrivileges == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.delPrivileges);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!isSetAuthzObj()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'authzObj' is unset! Struct:" + toString());
+    }
+
+    if (!isSetAddPrivileges()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'addPrivileges' is unset! Struct:" + toString());
+    }
+
+    if (!isSetDelPrivileges()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'delPrivileges' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class TPrivilegeChangesStandardSchemeFactory implements SchemeFactory {
+    public TPrivilegeChangesStandardScheme getScheme() {
+      return new TPrivilegeChangesStandardScheme();
+    }
+  }
+
+  private static class TPrivilegeChangesStandardScheme extends StandardScheme<TPrivilegeChanges> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, TPrivilegeChanges struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // AUTHZ_OBJ
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.authzObj = iprot.readString();
+              struct.setAuthzObjIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // ADD_PRIVILEGES
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map58 = iprot.readMapBegin();
+                struct.addPrivileges = new HashMap<String,String>(2*_map58.size);
+                for (int _i59 = 0; _i59 < _map58.size; ++_i59)
+                {
+                  String _key60; // required
+                  String _val61; // required
+                  _key60 = iprot.readString();
+                  _val61 = iprot.readString();
+                  struct.addPrivileges.put(_key60, _val61);
+                }
+                iprot.readMapEnd();
+              }
+              struct.setAddPrivilegesIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // DEL_PRIVILEGES
+            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+              {
+                org.apache.thrift.protocol.TMap _map62 = iprot.readMapBegin();
+                struct.delPrivileges = new HashMap<String,String>(2*_map62.size);
+                for (int _i63 = 0; _i63 < _map62.size; ++_i63)
+                {
+                  String _key64; // required
+                  String _val65; // required
+                  _key64 = iprot.readString();
+                  _val65 = iprot.readString();
+                  struct.delPrivileges.put(_key64, _val65);
+                }
+                iprot.readMapEnd();
+              }
+              struct.setDelPrivilegesIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, TPrivilegeChanges struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.authzObj != null) {
+        oprot.writeFieldBegin(AUTHZ_OBJ_FIELD_DESC);
+        oprot.writeString(struct.authzObj);
+        oprot.writeFieldEnd();
+      }
+      if (struct.addPrivileges != null) {
+        oprot.writeFieldBegin(ADD_PRIVILEGES_FIELD_DESC);
+        {
+          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.addPrivileges.size()));
+          for (Map.Entry<String, String> _iter66 : struct.addPrivileges.entrySet())
+          {
+            oprot.writeString(_iter66.getKey());
+            oprot.writeString(_iter66.getValue());
+          }
+          oprot.writeMapEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.delPrivileges != null) {
+        oprot.writeFieldBegin(DEL_PRIVILEGES_FIELD_DESC);
+        {
+          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.delPrivileges.size()));
+          for (Map.Entry<String, String> _iter67 : struct.delPrivileges.entrySet())
+          {
+            oprot.writeString(_iter67.getKey());
+            oprot.writeString(_iter67.getValue());
+          }
+          oprot.writeMapEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class TPrivilegeChangesTupleSchemeFactory implements SchemeFactory {
+    public TPrivilegeChangesTupleScheme getScheme() {
+      return new TPrivilegeChangesTupleScheme();
+    }
+  }
+
+  private static class TPrivilegeChangesTupleScheme extends TupleScheme<TPrivilegeChanges> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, TPrivilegeChanges struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeString(struct.authzObj);
+      {
+        oprot.writeI32(struct.addPrivileges.size());
+        for (Map.Entry<String, String> _iter68 : struct.addPrivileges.entrySet())
+        {
+          oprot.writeString(_iter68.getKey());
+          oprot.writeString(_iter68.getValue());
+        }
+      }
+      {
+        oprot.writeI32(struct.delPrivileges.size());
+        for (Map.Entry<String, String> _iter69 : struct.delPrivileges.entrySet())
+        {
+          oprot.writeString(_iter69.getKey());
+          oprot.writeString(_iter69.getValue());
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, TPrivilegeChanges struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.authzObj = iprot.readString();
+      struct.setAuthzObjIsSet(true);
+      {
+        org.apache.thrift.protocol.TMap _map70 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+        struct.addPrivileges = new HashMap<String,String>(2*_map70.size);
+        for (int _i71 = 0; _i71 < _map70.size; ++_i71)
+        {
+          String _key72; // required
+          String _val73; // required
+          _key72 = iprot.readString();
+          _val73 = iprot.readString();
+          struct.addPrivileges.put(_key72, _val73);
+        }
+      }
+      struct.setAddPrivilegesIsSet(true);
+      {
+        org.apache.thrift.protocol.TMap _map74 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+        struct.delPrivileges = new HashMap<String,String>(2*_map74.size);
+        for (int _i75 = 0; _i75 < _map74.size; ++_i75)
+        {
+          String _key76; // required
+          String _val77; // required
+          _key76 = iprot.readString();
+          _val77 = iprot.readString();
+          struct.delPrivileges.put(_key76, _val77);
+        }
+      }
+      struct.setDelPrivilegesIsSet(true);
+    }
+  }
+
+}
+
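For orientation while reading the generated class above, here is a minimal, hedged sketch of how a caller might populate and round-trip a TPrivilegeChanges delta. The meaning of the map entries (assumed here to be role name to privilege action) is an illustrative guess, not something this patch states.

import org.apache.sentry.hdfs.service.thrift.TPrivilegeChanges;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TMemoryBuffer;

public class TPrivilegeChangesExample {
  public static void main(String[] args) throws TException {
    // One delta per authorizable object; the map key/value semantics are assumed.
    TPrivilegeChanges change = new TPrivilegeChanges();
    change.setAuthzObj("db1.tbl1");                      // illustrative object name
    change.putToAddPrivileges("analyst_role", "SELECT"); // illustrative entry
    change.putToDelPrivileges("etl_role", "INSERT");     // illustrative entry

    // authzObj, addPrivileges and delPrivileges are all REQUIRED fields,
    // so validate() only passes once each of them has been set.
    change.validate();

    // Round-trip through the standard (binary) scheme registered in the static block.
    TMemoryBuffer buffer = new TMemoryBuffer(512);
    change.write(new TBinaryProtocol(buffer));

    TPrivilegeChanges decoded = new TPrivilegeChanges();
    decoded.read(new TBinaryProtocol(buffer));
    System.out.println(decoded);
  }
}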

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TRoleChanges.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TRoleChanges.java b/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TRoleChanges.java
new file mode 100644
index 0000000..87ef02d
--- /dev/null
+++ b/sentry-hdfs/src/gen/thrift/gen-javabean/org/apache/sentry/hdfs/service/thrift/TRoleChanges.java
@@ -0,0 +1,691 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.sentry.hdfs.service.thrift;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TRoleChanges implements org.apache.thrift.TBase<TRoleChanges, TRoleChanges._Fields>, java.io.Serializable, Cloneable {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRoleChanges");
+
+  private static final org.apache.thrift.protocol.TField ROLE_FIELD_DESC = new org.apache.thrift.protocol.TField("role", org.apache.thrift.protocol.TType.STRING, (short)1);
+  private static final org.apache.thrift.protocol.TField ADD_GROUPS_FIELD_DESC = new org.apache.thrift.protocol.TField("addGroups", org.apache.thrift.protocol.TType.LIST, (short)2);
+  private static final org.apache.thrift.protocol.TField DEL_GROUPS_FIELD_DESC = new org.apache.thrift.protocol.TField("delGroups", org.apache.thrift.protocol.TType.LIST, (short)3);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new TRoleChangesStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new TRoleChangesTupleSchemeFactory());
+  }
+
+  private String role; // required
+  private List<String> addGroups; // required
+  private List<String> delGroups; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+    ROLE((short)1, "role"),
+    ADD_GROUPS((short)2, "addGroups"),
+    DEL_GROUPS((short)3, "delGroups");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+    static {
+      for (_Fields field : EnumSet.allOf(_Fields.class)) {
+        byName.put(field.getFieldName(), field);
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, or null if its not found.
+     */
+    public static _Fields findByThriftId(int fieldId) {
+      switch(fieldId) {
+        case 1: // ROLE
+          return ROLE;
+        case 2: // ADD_GROUPS
+          return ADD_GROUPS;
+        case 3: // DEL_GROUPS
+          return DEL_GROUPS;
+        default:
+          return null;
+      }
+    }
+
+    /**
+     * Find the _Fields constant that matches fieldId, throwing an exception
+     * if it is not found.
+     */
+    public static _Fields findByThriftIdOrThrow(int fieldId) {
+      _Fields fields = findByThriftId(fieldId);
+      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+      return fields;
+    }
+
+    /**
+     * Find the _Fields constant that matches name, or null if its not found.
+     */
+    public static _Fields findByName(String name) {
+      return byName.get(name);
+    }
+
+    private final short _thriftId;
+    private final String _fieldName;
+
+    _Fields(short thriftId, String fieldName) {
+      _thriftId = thriftId;
+      _fieldName = fieldName;
+    }
+
+    public short getThriftFieldId() {
+      return _thriftId;
+    }
+
+    public String getFieldName() {
+      return _fieldName;
+    }
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+    tmpMap.put(_Fields.ROLE, new org.apache.thrift.meta_data.FieldMetaData("role", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+    tmpMap.put(_Fields.ADD_GROUPS, new org.apache.thrift.meta_data.FieldMetaData("addGroups", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    tmpMap.put(_Fields.DEL_GROUPS, new org.apache.thrift.meta_data.FieldMetaData("delGroups", org.apache.thrift.TFieldRequirementType.REQUIRED, 
+        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+    metaDataMap = Collections.unmodifiableMap(tmpMap);
+    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TRoleChanges.class, metaDataMap);
+  }
+
+  public TRoleChanges() {
+  }
+
+  public TRoleChanges(
+    String role,
+    List<String> addGroups,
+    List<String> delGroups)
+  {
+    this();
+    this.role = role;
+    this.addGroups = addGroups;
+    this.delGroups = delGroups;
+  }
+
+  /**
+   * Performs a deep copy on <i>other</i>.
+   */
+  public TRoleChanges(TRoleChanges other) {
+    if (other.isSetRole()) {
+      this.role = other.role;
+    }
+    if (other.isSetAddGroups()) {
+      List<String> __this__addGroups = new ArrayList<String>();
+      for (String other_element : other.addGroups) {
+        __this__addGroups.add(other_element);
+      }
+      this.addGroups = __this__addGroups;
+    }
+    if (other.isSetDelGroups()) {
+      List<String> __this__delGroups = new ArrayList<String>();
+      for (String other_element : other.delGroups) {
+        __this__delGroups.add(other_element);
+      }
+      this.delGroups = __this__delGroups;
+    }
+  }
+
+  public TRoleChanges deepCopy() {
+    return new TRoleChanges(this);
+  }
+
+  @Override
+  public void clear() {
+    this.role = null;
+    this.addGroups = null;
+    this.delGroups = null;
+  }
+
+  public String getRole() {
+    return this.role;
+  }
+
+  public void setRole(String role) {
+    this.role = role;
+  }
+
+  public void unsetRole() {
+    this.role = null;
+  }
+
+  /** Returns true if field role is set (has been assigned a value) and false otherwise */
+  public boolean isSetRole() {
+    return this.role != null;
+  }
+
+  public void setRoleIsSet(boolean value) {
+    if (!value) {
+      this.role = null;
+    }
+  }
+
+  public int getAddGroupsSize() {
+    return (this.addGroups == null) ? 0 : this.addGroups.size();
+  }
+
+  public java.util.Iterator<String> getAddGroupsIterator() {
+    return (this.addGroups == null) ? null : this.addGroups.iterator();
+  }
+
+  public void addToAddGroups(String elem) {
+    if (this.addGroups == null) {
+      this.addGroups = new ArrayList<String>();
+    }
+    this.addGroups.add(elem);
+  }
+
+  public List<String> getAddGroups() {
+    return this.addGroups;
+  }
+
+  public void setAddGroups(List<String> addGroups) {
+    this.addGroups = addGroups;
+  }
+
+  public void unsetAddGroups() {
+    this.addGroups = null;
+  }
+
+  /** Returns true if field addGroups is set (has been assigned a value) and false otherwise */
+  public boolean isSetAddGroups() {
+    return this.addGroups != null;
+  }
+
+  public void setAddGroupsIsSet(boolean value) {
+    if (!value) {
+      this.addGroups = null;
+    }
+  }
+
+  public int getDelGroupsSize() {
+    return (this.delGroups == null) ? 0 : this.delGroups.size();
+  }
+
+  public java.util.Iterator<String> getDelGroupsIterator() {
+    return (this.delGroups == null) ? null : this.delGroups.iterator();
+  }
+
+  public void addToDelGroups(String elem) {
+    if (this.delGroups == null) {
+      this.delGroups = new ArrayList<String>();
+    }
+    this.delGroups.add(elem);
+  }
+
+  public List<String> getDelGroups() {
+    return this.delGroups;
+  }
+
+  public void setDelGroups(List<String> delGroups) {
+    this.delGroups = delGroups;
+  }
+
+  public void unsetDelGroups() {
+    this.delGroups = null;
+  }
+
+  /** Returns true if field delGroups is set (has been assigned a value) and false otherwise */
+  public boolean isSetDelGroups() {
+    return this.delGroups != null;
+  }
+
+  public void setDelGroupsIsSet(boolean value) {
+    if (!value) {
+      this.delGroups = null;
+    }
+  }
+
+  public void setFieldValue(_Fields field, Object value) {
+    switch (field) {
+    case ROLE:
+      if (value == null) {
+        unsetRole();
+      } else {
+        setRole((String)value);
+      }
+      break;
+
+    case ADD_GROUPS:
+      if (value == null) {
+        unsetAddGroups();
+      } else {
+        setAddGroups((List<String>)value);
+      }
+      break;
+
+    case DEL_GROUPS:
+      if (value == null) {
+        unsetDelGroups();
+      } else {
+        setDelGroups((List<String>)value);
+      }
+      break;
+
+    }
+  }
+
+  public Object getFieldValue(_Fields field) {
+    switch (field) {
+    case ROLE:
+      return getRole();
+
+    case ADD_GROUPS:
+      return getAddGroups();
+
+    case DEL_GROUPS:
+      return getDelGroups();
+
+    }
+    throw new IllegalStateException();
+  }
+
+  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+  public boolean isSet(_Fields field) {
+    if (field == null) {
+      throw new IllegalArgumentException();
+    }
+
+    switch (field) {
+    case ROLE:
+      return isSetRole();
+    case ADD_GROUPS:
+      return isSetAddGroups();
+    case DEL_GROUPS:
+      return isSetDelGroups();
+    }
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public boolean equals(Object that) {
+    if (that == null)
+      return false;
+    if (that instanceof TRoleChanges)
+      return this.equals((TRoleChanges)that);
+    return false;
+  }
+
+  public boolean equals(TRoleChanges that) {
+    if (that == null)
+      return false;
+
+    boolean this_present_role = true && this.isSetRole();
+    boolean that_present_role = true && that.isSetRole();
+    if (this_present_role || that_present_role) {
+      if (!(this_present_role && that_present_role))
+        return false;
+      if (!this.role.equals(that.role))
+        return false;
+    }
+
+    boolean this_present_addGroups = true && this.isSetAddGroups();
+    boolean that_present_addGroups = true && that.isSetAddGroups();
+    if (this_present_addGroups || that_present_addGroups) {
+      if (!(this_present_addGroups && that_present_addGroups))
+        return false;
+      if (!this.addGroups.equals(that.addGroups))
+        return false;
+    }
+
+    boolean this_present_delGroups = true && this.isSetDelGroups();
+    boolean that_present_delGroups = true && that.isSetDelGroups();
+    if (this_present_delGroups || that_present_delGroups) {
+      if (!(this_present_delGroups && that_present_delGroups))
+        return false;
+      if (!this.delGroups.equals(that.delGroups))
+        return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    HashCodeBuilder builder = new HashCodeBuilder();
+
+    boolean present_role = true && (isSetRole());
+    builder.append(present_role);
+    if (present_role)
+      builder.append(role);
+
+    boolean present_addGroups = true && (isSetAddGroups());
+    builder.append(present_addGroups);
+    if (present_addGroups)
+      builder.append(addGroups);
+
+    boolean present_delGroups = true && (isSetDelGroups());
+    builder.append(present_delGroups);
+    if (present_delGroups)
+      builder.append(delGroups);
+
+    return builder.toHashCode();
+  }
+
+  public int compareTo(TRoleChanges other) {
+    if (!getClass().equals(other.getClass())) {
+      return getClass().getName().compareTo(other.getClass().getName());
+    }
+
+    int lastComparison = 0;
+    TRoleChanges typedOther = (TRoleChanges)other;
+
+    lastComparison = Boolean.valueOf(isSetRole()).compareTo(typedOther.isSetRole());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetRole()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.role, typedOther.role);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetAddGroups()).compareTo(typedOther.isSetAddGroups());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetAddGroups()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.addGroups, typedOther.addGroups);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    lastComparison = Boolean.valueOf(isSetDelGroups()).compareTo(typedOther.isSetDelGroups());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetDelGroups()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.delGroups, typedOther.delGroups);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
+    return 0;
+  }
+
+  public _Fields fieldForId(int fieldId) {
+    return _Fields.findByThriftId(fieldId);
+  }
+
+  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+  }
+
+  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder("TRoleChanges(");
+    boolean first = true;
+
+    sb.append("role:");
+    if (this.role == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.role);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("addGroups:");
+    if (this.addGroups == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.addGroups);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("delGroups:");
+    if (this.delGroups == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.delGroups);
+    }
+    first = false;
+    sb.append(")");
+    return sb.toString();
+  }
+
+  public void validate() throws org.apache.thrift.TException {
+    // check for required fields
+    if (!isSetRole()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'role' is unset! Struct:" + toString());
+    }
+
+    if (!isSetAddGroups()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'addGroups' is unset! Struct:" + toString());
+    }
+
+    if (!isSetDelGroups()) {
+      throw new org.apache.thrift.protocol.TProtocolException("Required field 'delGroups' is unset! Struct:" + toString());
+    }
+
+    // check for sub-struct validity
+  }
+
+  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+    try {
+      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+    try {
+      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+    } catch (org.apache.thrift.TException te) {
+      throw new java.io.IOException(te);
+    }
+  }
+
+  private static class TRoleChangesStandardSchemeFactory implements SchemeFactory {
+    public TRoleChangesStandardScheme getScheme() {
+      return new TRoleChangesStandardScheme();
+    }
+  }
+
+  private static class TRoleChangesStandardScheme extends StandardScheme<TRoleChanges> {
+
+    public void read(org.apache.thrift.protocol.TProtocol iprot, TRoleChanges struct) throws org.apache.thrift.TException {
+      org.apache.thrift.protocol.TField schemeField;
+      iprot.readStructBegin();
+      while (true)
+      {
+        schemeField = iprot.readFieldBegin();
+        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
+          break;
+        }
+        switch (schemeField.id) {
+          case 1: // ROLE
+            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+              struct.role = iprot.readString();
+              struct.setRoleIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 2: // ADD_GROUPS
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list78 = iprot.readListBegin();
+                struct.addGroups = new ArrayList<String>(_list78.size);
+                for (int _i79 = 0; _i79 < _list78.size; ++_i79)
+                {
+                  String _elem80; // required
+                  _elem80 = iprot.readString();
+                  struct.addGroups.add(_elem80);
+                }
+                iprot.readListEnd();
+              }
+              struct.setAddGroupsIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          case 3: // DEL_GROUPS
+            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+              {
+                org.apache.thrift.protocol.TList _list81 = iprot.readListBegin();
+                struct.delGroups = new ArrayList<String>(_list81.size);
+                for (int _i82 = 0; _i82 < _list81.size; ++_i82)
+                {
+                  String _elem83; // required
+                  _elem83 = iprot.readString();
+                  struct.delGroups.add(_elem83);
+                }
+                iprot.readListEnd();
+              }
+              struct.setDelGroupsIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
+          default:
+            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+        }
+        iprot.readFieldEnd();
+      }
+      iprot.readStructEnd();
+      struct.validate();
+    }
+
+    public void write(org.apache.thrift.protocol.TProtocol oprot, TRoleChanges struct) throws org.apache.thrift.TException {
+      struct.validate();
+
+      oprot.writeStructBegin(STRUCT_DESC);
+      if (struct.role != null) {
+        oprot.writeFieldBegin(ROLE_FIELD_DESC);
+        oprot.writeString(struct.role);
+        oprot.writeFieldEnd();
+      }
+      if (struct.addGroups != null) {
+        oprot.writeFieldBegin(ADD_GROUPS_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.addGroups.size()));
+          for (String _iter84 : struct.addGroups)
+          {
+            oprot.writeString(_iter84);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      if (struct.delGroups != null) {
+        oprot.writeFieldBegin(DEL_GROUPS_FIELD_DESC);
+        {
+          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.delGroups.size()));
+          for (String _iter85 : struct.delGroups)
+          {
+            oprot.writeString(_iter85);
+          }
+          oprot.writeListEnd();
+        }
+        oprot.writeFieldEnd();
+      }
+      oprot.writeFieldStop();
+      oprot.writeStructEnd();
+    }
+
+  }
+
+  private static class TRoleChangesTupleSchemeFactory implements SchemeFactory {
+    public TRoleChangesTupleScheme getScheme() {
+      return new TRoleChangesTupleScheme();
+    }
+  }
+
+  private static class TRoleChangesTupleScheme extends TupleScheme<TRoleChanges> {
+
+    @Override
+    public void write(org.apache.thrift.protocol.TProtocol prot, TRoleChanges struct) throws org.apache.thrift.TException {
+      TTupleProtocol oprot = (TTupleProtocol) prot;
+      oprot.writeString(struct.role);
+      {
+        oprot.writeI32(struct.addGroups.size());
+        for (String _iter86 : struct.addGroups)
+        {
+          oprot.writeString(_iter86);
+        }
+      }
+      {
+        oprot.writeI32(struct.delGroups.size());
+        for (String _iter87 : struct.delGroups)
+        {
+          oprot.writeString(_iter87);
+        }
+      }
+    }
+
+    @Override
+    public void read(org.apache.thrift.protocol.TProtocol prot, TRoleChanges struct) throws org.apache.thrift.TException {
+      TTupleProtocol iprot = (TTupleProtocol) prot;
+      struct.role = iprot.readString();
+      struct.setRoleIsSet(true);
+      {
+        org.apache.thrift.protocol.TList _list88 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+        struct.addGroups = new ArrayList<String>(_list88.size);
+        for (int _i89 = 0; _i89 < _list88.size; ++_i89)
+        {
+          String _elem90; // required
+          _elem90 = iprot.readString();
+          struct.addGroups.add(_elem90);
+        }
+      }
+      struct.setAddGroupsIsSet(true);
+      {
+        org.apache.thrift.protocol.TList _list91 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+        struct.delGroups = new ArrayList<String>(_list91.size);
+        for (int _i92 = 0; _i92 < _list91.size; ++_i92)
+        {
+          String _elem93; // required
+          _elem93 = iprot.readString();
+          struct.delGroups.add(_elem93);
+        }
+      }
+      struct.setDelGroupsIsSet(true);
+    }
+  }
+
+}
+
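Similarly, a small hedged sketch of building a TRoleChanges delta with the generated helpers; the group names are placeholders, and the exact semantics of addGroups/delGroups are assumed from the field names.

import org.apache.sentry.hdfs.service.thrift.TRoleChanges;

public class TRoleChangesExample {
  public static void main(String[] args) throws Exception {
    // Groups a role gains and loses; the addTo* helpers lazily create the backing lists.
    TRoleChanges change = new TRoleChanges();
    change.setRole("analyst_role");        // illustrative role name
    change.addToAddGroups("finance");      // groups the role is granted to (assumed)
    change.addToAddGroups("marketing");
    change.addToDelGroups("interns");      // groups the role is revoked from (assumed)

    change.validate();                     // all three fields are REQUIRED
    System.out.println(change);
  }
}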

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPaths.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPaths.java
new file mode 100644
index 0000000..9ea50c7
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPaths.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+public interface AuthzPaths {
+
+  public boolean isUnderPrefix(String[] pathElements);
+
+  public String findAuthzObject(String[] pathElements);
+
+  public String findAuthzObjectExactMatch(String[] pathElements);
+
+  public AuthzPathsDumper<? extends AuthzPaths> getPathsDump();
+
+}
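A short note on intended use: an AuthzPaths implementation answers, for an HDFS path split into its components, which Hive authorizable object (if any) that path belongs to. The sketch below is an assumption-laden illustration; the concrete implementation and the exact element-splitting rules live in HMSPaths further down in this patch.

import org.apache.sentry.hdfs.AuthzPaths;

public class AuthzPathsLookupExample {
  // Hypothetical helper: resolve an absolute HDFS path to its authz object, or null.
  static String resolve(AuthzPaths paths, String absolutePath) {
    // Split "/user/hive/warehouse/db1.db/tbl1" into its non-empty components.
    String[] elements = absolutePath.substring(1).split("/");
    if (!paths.isUnderPrefix(elements)) {
      return null;                              // outside any managed prefix
    }
    return paths.findAuthzObject(elements);     // e.g. "db1.tbl1" (assumed format)
  }
}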

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPathsDumper.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPathsDumper.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPathsDumper.java
new file mode 100644
index 0000000..2bd2a88
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPathsDumper.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import org.apache.sentry.hdfs.service.thrift.TPathsDump;
+
+public interface AuthzPathsDumper<K extends AuthzPaths> {
+
+  public TPathsDump createPathsDump();
+
+  public K initializeFromDump(TPathsDump pathsDump);
+
+}
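The dumper exists so the in-memory path image can be serialized as a TPathsDump (a Thrift struct elsewhere in this patch) and rebuilt in another process. A minimal sketch of that round trip, generic over the AuthzPaths implementation:

import org.apache.sentry.hdfs.AuthzPaths;
import org.apache.sentry.hdfs.AuthzPathsDumper;
import org.apache.sentry.hdfs.service.thrift.TPathsDump;

public class PathsDumpExample {
  // Hypothetical helper: snapshot one paths image and rebuild an equivalent instance.
  static <K extends AuthzPaths> K roundTrip(AuthzPathsDumper<K> dumper) {
    TPathsDump snapshot = dumper.createPathsDump();
    return dumper.initializeFromDump(snapshot);
  }
}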

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPermissions.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPermissions.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPermissions.java
new file mode 100644
index 0000000..1631ae5
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/AuthzPermissions.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import org.apache.hadoop.fs.permission.AclEntry;
+
+import java.util.List;
+
+public interface AuthzPermissions {
+
+  public List<AclEntry> getAcls(String authzObj);
+
+}
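For completeness, a tiny hedged sketch of consuming AuthzPermissions; the "db.table" form of the authzObj argument is an assumption here.

import java.util.List;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.sentry.hdfs.AuthzPermissions;

public class AuthzPermissionsExample {
  // Hypothetical helper: print the Sentry-derived ACL entries for one Hive object.
  static void printAcls(AuthzPermissions permissions, String authzObj) {
    List<AclEntry> acls = permissions.getAcls(authzObj);  // e.g. "db1.tbl1" (assumed)
    for (AclEntry entry : acls) {
      System.out.println(authzObj + " -> " + entry);
    }
  }
}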

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/ExtendedMetastoreClient.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/ExtendedMetastoreClient.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/ExtendedMetastoreClient.java
new file mode 100644
index 0000000..c0358f4
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/ExtendedMetastoreClient.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ExtendedMetastoreClient implements MetastoreClient {
+  
+  private static Logger LOG = LoggerFactory.getLogger(ExtendedMetastoreClient.class);
+
+  private HiveMetaStoreClient client;
+  private final HiveConf hiveConf;
+  public ExtendedMetastoreClient(HiveConf hiveConf) {
+    this.hiveConf = hiveConf;
+  }
+
+  @Override
+  public List<Database> getAllDatabases() {
+    List<Database> retList = new ArrayList<Database>();
+    HiveMetaStoreClient client = getClient();
+    if (client != null) {
+      try {
+        for (String dbName : client.getAllDatabases()) {
+          retList.add(client.getDatabase(dbName));
+        }
+      } catch (Exception e) {
+        LOG.error("Could not get All Databases !!", e);
+      }
+    }
+    return retList;
+  }
+
+  @Override
+  public List<Table> getAllTablesOfDatabase(Database db) {
+    List<Table> retList = new ArrayList<Table>();
+    HiveMetaStoreClient client = getClient();
+    if (client != null) {
+      try {
+        for (String tblName : client.getAllTables(db.getName())) {
+          retList.add(client.getTable(db.getName(), tblName));
+        }
+      } catch (Exception e) {
+        LOG.error(String.format(
+            "Could not get tables for database '%s'", db.getName()), e);
+      }
+    }
+    return retList;
+  }
+
+  @Override
+  public List<Partition> listAllPartitions(Database db, Table tbl) {
+    HiveMetaStoreClient client = getClient();
+    if (client != null) {
+      try {
+        return client.listPartitions(db.getName(), tbl.getTableName(), Short.MAX_VALUE);
+      } catch (Exception e) {
+        LOG.error(String.format(
+            "Could not get partitions for '%s'.'%s'", db.getName(),
+            tbl.getTableName()), e);
+      }
+    }
+    return new LinkedList<Partition>();
+  }
+
+  private HiveMetaStoreClient getClient() {
+    if (client == null) {
+      try {
+        client = new HiveMetaStoreClient(hiveConf);
+        return client;
+      } catch (MetaException e) {
+        client = null;
+        LOG.error("Could not create metastore client !!", e);
+        return null;
+      }
+    } else {
+      return client;
+    }
+  }
+}
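
A short usage sketch (not part of this patch) that drives the client above; it assumes the HiveConf points at a reachable metastore and only exercises the three methods this file defines:

    HiveConf conf = new HiveConf();
    MetastoreClient client = new ExtendedMetastoreClient(conf);
    for (Database db : client.getAllDatabases()) {
      for (Table tbl : client.getAllTablesOfDatabase(db)) {
        List<Partition> parts = client.listAllPartitions(db, tbl);
        System.out.println(db.getName() + "." + tbl.getTableName()
            + ": " + parts.size() + " partition(s)");
      }
    }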

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPaths.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPaths.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPaths.java
new file mode 100644
index 0000000..e445634
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPaths.java
@@ -0,0 +1,467 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.fs.Path;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
+public class HMSPaths implements AuthzPaths {
+
+  // Splits an absolute path into its non-empty elements, e.g. "/a/b" -> ["a", "b"].
+  @VisibleForTesting
+  static List<String> getPathElements(String path) {
+    path = path.trim();
+    if (path.charAt(0) != Path.SEPARATOR_CHAR) {
+      throw new IllegalArgumentException("Path must be absolute: " + path);
+    }
+    List<String> list = new ArrayList<String>(32);
+    int idx = 0;
+    int found = path.indexOf(Path.SEPARATOR_CHAR, idx);
+    while (found > -1) {
+      if (found > idx) {
+        list.add(path.substring(idx, found));
+      }
+      idx = found + 1;
+      found = path.indexOf(Path.SEPARATOR_CHAR, idx);
+    }
+    if (idx < path.length()) {
+      list.add(path.substring(idx));
+    }
+    return list;
+  }
+
+  @VisibleForTesting
+  static List<List<String>> gePathsElements(List<String> paths) {
+    List<List<String>> pathsElements = new ArrayList<List<String>>(paths.size());
+    for (String path : paths) {
+      pathsElements.add(getPathElements(path));
+    }
+    return pathsElements;
+  }
+
+  @VisibleForTesting
+  enum EntryType {
+    DIR(true),
+    PREFIX(false),
+    AUTHZ_OBJECT(false);
+
+    private boolean removeIfDangling;
+
+    private EntryType(boolean removeIfDangling) {
+      this.removeIfDangling = removeIfDangling;
+    }
+
+    public boolean isRemoveIfDangling() {
+      return removeIfDangling;
+    }
+
+    public byte getByte() {
+      return (byte)toString().charAt(0);
+    }
+    
+    public static EntryType fromByte(byte b) {
+      switch (b) {
+      case ((byte)'D'):
+        return DIR;
+      case ((byte)'P'):
+        return PREFIX;
+      case ((byte)'A'):
+        return AUTHZ_OBJECT;
+      default:
+        return null;
+      }
+    }
+  }
+
+  @VisibleForTesting
+  static class Entry {
+    private Entry parent;
+    private EntryType type;
+    private final String pathElement;
+    private String authzObj;
+    private final Map<String, Entry> children;
+
+    Entry(Entry parent, String pathElement, EntryType type,
+        String authzObj) {
+      this.parent = parent;
+      this.type = type;
+      this.pathElement = pathElement;
+      this.authzObj = authzObj;
+      children = new HashMap<String, Entry>();
+    }
+
+    private void setAuthzObj(String authzObj) {
+      this.authzObj = authzObj;
+    }
+
+    private void setType(EntryType type) {
+      this.type = type;
+    }
+
+    protected void removeParent() {
+      parent = null;
+    }
+
+    public String toString() {
+      return String.format("Entry[fullPath: %s, type: %s, authObject: %s]",
+          getFullPath(), type, authzObj);
+    }
+
+    private Entry createChild(List<String> pathElements, EntryType type,
+        String authzObj) {
+      Entry entryParent = this;
+      for (int i = 0; i < pathElements.size() - 1; i++) {
+        String pathElement = pathElements.get(i);
+        Entry child = entryParent.getChildren().get(pathElement);
+        if (child == null) {
+          child = new Entry(entryParent, pathElement, EntryType.DIR, null);
+          entryParent.getChildren().put(pathElement, child);
+        }
+        entryParent = child;
+      }
+      String lastPathElement = pathElements.get(pathElements.size() - 1);
+      Entry child = entryParent.getChildren().get(lastPathElement);
+      if (child == null) {
+        child = new Entry(entryParent, lastPathElement, type, authzObj);
+        entryParent.getChildren().put(lastPathElement, child);
+      } else if (type == EntryType.AUTHZ_OBJECT &&
+          child.getType() == EntryType.DIR) {
+        // if the entry already existed as a dir, we change it to be an authz obj
+        child.setAuthzObj(authzObj);
+        child.setType(EntryType.AUTHZ_OBJECT);
+      }
+      return child;
+    }
+
+    public static Entry createRoot(boolean asPrefix) {
+      return new Entry(null, "/", (asPrefix) 
+                                   ? EntryType.PREFIX : EntryType.DIR, null);
+    }
+
+    private String toPath(List<String> arr) {
+      StringBuilder sb = new StringBuilder();
+      for (String s : arr) {
+        sb.append(Path.SEPARATOR).append(s);
+      }
+      return sb.toString();
+    }
+    
+    public Entry createPrefix(List<String> pathElements) {
+      Entry prefix = findPrefixEntry(pathElements);
+      if (prefix != null) {
+        throw new IllegalArgumentException(String.format(
+            "Cannot add prefix '%s' under an existing prefix '%s'", 
+            toPath(pathElements), prefix.getFullPath()));
+      }
+      return createChild(pathElements, EntryType.PREFIX, null);
+    }
+
+    public Entry createAuthzObjPath(List<String> pathElements, String authzObj) {
+      Entry entry = null;
+      Entry prefix = findPrefixEntry(pathElements);
+      if (prefix != null) {
+        // we only create the entry if is under a prefix, else we ignore it
+        entry = createChild(pathElements, EntryType.AUTHZ_OBJECT, authzObj);
+      }
+      return entry;
+    }
+
+    public void delete() {
+      if (getParent() != null) {
+        if (getChildren().isEmpty()) {
+          getParent().getChildren().remove(getPathElement());
+          getParent().deleteIfDangling();
+          parent = null;
+        } else {
+          // if the entry was for an authz object and has children, we
+          // change it to be a dir entry.
+          if (getType() == EntryType.AUTHZ_OBJECT) {
+            setType(EntryType.DIR);
+            setAuthzObj(null);
+          }
+        }
+      }
+    }
+
+    private void deleteIfDangling() {
+      if (getChildren().isEmpty() && getType().isRemoveIfDangling()) {
+        delete();
+      }
+    }
+
+    public Entry getParent() {
+      return parent;
+    }
+
+    public EntryType getType() {
+      return type;
+    }
+
+    public String getPathElement() {
+      return pathElement;
+    }
+
+    public String getAuthzObj() {
+      return authzObj;
+    }
+
+    public Map<String, Entry> getChildren() {
+      return children;
+    }
+
+    public Entry findPrefixEntry(List<String> pathElements) {
+      Preconditions.checkArgument(pathElements != null,
+          "pathElements cannot be NULL");
+      return (getType() == EntryType.PREFIX) 
+             ? this : findPrefixEntry(pathElements, 0);
+    }
+
+    private Entry findPrefixEntry(List<String> pathElements, int index) {
+      Entry prefixEntry = null;
+      if (index == pathElements.size()) {
+        prefixEntry = null;
+      } else {
+        Entry child = getChildren().get(pathElements.get(index));
+        if (child != null) {
+          if (child.getType() == EntryType.PREFIX) {
+            prefixEntry = child;
+          } else {
+            prefixEntry = child.findPrefixEntry(pathElements, index + 1);
+          }
+        }
+      }
+      return prefixEntry;
+    }
+
+    public Entry find(String[] pathElements, boolean isPartialMatchOk) {
+      Preconditions.checkArgument(
+          pathElements != null && pathElements.length > 0,
+          "pathElements cannot be NULL or empty");
+      return find(pathElements, 0, isPartialMatchOk, null);
+    }
+
+    private Entry find(String[] pathElements, int index,
+        boolean isPartialMatchOk, Entry lastAuthObj) {
+      Entry found = null;
+      if (index == pathElements.length) {
+        if (isPartialMatchOk && (getType() == EntryType.AUTHZ_OBJECT)) {
+          found = this;
+        }
+      } else {
+        Entry child = getChildren().get(pathElements[index]);
+        if (child != null) {
+          if (index == pathElements.length - 1) {
+            found = (child.getType() == EntryType.AUTHZ_OBJECT) ? child : lastAuthObj;
+          } else {
+            found = child.find(pathElements, index + 1, isPartialMatchOk,
+                (child.getType() == EntryType.AUTHZ_OBJECT) ? child : lastAuthObj);
+          }
+        } else {
+          if (isPartialMatchOk) {
+            found = lastAuthObj;
+          }
+        }
+      }
+      return found;
+    }
+
+    public String getFullPath() {
+      String path = getFullPath(this, new StringBuilder()).toString();
+      if (path.isEmpty()) {
+        path = Path.SEPARATOR;
+      }
+      return path;
+    }
+
+    private StringBuilder getFullPath(Entry entry, StringBuilder sb) {
+      if (entry.getParent() != null) {
+        getFullPath(entry.getParent(), sb).append(Path.SEPARATOR).append(
+            entry.getPathElement());
+      }
+      return sb;
+    }
+
+  }
+
+  private volatile Entry root;
+  private Map<String, Set<Entry>> authzObjToPath;
+
+  public HMSPaths(String[] pathPrefixes) {
+    boolean rootPrefix = false;
+    for (String pathPrefix : pathPrefixes) {
+      rootPrefix = rootPrefix || pathPrefix.equals(Path.SEPARATOR);
+    }
+    if (rootPrefix && pathPrefixes.length > 1) {
+      throw new IllegalArgumentException(
+          "Root is a path prefix, there cannot be other path prefixes");
+    }
+    root = Entry.createRoot(rootPrefix);
+    if (!rootPrefix) {
+      for (String pathPrefix : pathPrefixes) {
+        root.createPrefix(getPathElements(pathPrefix));
+      }
+    }
+    authzObjToPath = new HashMap<String, Set<Entry>>();
+  }
+
+  HMSPaths() {
+    authzObjToPath = new HashMap<String, Set<Entry>>();
+  }
+
+  void _addAuthzObject(String authzObj, List<String> authzObjPaths) {
+    addAuthzObject(authzObj, gePathsElements(authzObjPaths));
+  }
+
+  void addAuthzObject(String authzObj, List<List<String>> authzObjPathElements) {
+    Set<Entry> previousEntries = authzObjToPath.get(authzObj);
+    Set<Entry> newEntries = new HashSet<Entry>(authzObjPathElements.size());
+    for (List<String> pathElements : authzObjPathElements) {
+      Entry e = root.createAuthzObjPath(pathElements, authzObj);
+      if (e != null) {
+        newEntries.add(e);
+      } else {
+        // TODO: log a warning; the path is ignored because it is not under any prefix
+      }
+    }
+    authzObjToPath.put(authzObj, newEntries);
+    if (previousEntries != null) {
+      previousEntries.removeAll(newEntries);
+      if (!previousEntries.isEmpty()) {
+        for (Entry entry : previousEntries) {
+          entry.delete();
+        }
+      }
+    }
+  }
+
+  void addPathsToAuthzObject(String authzObj,
+      List<List<String>> authzObjPathElements, boolean createNew) {
+    Set<Entry> entries = authzObjToPath.get(authzObj);
+    if (entries != null) {
+      Set<Entry> newEntries = new HashSet<Entry>(authzObjPathElements.size());
+      for (List<String> pathElements : authzObjPathElements) {
+        Entry e = root.createAuthzObjPath(pathElements, authzObj);
+        if (e != null) {
+          newEntries.add(e);
+        } else {
+          // TODO: log a warning; the path is ignored because it is not under any prefix
+        }
+      }
+      entries.addAll(newEntries);
+    } else {
+      if (createNew) {
+        addAuthzObject(authzObj, authzObjPathElements);
+      } else {
+        // TODO: log a warning; the authz object does not exist
+      }
+    }
+  }
+
+  void _addPathsToAuthzObject(String authzObj, List<String> authzObjPaths) {
+    addPathsToAuthzObject(authzObj, gePathsElements(authzObjPaths), false);
+  }
+
+  void addPathsToAuthzObject(String authzObj, List<List<String>> authzObjPaths) {
+    addPathsToAuthzObject(authzObj, authzObjPaths, false);
+  }
+
+  void deletePathsFromAuthzObject(String authzObj,
+      List<List<String>> authzObjPathElements) {
+    Set<Entry> entries = authzObjToPath.get(authzObj);
+    if (entries != null) {
+      Set<Entry> toDelEntries = new HashSet<Entry>(authzObjPathElements.size());
+      for (List<String> pathElements : authzObjPathElements) {
+        Entry entry = root.find(
+            pathElements.toArray(new String[pathElements.size()]), false);
+        if (entry != null) {
+          entry.delete();
+          toDelEntries.add(entry);
+        } else {
+          // TODO: log a warning; the path is ignored because it was not registered
+        }
+      }
+      entries.removeAll(toDelEntries);
+    } else {
+      // TODO: log a warning; the authz object does not exist
+    }
+  }
+
+  void deleteAuthzObject(String authzObj) {
+    Set<Entry> entries = authzObjToPath.remove(authzObj);
+    if (entries != null) {
+      for (Entry entry : entries) {
+        entry.delete();
+      }
+    }
+  }
+
+  @Override
+  public String findAuthzObject(String[] pathElements) {
+    return findAuthzObject(pathElements, true);
+  }
+
+  @Override
+  public String findAuthzObjectExactMatch(String[] pathElements) {
+    return findAuthzObject(pathElements, false);
+  }
+
+  public String findAuthzObject(String[] pathElements, boolean isPartialOk) {
+    // A null or empty path (i.e. '/') maps to no authz object.
+    if ((pathElements == null) || (pathElements.length == 0)) {
+      return null;
+    }
+    String authzObj = null;
+    Entry entry = root.find(pathElements, isPartialOk);
+    if (entry != null) {
+      authzObj = entry.getAuthzObj();
+    }
+    return authzObj;
+  }
+
+  @Override
+  public boolean isUnderPrefix(String[] pathElements) {
+    return root.findPrefixEntry(Lists.newArrayList(pathElements)) != null;
+  }
+
+  // Used by the serializer
+  Entry getRootEntry() {
+    return root;
+  }
+
+  void setRootEntry(Entry root) {
+    this.root = root;
+  }
+
+  void setAuthzObjToPathMapping(Map<String, Set<Entry>> mapping) {
+    authzObjToPath = mapping;
+  }
+
+  @Override
+  public HMSPathsSerDe getPathsDump() {
+    return new HMSPathsSerDe(this);
+  }
+
+}
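
A hedged usage sketch (not part of this patch) of the path trie above: register a table location under a managed prefix, then resolve a file below it back to its authorizable object. The prefix, database and table names are placeholders:

    HMSPaths paths = new HMSPaths(new String[] { "/user/hive/warehouse" });
    paths._addAuthzObject("db1.tbl1",
        Lists.newArrayList("/user/hive/warehouse/db1.db/tbl1"));
    // A file inside the table directory partially matches the AUTHZ_OBJECT entry.
    String authzObj = paths.findAuthzObject(new String[] {
        "user", "hive", "warehouse", "db1.db", "tbl1", "part-00000" });
    // authzObj is "db1.tbl1"; findAuthzObjectExactMatch() on the same path returns null.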

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPathsSerDe.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPathsSerDe.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPathsSerDe.java
new file mode 100644
index 0000000..b642a3b
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/HMSPathsSerDe.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.sentry.hdfs.HMSPaths.Entry;
+import org.apache.sentry.hdfs.HMSPaths.EntryType;
+import org.apache.sentry.hdfs.service.thrift.TPathEntry;
+import org.apache.sentry.hdfs.service.thrift.TPathsDump;
+
+public class HMSPathsSerDe implements AuthzPathsDumper<HMSPaths> {
+
+  private final HMSPaths hmsPaths;
+
+  static class Tuple {
+    final TPathEntry entry;
+    final int id;
+    Tuple(TPathEntry entry, int id) {
+      this.entry = entry;
+      this.id = id;
+    }
+  }
+
+  public HMSPathsSerDe(HMSPaths hmsPaths) {
+    this.hmsPaths = hmsPaths;
+  }
+
+  @Override
+  public TPathsDump createPathsDump() {
+    AtomicInteger counter = new AtomicInteger(0);
+    Map<Integer, TPathEntry> idMap = new HashMap<Integer, TPathEntry>();
+    Tuple tRootTuple =
+        createTPathEntry(hmsPaths.getRootEntry(), counter, idMap);
+    idMap.put(tRootTuple.id, tRootTuple.entry);
+    cloneToTPathEntry(hmsPaths.getRootEntry(), tRootTuple.entry, counter, idMap);
+    return new TPathsDump(tRootTuple.id, idMap);
+  }
+
+  private void cloneToTPathEntry(Entry parent, TPathEntry tParent,
+      AtomicInteger counter, Map<Integer, TPathEntry> idMap) {
+    for (Entry child : parent.getChildren().values()) {
+      Tuple childTuple = createTPathEntry(child, counter, idMap);
+      tParent.getChildren().add(childTuple.id);
+      cloneToTPathEntry(child, childTuple.entry, counter, idMap);
+    }
+  }
+
+  private Tuple createTPathEntry(Entry entry, AtomicInteger idCounter,
+      Map<Integer, TPathEntry> idMap) {
+    int myId = idCounter.incrementAndGet();
+    TPathEntry tEntry = new TPathEntry(entry.getType().getByte(),
+        entry.getPathElement(), new HashSet<Integer>());
+    if (entry.getAuthzObj() != null) {
+      tEntry.setAuthzObj(entry.getAuthzObj());
+    }
+    idMap.put(myId, tEntry);
+    return new Tuple(tEntry, myId);
+  }
+
+  @Override
+  public HMSPaths initializeFromDump(TPathsDump pathDump) {
+    HMSPaths hmsPaths = new HMSPaths();
+    TPathEntry tRootEntry = pathDump.getNodeMap().get(pathDump.getRootId());
+    Entry rootEntry = new Entry(null, tRootEntry.getPathElement(),
+        EntryType.fromByte(tRootEntry.getType()), tRootEntry.getAuthzObj());
+    Map<String, Set<Entry>> authzObjToPath = new HashMap<String, Set<Entry>>();
+    cloneToEntry(tRootEntry, rootEntry, pathDump.getNodeMap(), authzObjToPath);
+    hmsPaths.setRootEntry(rootEntry);
+    hmsPaths.setAuthzObjToPathMapping(authzObjToPath);
+    return hmsPaths;
+  }
+
+  private void cloneToEntry(TPathEntry tParent, Entry parent,
+      Map<Integer, TPathEntry> idMap, Map<String, Set<Entry>> authzObjToPath) {
+    for (Integer id : tParent.getChildren()) {
+      TPathEntry tChild = idMap.get(id);
+      Entry child = new Entry(parent, tChild.getPathElement(),
+          EntryType.fromByte(tChild.getType()), tChild.getAuthzObj());
+      if (child.getAuthzObj() != null) {
+        Set<Entry> paths = authzObjToPath.get(child.getAuthzObj());
+        if (paths == null) {
+          paths = new HashSet<Entry>();
+          authzObjToPath.put(child.getAuthzObj(), paths);
+        }
+        paths.add(child);
+      }
+      parent.getChildren().put(child.getPathElement(), child);
+      cloneToEntry(tChild, child, idMap, authzObjToPath);
+    }
+  }
+
+}
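
A brief round-trip sketch (not part of this patch) for the serializer: dump the trie into its Thrift form, e.g. to ship a full image to the HDFS-side plugin, and rebuild an equivalent trie from that dump. The prefix is a placeholder:

    HMSPaths original = new HMSPaths(new String[] { "/user/hive/warehouse" });
    TPathsDump dump = original.getPathsDump().createPathsDump();
    HMSPaths copy = original.getPathsDump().initializeFromDump(dump);
    // 'copy' now answers findAuthzObject() the same way 'original' does.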

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/MetastoreClient.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/MetastoreClient.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/MetastoreClient.java
new file mode 100644
index 0000000..3b64756
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/MetastoreClient.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+
+public interface MetastoreClient {
+
+  public List<Database> getAllDatabases();
+
+  public List<Table> getAllTablesOfDatabase(Database db);
+
+  public List<Partition> listAllPartitions(Database db, Table tbl);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PathsUpdate.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PathsUpdate.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PathsUpdate.java
new file mode 100644
index 0000000..c5ac783
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PathsUpdate.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.sentry.hdfs.service.thrift.TPathChanges;
+import org.apache.sentry.hdfs.service.thrift.TPathsUpdate;
+
+import com.google.common.collect.Lists;
+
+public class PathsUpdate implements Updateable.Update {
+
+  public static final String ALL_PATHS = "__ALL_PATHS__";
+
+  private final TPathsUpdate tPathsUpdate;
+
+  public PathsUpdate(TPathsUpdate tPathsUpdate) {
+    this.tPathsUpdate = tPathsUpdate;
+  }
+
+  public PathsUpdate(long seqNum, boolean hasFullImage) {
+    tPathsUpdate = new TPathsUpdate(hasFullImage, seqNum,
+        new LinkedList<TPathChanges>());
+  }
+
+  @Override
+  public boolean hasFullImage() {
+    return tPathsUpdate.isHasFullImage();
+  }
+
+  public TPathChanges newPathChange(String authzObject) {
+    TPathChanges pathChanges = new TPathChanges(authzObject,
+        new LinkedList<List<String>>(), new LinkedList<List<String>>());
+    tPathsUpdate.addToPathChanges(pathChanges);
+    return pathChanges;
+  }
+
+  public List<TPathChanges> getPathChanges() {
+    return tPathsUpdate.getPathChanges();
+  }
+
+  @Override
+  public long getSeqNum() {
+    return tPathsUpdate.getSeqNum();
+  }
+
+  @Override
+  public void setSeqNum(long seqNum) {
+    tPathsUpdate.setSeqNum(seqNum);
+  }
+
+  public TPathsUpdate getThriftObject() {
+    return tPathsUpdate;
+  }
+
+  /**
+   * Strips the scheme and authority from a fully-qualified URI and returns
+   * the path split into its elements, e.g. "hdfs://nn:8020/a/b" -> ["a", "b"].
+   */
+  public static List<String> cleanPath(String path) {
+    try {
+      return Lists.newArrayList(new URI(path).getPath().split("^/")[1]
+          .split("/"));
+    } catch (URISyntaxException e) {
+      throw new RuntimeException("Could not parse path [" + path + "]", e);
+    }
+  }
+
+}
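
A sketch (not part of this patch) of how a producer might assemble a PathsUpdate. The URIs and sequence number are placeholders, and addToAddPaths/addToDelPaths are assumed to be the Thrift-generated accessors for the list fields of TPathChanges:

    PathsUpdate update = new PathsUpdate(1L, false);
    TPathChanges changes = update.newPathChange("db1.tbl1");
    // cleanPath() turns a fully-qualified URI into its path elements.
    changes.addToAddPaths(
        PathsUpdate.cleanPath("hdfs://nn:8020/user/hive/warehouse/db1.db/tbl1"));
    changes.addToDelPaths(
        PathsUpdate.cleanPath("hdfs://nn:8020/tmp/db1.db/tbl1_old"));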

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PermissionsUpdate.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PermissionsUpdate.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PermissionsUpdate.java
new file mode 100644
index 0000000..c9ed96e
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/PermissionsUpdate.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedList;
+
+import org.apache.sentry.hdfs.service.thrift.TPermissionsUpdate;
+import org.apache.sentry.hdfs.service.thrift.TPrivilegeChanges;
+import org.apache.sentry.hdfs.service.thrift.TRoleChanges;
+
+public class PermissionsUpdate implements Updateable.Update {
+
+  public static final String ALL_AUTHZ_OBJ = "__ALL_AUTHZ_OBJ__";
+  public static final String ALL_PRIVS = "__ALL_PRIVS__";
+  public static final String ALL_ROLES = "__ALL_ROLES__";
+  public static final String ALL_GROUPS = "__ALL_GROUPS__";
+
+  private final TPermissionsUpdate tPermUpdate;
+
+  public PermissionsUpdate(TPermissionsUpdate tPermUpdate) {
+    this.tPermUpdate = tPermUpdate;
+  }
+
+  public PermissionsUpdate(long seqNum, boolean hasFullImage) {
+    this.tPermUpdate = new TPermissionsUpdate(hasFullImage, seqNum,
+        new HashMap<String, TPrivilegeChanges>(),
+        new HashMap<String, TRoleChanges>());
+  }
+
+  @Override
+  public long getSeqNum() {
+    return tPermUpdate.getSeqNum();
+  }
+
+  @Override
+  public void setSeqNum(long seqNum) {
+    tPermUpdate.setSeqNum(seqNum);
+  }
+
+  @Override
+  public boolean hasFullImage() {
+    return tPermUpdate.isHasfullImage();
+  }
+
+  public TPrivilegeChanges addPrivilegeUpdate(String authzObj) {
+    if (tPermUpdate.getPrivilegeChanges().containsKey(authzObj)) {
+      return tPermUpdate.getPrivilegeChanges().get(authzObj);
+    }
+    TPrivilegeChanges privUpdate = new TPrivilegeChanges(authzObj,
+        new HashMap<String, String>(), new HashMap<String, String>());
+    tPermUpdate.getPrivilegeChanges().put(authzObj, privUpdate);
+    return privUpdate;
+  }
+
+  public TRoleChanges addRoleUpdate(String role) {
+    if (tPermUpdate.getRoleChanges().containsKey(role)) {
+      return tPermUpdate.getRoleChanges().get(role);
+    }
+    TRoleChanges roleUpdate = new TRoleChanges(role, new LinkedList<String>(),
+        new LinkedList<String>());
+    tPermUpdate.getRoleChanges().put(role, roleUpdate);
+    return roleUpdate;
+  }
+
+  public Collection<TRoleChanges> getRoleUpdates() {
+    return tPermUpdate.getRoleChanges().values();
+  }
+
+  public Collection<TPrivilegeChanges> getPrivilegeUpdates() {
+    return tPermUpdate.getPrivilegeChanges().values();
+  }
+
+  public TPermissionsUpdate getThriftObject() {
+    return tPermUpdate;
+  }
+}
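
A sketch (not part of this patch) of a permissions delta built with the helpers above: grant SELECT on db1.tbl1 to role "analyst" and add a group to that role. putToAddPrivileges and addToAddGroups are assumed Thrift-generated accessors on TPrivilegeChanges and TRoleChanges; the names and action string are placeholders:

    PermissionsUpdate update = new PermissionsUpdate(7L, false);
    // role -> privilege action for the authorizable object
    update.addPrivilegeUpdate("db1.tbl1").putToAddPrivileges("analyst", "select");
    // role -> group membership change
    update.addRoleUpdate("analyst").addToAddGroups("analysts");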

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/b86a53d1/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/Updateable.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/Updateable.java b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/Updateable.java
new file mode 100644
index 0000000..1649ffc
--- /dev/null
+++ b/sentry-hdfs/src/main/java/org/apache/sentry/hdfs/Updateable.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.concurrent.locks.ReadWriteLock;
+
+public interface Updateable<K extends Updateable.Update> {
+  
+  public interface Update {
+
+    boolean hasFullImage();
+    
+    long getSeqNum();
+
+    void setSeqNum(long seqNum);
+
+  }
+
+  /**
+   * Applies multiple partial updates, in order.
+   * @param update the partial updates to apply
+   * @param lock external lock guarding the underlying data structure
+   */
+  public void updatePartial(Iterable<K> update, ReadWriteLock lock);
+
+  /**
+   * Returns a new object with the given full-image update applied.
+   * @param update a full-image update
+   * @return the updated object
+   */
+  public Updateable<K> updateFull(K update);
+
+  /**
+   * Returns the sequence number of the last applied update.
+   */
+  public long getLastUpdatedSeqNum();
+
+  /**
+   * Creates a full-image update of the local data structure.
+   * @param currSeqNum the sequence number to stamp on the update
+   * @return the full-image update
+   */
+  public K createFullImageUpdate(long currSeqNum);
+
+}
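
A minimal sketch (not part of this patch) of how a consumer of this contract might fold one incoming update into its local copy, assuming java.util.Collections and a caller-supplied lock:

    static <K extends Updateable.Update> Updateable<K> apply(
        Updateable<K> current, K update, ReadWriteLock lock) {
      if (update.hasFullImage()) {
        // A full image replaces the structure wholesale.
        return current.updateFull(update);
      }
      // A delta is applied in place under the caller's lock.
      current.updatePartial(Collections.singletonList(update), lock);
      return current;
    }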

