hive-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Work logged] (HIVE-12971) Hive Support for Kudu
Date Tue, 23 Jul 2019 06:57:09 GMT

     [ https://issues.apache.org/jira/browse/HIVE-12971?focusedWorklogId=280838&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-280838
]

ASF GitHub Bot logged work on HIVE-12971:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 23/Jul/19 06:56
            Start Date: 23/Jul/19 06:56
    Worklog Time Spent: 10m 
      Work Description: jcamachor commented on pull request #733: HIVE-12971: Add Support
for Kudu Tables
URL: https://github.com/apache/hive/pull/733#discussion_r306135505
 
 

 ##########
 File path: kudu-handler/src/java/org/apache/hadoop/hive/kudu/KuduHiveUtils.java
 ##########
 @@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.kudu;
+
+import java.security.AccessController;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import javax.security.auth.Subject;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.utils.StringUtils;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.token.Token;
+import org.apache.kudu.ColumnTypeAttributes;
+import org.apache.kudu.Type;
+import org.apache.kudu.client.KuduClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.hadoop.hive.kudu.KuduStorageHandler.KUDU_MASTER_ADDRS_KEY;
+
+/**
+ * A collection of static utility methods for the Kudu Hive integration.
+ * This is useful for code sharing.
+ */
+public final class KuduHiveUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(KuduHiveUtils.class);
+
+  private static final Text KUDU_TOKEN_KIND = new Text("kudu-authn-data");
+
+  private KuduHiveUtils() {}
+
+  /**
+   * Returns the union of the configuration and table properties with the
+   * table properties taking precedence.
+   */
+  public static Configuration createOverlayedConf(Configuration conf, Properties tblProps)
{
+    Configuration newConf = new Configuration(conf);
+    for (Map.Entry<Object, Object> prop : tblProps.entrySet()) {
+      newConf.set((String) prop.getKey(), (String) prop.getValue());
+    }
+    return newConf;
+  }
+
+  public static String getMasterAddresses(Configuration conf) {
+    // Load the default configuration.
+    String masterAddresses = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_KUDU_MASTER_ADDRESSES_DEFAULT);
+    if (StringUtils.isEmpty(masterAddresses)) {
+      throw new IllegalStateException("Kudu master addresses not specified in configuration");
+    }
+    // Override with the table configuration if it exists.
+    if (!StringUtils.isEmpty(conf.get(KUDU_MASTER_ADDRS_KEY))) {
+      masterAddresses = conf.get(KUDU_MASTER_ADDRS_KEY);
+    }
+    return masterAddresses;
+  }
+
+  public static KuduClient getKuduClient(Configuration conf) {
+    String masterAddresses = getMasterAddresses(conf);
+    if (StringUtils.isEmpty(masterAddresses)) {
+      throw new IllegalArgumentException(KUDU_MASTER_ADDRS_KEY + " is not set.");
+    }
+    KuduClient client = new KuduClient.KuduClientBuilder(masterAddresses).build();
+    importCredentialsFromCurrentSubject(client);
+    return client;
+  }
+
+  public static void importCredentialsFromCurrentSubject(KuduClient client) {
+    Subject subj = Subject.getSubject(AccessController.getContext());
+    if (subj == null) {
+      return;
+    }
+    Text service = new Text(client.getMasterAddressesAsString());
+    // Find the Hadoop credentials stored within the JAAS subject.
+    Set<Credentials> credSet = subj.getPrivateCredentials(Credentials.class);
+    for (Credentials creds : credSet) {
+      for (Token<?> tok : creds.getAllTokens()) {
+        if (!tok.getKind().equals(KUDU_TOKEN_KIND)) {
+          continue;
+        }
+        // Only import credentials relevant to the service corresponding to
+        // 'client'. This is necessary if we want to support a job which
+        // reads from one cluster and writes to another.
+        if (!tok.getService().equals(service)) {
+          LOG.debug("Not importing credentials for service " + service +
+              "(expecting service " + service + ")");
+          continue;
+        }
+        LOG.debug("Importing credentials for service " + service);
+        client.importAuthenticationCredentials(tok.getPassword());
+        return;
+      }
+    }
+  }
+
+  /* This method converts a Kudu type to the corresponding Hive type */
+  public static PrimitiveTypeInfo toHiveType(Type kuduType, ColumnTypeAttributes attributes)
+      throws SerDeException {
+    switch (kuduType) {
+    case BOOL: return TypeInfoFactory.booleanTypeInfo;
 
 Review comment:
   Wrt style, use new line for return statement.
 
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 280838)
    Time Spent: 1.5h  (was: 1h 20m)

> Hive Support for Kudu
> ---------------------
>
>                 Key: HIVE-12971
>                 URL: https://issues.apache.org/jira/browse/HIVE-12971
>             Project: Hive
>          Issue Type: New Feature
>    Affects Versions: 2.0.0
>            Reporter: Lenni Kuff
>            Assignee: Grant Henke
>            Priority: Major
>              Labels: pull-request-available
>         Attachments: HIVE-12971.0.patch, HIVE-12971.1.patch, HIVE-12971.2.patch, HIVE-12971.3.patch
>
>          Time Spent: 1.5h
>  Remaining Estimate: 0h
>
> JIRA for tracking work related to Hive/Kudu integration.
> It would be useful to allow Kudu data to be accessible via Hive. This would involve creating
a Kudu SerDe/StorageHandler and implementing support for QUERY and DML commands like SELECT,
INSERT, UPDATE, and DELETE. Kudu Input/OutputFormats classes already exist. The work can be
staged to support this functionality incrementally.



--
This message was sent by Atlassian JIRA
(v7.6.14#76016)

Mime
View raw message