sqoop-commits mailing list archives

From jar...@apache.org
Subject sqoop git commit: SQOOP-1779: Add support for --hive-database when importing Parquet files into Hive
Date Sat, 22 Nov 2014 22:36:29 GMT
Repository: sqoop
Updated Branches:
  refs/heads/trunk cee138c21 -> 52604b166


SQOOP-1779: Add support for --hive-database when importing Parquet files into Hive

(Josh Wills via Jarek Jarcec Cecho)
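
For context, a hypothetical invocation that exercises the code path this
patch changes; the connect string, table name, and database name below are
placeholders, not taken from the commit:

  sqoop import \
    --connect jdbc:mysql://db.example.com/shop \
    --table orders \
    --as-parquetfile \
    --hive-import \
    --hive-database sales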


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/52604b16
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/52604b16
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/52604b16

Branch: refs/heads/trunk
Commit: 52604b1661a8e4e32dec759343f6c72b8546766e
Parents: cee138c
Author: Jarek Jarcec Cecho <jarcec@apache.org>
Authored: Sat Nov 22 14:36:11 2014 -0800
Committer: Jarek Jarcec Cecho <jarcec@apache.org>
Committed: Sat Nov 22 14:36:11 2014 -0800

----------------------------------------------------------------------
 .../sqoop/mapreduce/DataDrivenImportJob.java    | 21 +++++++++++++-------
 1 file changed, 14 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/52604b16/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java b/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
index 71abeeb..708da5b 100644
--- a/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/DataDrivenImportJob.java
@@ -102,19 +102,26 @@ public class DataDrivenImportJob extends ImportJobBase {
       // An Avro schema is required for creating a dataset that manages
       // Parquet data records. The import will fail, if schema is invalid.
       Schema schema = generateAvroSchema(tableName);
-      String uri;
-      if (options.doHiveImport()) {
-        uri = "dataset:hive?dataset=" + options.getHiveTableName();
-      } else {
-        FileSystem fs = FileSystem.get(conf);
-        uri = "dataset:" + fs.makeQualified(getContext().getDestination());
-      }
+      String uri = getKiteUri(conf, tableName);
       ParquetJob.configureImportJob(conf, schema, uri, options.isAppendMode());
     }
 
     job.setMapperClass(getMapperClass());
   }
 
+  private String getKiteUri(Configuration conf, String tableName) throws IOException {
+    if (options.doHiveImport()) {
+      String hiveDatabase = options.getHiveDatabaseName() == null ? "default" :
+          options.getHiveDatabaseName();
+      String hiveTable = options.getHiveTableName() == null ? tableName :
+          options.getHiveTableName();
+      return String.format("dataset:hive:/%s/%s", hiveDatabase, hiveTable);
+    } else {
+      FileSystem fs = FileSystem.get(conf);
+      return "dataset:" + fs.makeQualified(getContext().getDestination());
+    }
+  }
+
   private Schema generateAvroSchema(String tableName) throws IOException {
     ConnManager connManager = getContext().getConnManager();
     AvroSchemaGenerator generator = new AvroSchemaGenerator(options,
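
For illustration only, not part of the commit: a minimal, runnable sketch of
the fallback logic the new getKiteUri() method applies when building the Kite
dataset URI for a Hive import. The database and table names are hypothetical
stand-ins for the SqoopOptions getters used in the patch.

  public class KiteUriExample {
    public static void main(String[] args) {
      // Hypothetical stand-ins for the SqoopOptions getters:
      String hiveDatabaseName = "sales";   // value of --hive-database
      String hiveTableName = null;         // --hive-table not supplied
      String tableName = "orders";         // name of the imported table

      // Same fallback logic as the new getKiteUri():
      String hiveDatabase = hiveDatabaseName == null ? "default"
          : hiveDatabaseName;
      String hiveTable = hiveTableName == null ? tableName : hiveTableName;

      // Prints "dataset:hive:/sales/orders"; without --hive-database it
      // would print "dataset:hive:/default/orders".
      System.out.println(
          String.format("dataset:hive:/%s/%s", hiveDatabase, hiveTable));
    }
  }

This replaces the old "dataset:hive?dataset=" + table form, which offered no
way to address a Hive database other than the default one.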

