sqoop-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From venkatran...@apache.org
Subject sqoop git commit: SQOOP-3010: Sqoop should not allow --as-parquetfile with hcatalog jobs or when hive import with create-hive-table is used
Date Mon, 26 Sep 2016 12:34:50 GMT
Repository: sqoop
Updated Branches:
  refs/heads/trunk b007e4d59 -> 8f3989d9a


SQOOP-3010: Sqoop should not allow --as-parquetfile with hcatalog jobs or when hive import
with create-hive-table is used

  (Sowmya Ramesh via Venkat Ranganathan)


Project: http://git-wip-us.apache.org/repos/asf/sqoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/sqoop/commit/8f3989d9
Tree: http://git-wip-us.apache.org/repos/asf/sqoop/tree/8f3989d9
Diff: http://git-wip-us.apache.org/repos/asf/sqoop/diff/8f3989d9

Branch: refs/heads/trunk
Commit: 8f3989d9ad32167a2d259377f5634b2ee32ee6ed
Parents: b007e4d
Author: Venkatesan Ranganathan <venkat@hortonworks.com>
Authored: Mon Sep 26 05:34:13 2016 -0700
Committer: Venkatesan Ranganathan <venkat@hortonworks.com>
Committed: Mon Sep 26 05:34:13 2016 -0700

----------------------------------------------------------------------
 .../org/apache/sqoop/tool/BaseSqoopTool.java    | 14 +++++++
 .../com/cloudera/sqoop/hive/TestHiveImport.java | 21 ++++++++++
 .../apache/sqoop/hcat/TestHCatalogBasic.java    | 40 ++++++++++++++++++++
 3 files changed, 75 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sqoop/blob/8f3989d9/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
index fecdf43..b71bc5e 100644
--- a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
+++ b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
@@ -1421,6 +1421,14 @@ public abstract class BaseSqoopTool extends com.cloudera.sqoop.tool.SqoopTool {
         + "importing into SequenceFile format.");
     }
 
+    // Hive import and create hive table not compatible for ParquetFile format
+    if (options.doHiveImport()
+        && options.doFailIfHiveTableExists()
+        && options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
+      throw new InvalidOptionsException("Hive import and create hive table is not compatible with "
+        + "importing into ParquetFile format.");
+      }
+
     if (options.doHiveImport()
         && options.isAppendMode()
         && !options.getIncrementalMode().equals(IncrementalMode.AppendRows)) {
@@ -1598,6 +1606,12 @@ public abstract class BaseSqoopTool extends com.cloudera.sqoop.tool.SqoopTool {
         + " option." + HELP_STR);
     }
 
+    if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
+      throw new InvalidOptionsException("HCatalog job is not compatible with "
+        + "ParquetFile format option " + FMT_PARQUETFILE_ARG
+        + " option." + HELP_STR);
+    }
+
     if (options.getHCatalogPartitionKeys() != null
         && options.getHCatalogPartitionValues() == null) {
       throw new InvalidOptionsException("Either both --hcatalog-partition-keys"

http://git-wip-us.apache.org/repos/asf/sqoop/blob/8f3989d9/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
----------------------------------------------------------------------
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index b626964..26d087b 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -402,6 +402,27 @@ public class TestHiveImport extends ImportJobTestCase {
         {"test2", 4242, "somestring2"}, {"test", 42, "somestring"}});
   }
 
+  /**
+   * Test hive create and --as-parquetfile options validation.
+   */
+  @Test
+  public void testCreateHiveImportAsParquet() throws ParseException {
+    final String TABLE_NAME = "CREATE_HIVE_IMPORT_AS_PARQUET";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] extraArgs = {"--as-parquetfile", "--create-hive-table"};
+    ImportTool tool = new ImportTool();
+
+    try {
+      tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
+          null, true));
+      fail("Expected InvalidOptionsException");
+    } catch (InvalidOptionsException ex) {
+      /* success */
+    }
+  }
+
+
   /** Test that dates are coerced properly to strings. */
   @Test
   public void testDate() throws IOException {

http://git-wip-us.apache.org/repos/asf/sqoop/blob/8f3989d9/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
----------------------------------------------------------------------
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 5cd4c26..54b4552 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -164,6 +164,24 @@ public class TestHCatalogBasic extends TestCase {
     }
   }
 
+  public void testHCatExportWithParquetFile() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--as-parquetfile",
+    };
+    try {
+      SqoopOptions opts = parseExportArgs(args);
+      exportTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
 
   public void testHCatImportWithSequenceFile() throws Exception {
     String[] args = {
@@ -184,6 +202,28 @@ public class TestHCatalogBasic extends TestCase {
     }
   }
 
+  public void testHCatImportWithParquetFile() throws Exception {
+    String[] args = {
+      "--hcatalog-table",
+      "table",
+      "--create-hcatalog-table",
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--as-parquetfile",
+    };
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
   public void testHCatImportWithAvroFile() throws Exception {
     String[] args = {
       "--connect",


Mime
View raw message