From 8f3989d9ad32167a2d259377f5634b2ee32ee6ed Mon Sep 17 00:00:00 2001
From: Venkatesan Ranganathan
Date: Mon, 26 Sep 2016 05:34:13 -0700
Subject: [PATCH] SQOOP-3010: Sqoop should not allow --as-parquetfile with
 hcatalog jobs or when hive import with create-hive-table is used (Sowmya
 Ramesh via Venkat Ranganathan)

---
 .../org/apache/sqoop/tool/BaseSqoopTool.java   | 14 +++++++
 .../cloudera/sqoop/hive/TestHiveImport.java    | 21 ++++++++++
 .../apache/sqoop/hcat/TestHCatalogBasic.java   | 40 +++++++++++++++++++
 3 files changed, 75 insertions(+)

diff --git a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
index fecdf43e..b71bc5e7 100644
--- a/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
+++ b/src/java/org/apache/sqoop/tool/BaseSqoopTool.java
@@ -1421,6 +1421,14 @@ protected void validateHiveOptions(SqoopOptions options)
           + "importing into SequenceFile format.");
     }
 
+    // Hive import with create-hive-table is not compatible with ParquetFile format.
+    if (options.doHiveImport()
+        && options.doFailIfHiveTableExists()
+        && options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
+      throw new InvalidOptionsException("Hive import with --create-hive-table is not compatible with "
+          + "importing into ParquetFile format.");
+    }
+
     if (options.doHiveImport()
         && options.isAppendMode()
         && !options.getIncrementalMode().equals(IncrementalMode.AppendRows)) {
@@ -1598,6 +1606,12 @@ protected void validateHCatalogOptions(SqoopOptions options)
         + " option." + HELP_STR);
     }
 
+    if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
+      throw new InvalidOptionsException("HCatalog job is not compatible with "
+        + "ParquetFile format option " + FMT_PARQUETFILE_ARG
+        + " option." + HELP_STR);
+    }
+
     if (options.getHCatalogPartitionKeys() != null
         && options.getHCatalogPartitionValues() == null) {
       throw new InvalidOptionsException("Either both --hcatalog-partition-keys"
diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index b6269648..26d087b0 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -402,6 +402,27 @@ public void testAppendHiveImportAsParquet() throws IOException {
         {"test2", 4242, "somestring2"}, {"test", 42, "somestring"}});
   }
 
+  /**
+   * Test hive create and --as-parquetfile options validation.
+   */
+  @Test
+  public void testCreateHiveImportAsParquet() throws ParseException {
+    final String TABLE_NAME = "CREATE_HIVE_IMPORT_AS_PARQUET";
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String [] extraArgs = {"--as-parquetfile", "--create-hive-table"};
+    ImportTool tool = new ImportTool();
+
+    try {
+      tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
+          null, true));
+      fail("Expected InvalidOptionsException");
+    } catch (InvalidOptionsException ex) {
+      /* success */
+    }
+  }
+
+
   /** Test that dates are coerced properly to strings.
    */
   @Test
   public void testDate() throws IOException {
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 5cd4c26a..54b4552a 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -164,6 +164,24 @@ public void testHCatExportWithExportDir() throws Exception {
     }
   }
 
+  public void testHCatExportWithParquetFile() throws Exception {
+    String[] args = {
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--as-parquetfile",
+    };
+    try {
+      SqoopOptions opts = parseExportArgs(args);
+      exportTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
 
   public void testHCatImportWithSequenceFile() throws Exception {
     String[] args = {
@@ -184,6 +202,28 @@ public void testHCatImportWithSequenceFile() throws Exception {
     }
   }
 
+  public void testHCatImportWithParquetFile() throws Exception {
+    String[] args = {
+      "--hcatalog-table",
+      "table",
+      "--create-hcatalog-table",
+      "--connect",
+      "jdbc:db:url",
+      "--table",
+      "dbtable",
+      "--hcatalog-table",
+      "table",
+      "--as-parquetfile",
+    };
+    try {
+      SqoopOptions opts = parseImportArgs(args);
+      importTool.validateOptions(opts);
+      fail("Expected InvalidOptionsException");
+    } catch (SqoopOptions.InvalidOptionsException ioe) {
+      // expected.
+    }
+  }
+
   public void testHCatImportWithAvroFile() throws Exception {
     String[] args = {
       "--connect",