diff --git a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
index 1d67a2d1..6f13fe23 100644
--- a/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
+++ b/src/test/com/cloudera/sqoop/hive/TestHiveImport.java
@@ -468,6 +468,8 @@ public void testCreateHiveImportAsParquet() throws ParseException, InvalidOption
     ImportTool tool = new ImportTool();
 
     thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during Hive table creation with " +
+        "--as-parquetfile");
     tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null, null, true));
   }
 
@@ -509,6 +511,7 @@ public void testHiveExitFails() throws IOException {
     String [] vals = { "3.14159", "'foo'" };
 
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException on erroneous Hive exit status");
     runImportTest(TABLE_NAME, types, vals, "failingImport.q",
         getArgv(false, null), new ImportTool());
   }
@@ -641,6 +644,8 @@ public void testHiveDropAndReplaceOptionValidation() throws ParseException, Inva
     ImportTool tool = new ImportTool();
 
     thrown.expect(InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException with conflicting Hive delimiter " +
+        "drop/replace options");
     tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null, null, true));
   }
 
@@ -698,16 +703,22 @@ public void testImportWithBadPartitionKey() throws IOException {
 
     // Test hive-import with the 1st args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with partition key " +
+        "as importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs1), new ImportTool());
 
     // Test hive-import with the 2nd args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive import with partition key " +
+        "as importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs2), new ImportTool());
 
     // Test create-hive-table with the 1st args.
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException during Hive table creation with partition key " +
+        "as importing column");
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
   }
diff --git a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
index 4db629fe..6af12da1 100644
--- a/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
+++ b/src/test/com/cloudera/sqoop/hive/TestTableDefWriter.java
@@ -55,6 +55,7 @@ public class TestTableDefWriter {
   // Test getHiveOctalCharCode and expect an IllegalArgumentException.
   private void expectExceptionInCharCode(int charCode) {
     thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException with out-of-range Hive delimiter");
     TableDefWriter.getHiveOctalCharCode(charCode);
   }
 
@@ -221,6 +222,7 @@ public void testUserMappingFailWhenCantBeApplied() throws Exception {
     writer.setColumnTypes(colTypes);
 
     thrown.expect(IllegalArgumentException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IllegalArgumentException on non applied Hive type mapping");
     String createTable = writer.getCreateTableStmt();
   }
 
diff --git a/src/test/com/cloudera/sqoop/io/TestCodecMap.java b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
index 982b6ad3..c78a5aed 100644
--- a/src/test/com/cloudera/sqoop/io/TestCodecMap.java
+++ b/src/test/com/cloudera/sqoop/io/TestCodecMap.java
@@ -70,7 +70,9 @@ public void testGetShortName() throws UnsupportedCodecException {
     verifyShortName("gzip", "org.apache.hadoop.io.compress.GzipCodec");
     verifyShortName("default", "org.apache.hadoop.io.compress.DefaultCodec");
 
-    thrown.expect(IOException.class);
+    thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException with invalid codec name during getting " +
+        "short codec name");
     verifyShortName("NONE", "bogus");
   }
 
@@ -83,6 +85,7 @@ private void verifyShortName(String expected, String codecName)
   @Test
   public void testUnrecognizedCodec() throws UnsupportedCodecException {
     thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException with invalid codec name");
     CodecMap.getCodec("bogus", new Configuration());
   }
 
diff --git a/src/test/com/cloudera/sqoop/io/TestLobFile.java b/src/test/com/cloudera/sqoop/io/TestLobFile.java
index 7f8ca6d6..029758c5 100644
--- a/src/test/com/cloudera/sqoop/io/TestLobFile.java
+++ b/src/test/com/cloudera/sqoop/io/TestLobFile.java
@@ -143,6 +143,7 @@ private void verifyClobFile(Path p, String... expectedRecords)
     reader.close();
 
     thrown.expect(IOException.class);
+    thrown.reportMissingExceptionWithMessage("Expected IOException calling next after close");
     reader.next();
 
     // A second close shouldn't hurt anything. This should be a no-op.
@@ -590,6 +591,7 @@ public void testCompressedFile() throws Exception {
     runCompressedTest(CodecMap.DEFLATE);
 
     thrown.expect(UnsupportedCodecException.class);
+    thrown.reportMissingExceptionWithMessage("Expected UnsupportedCodecException for lzo");
     runCompressedTest(CodecMap.LZO);
   }
 
diff --git a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
index 71a371d8..104effbb 100644
--- a/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
+++ b/src/test/org/apache/sqoop/hcat/TestHCatalogBasic.java
@@ -110,6 +110,8 @@ public void testHCatImportWithTargetDir() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --target-dir");
     importTool.validateOptions(opts);
   }
 
@@ -128,6 +130,8 @@ public void testHCatImportWithWarehouseDir() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --warehouse-dir");
     importTool.validateOptions(opts);
   }
 
@@ -145,6 +149,8 @@ public void testHCatImportWithHiveImport() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --hive-import");
     importTool.validateOptions(opts);
   }
 
@@ -163,6 +169,8 @@ public void testHCatExportWithExportDir() throws Exception {
     SqoopOptions opts = parseExportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog export " +
+        "with --export-dir");
     exportTool.validateOptions(opts);
   }
 
@@ -180,6 +188,8 @@ public void testHCatExportWithParquetFile() throws Exception {
     SqoopOptions opts = parseExportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog export " +
+        "with --as-parquetfile");
     exportTool.validateOptions(opts);
   }
 
@@ -197,6 +207,8 @@ public void testHCatImportWithSequenceFile() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-sequencefile");
     importTool.validateOptions(opts);
   }
 
@@ -217,6 +229,8 @@ public void testHCatImportWithParquetFile() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-parquetfile");
     importTool.validateOptions(opts);
   }
 
@@ -234,6 +248,8 @@ public void testHCatImportWithAvroFile() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --as-avrodatafile");
     importTool.validateOptions(opts);
   }
 
@@ -278,6 +294,8 @@ public void testHCatImportWithCreateTableAndDropAndCreateTable()
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with --drop-and-create-hcatalog-table");
     importTool.validateOptions(opts);
   }
 
@@ -331,6 +349,8 @@ public void testHCatImportWithOnlyHCatKeys() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with only HCatalog keys");
     importTool.validateOptions(opts);
   }
 
@@ -351,6 +371,8 @@ public void testHCatImportWithMismatchedKeysAndVals() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with mismatched keys and values");
     importTool.validateOptions(opts);
   }
 
@@ -371,6 +393,8 @@ public void testHCatImportWithEmptyKeysAndVals() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
 
     thrown.expect(SqoopOptions.InvalidOptionsException.class);
+    thrown.reportMissingExceptionWithMessage("Expected InvalidOptionsException during HCatalog import " +
+        "with empty keys and values");
     importTool.validateOptions(opts);
   }
 
diff --git a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
index adb795ea..911749f4 100644
--- a/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
+++ b/src/test/org/apache/sqoop/mapreduce/db/TestTextSplitter.java
@@ -146,6 +146,8 @@ public void testAlphabetSplitWhenMinStringGreaterThanMaxString() throws SQLExcep
     TextSplitter splitter = new TextSplitter();
 
     thrown.expect(ValidationException.class);
+    thrown.reportMissingExceptionWithMessage("Expected ValidationException during splitting " +
+        "when min string greater than max string");
     splitter.split(4, "Z", "A", "");
   }