5
0
mirror of https://github.com/apache/sqoop.git synced 2025-05-03 08:00:10 +08:00

SQOOP-3010: Sqoop should not allow --as-parquetfile with HCatalog jobs, or with Hive import when --create-hive-table is used

(Sowmya Ramesh via Venkat Ranganathan)
This commit is contained in:
Venkatesan Ranganathan 2016-09-26 05:34:13 -07:00
parent b007e4d59d
commit 8f3989d9ad
3 changed files with 75 additions and 0 deletions

View File

@ -1421,6 +1421,14 @@ protected void validateHiveOptions(SqoopOptions options)
+ "importing into SequenceFile format.");
}
// Hive import with --create-hive-table is not supported for the ParquetFile
// format: reject the combination up front rather than failing mid-import.
if (options.doHiveImport()
&& options.doFailIfHiveTableExists()
&& options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
throw new InvalidOptionsException("Hive import and create hive table is not compatible with "
+ "importing into ParquetFile format.");
}
if (options.doHiveImport()
&& options.isAppendMode()
&& !options.getIncrementalMode().equals(IncrementalMode.AppendRows)) {
@ -1598,6 +1606,12 @@ protected void validateHCatalogOptions(SqoopOptions options)
+ " option." + HELP_STR);
}
// HCatalog jobs do not support --as-parquetfile; fail validation early.
if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
// Bug fix: the message previously said "SequenceFile format option"
// (copy-pasted from the SequenceFile check) while reporting the
// Parquet incompatibility — name the correct format here.
throw new InvalidOptionsException("HCatalog job is not compatible with "
+ "Parquet format option " + FMT_PARQUETFILE_ARG
+ " option." + HELP_STR);
}
if (options.getHCatalogPartitionKeys() != null
&& options.getHCatalogPartitionValues() == null) {
throw new InvalidOptionsException("Either both --hcatalog-partition-keys"

View File

@ -402,6 +402,27 @@ public void testAppendHiveImportAsParquet() throws IOException {
{"test2", 4242, "somestring2"}, {"test", 42, "somestring"}});
}
/**
 * Verifies that option validation rejects the combination of
 * --create-hive-table with --as-parquetfile.
 */
@Test
public void testCreateHiveImportAsParquet() throws ParseException {
setCurTableName("CREATE_HIVE_IMPORT_AS_PARQUET");
setNumCols(3);
String[] incompatibleArgs = {"--as-parquetfile", "--create-hive-table"};
ImportTool importTool = new ImportTool();
try {
importTool.validateOptions(importTool.parseArguments(
getArgv(false, incompatibleArgs), null, null, true));
fail("Expected InvalidOptionsException");
} catch (InvalidOptionsException expected) {
// Validation rejected the incompatible options, as intended.
}
}
/** Test that dates are coerced properly to strings. */
@Test
public void testDate() throws IOException {

View File

@ -164,6 +164,24 @@ public void testHCatExportWithExportDir() throws Exception {
}
}
/** HCatalog export must reject the --as-parquetfile option. */
public void testHCatExportWithParquetFile() throws Exception {
String[] exportArgs = {
"--connect", "jdbc:db:url",
"--table", "dbtable",
"--hcatalog-table", "table",
"--as-parquetfile",
};
try {
SqoopOptions parsedOptions = parseExportArgs(exportArgs);
exportTool.validateOptions(parsedOptions);
fail("Expected InvalidOptionsException");
} catch (SqoopOptions.InvalidOptionsException expected) {
// expected: Parquet is incompatible with HCatalog jobs.
}
}
public void testHCatImportWithSequenceFile() throws Exception {
String[] args = {
@ -184,6 +202,28 @@ public void testHCatImportWithSequenceFile() throws Exception {
}
}
/**
 * HCatalog import must reject the --as-parquetfile option.
 *
 * Fix: the original argument list passed "--hcatalog-table table" twice;
 * the redundant duplicate pair has been removed.
 */
public void testHCatImportWithParquetFile() throws Exception {
String[] args = {
"--connect",
"jdbc:db:url",
"--table",
"dbtable",
"--hcatalog-table",
"table",
"--create-hcatalog-table",
"--as-parquetfile",
};
try {
SqoopOptions opts = parseImportArgs(args);
importTool.validateOptions(opts);
fail("Expected InvalidOptionsException");
} catch (SqoopOptions.InvalidOptionsException ioe) {
// expected: Parquet is incompatible with HCatalog jobs.
}
}
public void testHCatImportWithAvroFile() throws Exception {
String[] args = {
"--connect",