SQOOP-489. Cannot define partition keys for Hive tables created through Sqoop.
(Cheolsoo Park via Jarek Jarcec Cecho) git-svn-id: https://svn.apache.org/repos/asf/sqoop/trunk@1344429 13f79535-47bb-0310-9956-ffa450edef68
commit 4505205588
parent b0de5ca47c
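In getCreateTableStmt(), the patch caches options.getHivePartitionKey() in a local partitionKey variable and rejects a partition key that duplicates one of the columns being imported, since Hive requires partition columns to be distinct from the table's data columns. The test changes add a create-table-only argv builder and a regression test that drives the bad-key case through both the hive-import and create-hive-table code paths.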
@@ -155,7 +155,13 @@ public String getCreateTableStmt() throws IOException {
     }

     boolean first = true;
+    String partitionKey = options.getHivePartitionKey();
     for (String col : colNames) {
+      if (col.equals(partitionKey)) {
+        throw new IllegalArgumentException("Partition key " + col + " cannot "
+            + "be a column to import.");
+      }
+
       if (!first) {
         sb.append(", ");
       }
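Pulled out of context, the added check is simple; the sketch below is a self-contained stand-in (the class, method, and column names are invented for illustration and are not part of Sqoop):

public class PartitionKeyCheck {

  // Mirrors the validation added above: the Hive partition key may not
  // also be one of the columns being imported.
  static void checkPartitionKey(String partitionKey, String[] colNames) {
    for (String col : colNames) {
      if (col.equals(partitionKey)) {
        throw new IllegalArgumentException("Partition key " + col + " cannot "
            + "be a column to import.");
      }
    }
  }

  public static void main(String[] args) {
    checkPartitionKey("dt", new String[] { "id", "msg" }); // ok: key is a fresh column
    checkPartitionKey("id", new String[] { "id", "msg" }); // throws IllegalArgumentException
  }
}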
@@ -188,9 +194,9 @@ public String getCreateTableStmt() throws IOException {
       sb.append("COMMENT 'Imported by sqoop on " + curDateStr + "' ");
     }

-    if (options.getHivePartitionKey() != null) {
+    if (partitionKey != null) {
       sb.append("PARTITIONED BY (")
-        .append(options.getHivePartitionKey())
+        .append(partitionKey)
         .append(" STRING) ");
     }

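To see what the unchanged PARTITIONED BY logic produces once the key passes validation, here is a minimal standalone sketch; the table definition and key name are invented, and only the clause-building mirrors the code above:

public class PartitionClauseSketch {
  public static void main(String[] args) {
    String partitionKey = "dt"; // stands in for options.getHivePartitionKey()
    StringBuilder sb =
        new StringBuilder("CREATE TABLE `foo` ( `id` INT, `msg` STRING) ");
    if (partitionKey != null) {
      sb.append("PARTITIONED BY (")
        .append(partitionKey)
        .append(" STRING) ");
    }
    // Prints: CREATE TABLE `foo` ( `id` INT, `msg` STRING) PARTITIONED BY (dt STRING)
    System.out.println(sb);
  }
}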
@@ -108,6 +108,28 @@ private void setNumCols(int numCols) {
     return args.toArray(new String[0]);
   }

+  /**
+   * @return the argv to supply to a create-table only job for Hive imports.
+   */
+  protected String [] getCreateTableArgv(boolean includeHadoopFlags,
+      String [] moreArgs) {
+
+    ArrayList<String> args = new ArrayList<String>();
+
+    if (null != moreArgs) {
+      for (String arg: moreArgs) {
+        args.add(arg);
+      }
+    }
+
+    args.add("--table");
+    args.add(getTableName());
+    args.add("--connect");
+    args.add(HsqldbTestServer.getUrl());
+
+    return args.toArray(new String[0]);
+  }
+
   /**
    * @return the argv to supply to a code-gen only job for Hive imports.
    */
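The new getCreateTableArgv() mirrors the existing argv builders but supplies only what a create-table-only run needs: any caller-provided options first, then --table with the test table name and --connect with the in-process HSQLDB URL. This lets the test below reuse the same moreArgs arrays against CreateHiveTableTool.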
@@ -452,4 +474,62 @@ public void testImportHiveWithPartitions() throws IOException,
         getArgv(false, moreArgs), new ImportTool());
   }

+  /**
+   * If partition key is set to one of importing columns, we should get an
+   * IOException.
+   * */
+  @Test
+  public void testImportWithBadPartitionKey() {
+    final String TABLE_NAME = "FAILING_PARTITION_HIVE_IMPORT";
+
+    LOG.info("Doing import of single row into " + TABLE_NAME + " table");
+    setCurTableName(TABLE_NAME);
+    setNumCols(3);
+    String[] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)", };
+    String[] vals = { "'key'", "42", "'I am a row in a partition'", };
+
+    String partitionKey = getColNames()[0];
+
+    // Specify 1st column as partition key and import every column of the
+    // table by default (i.e. no --columns option).
+    String[] moreArgs1 = {
+        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
+        partitionKey,
+    };
+
+    // Specify 1st column as both partition key and importing column.
+    String[] moreArgs2 = {
+        "--" + BaseSqoopTool.HIVE_PARTITION_KEY_ARG,
+        partitionKey,
+        "--" + BaseSqoopTool.COLUMNS_ARG,
+        partitionKey,
+    };
+
+    // Test hive-import with the 1st args.
+    try {
+      runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+          getArgv(false, moreArgs1), new ImportTool());
+      fail(TABLE_NAME + " test should have thrown IOException");
+    } catch (IOException ioe) {
+      // expected; ok.
+    }
+
+    // Test hive-import with the 2nd args.
+    try {
+      runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+          getArgv(false, moreArgs2), new ImportTool());
+      fail(TABLE_NAME + " test should have thrown IOException");
+    } catch (IOException ioe) {
+      // expected; ok.
+    }
+
+    // Test create-hive-table with the 1st args.
+    try {
+      runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
+          getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
+      fail(TABLE_NAME + " test should have thrown IOException");
+    } catch (IOException ioe) {
+      // expected; ok.
+    }
+  }
 }
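The test relies on the classic try/fail/catch idiom: if the tool run returns normally, fail() trips the test; if the failure surfaces as the expected IOException, the catch block swallows it. Below is a minimal standalone rendering of that idiom; doImport() and the column names are invented stand-ins for the Sqoop test harness (the real patch throws IllegalArgumentException deeper in the import, which the test expects to surface as an IOException from the tool run):

import java.io.IOException;

public class ExpectedFailureIdiom {

  // Invented stand-in for the import run; throws when the partition key
  // collides with an imported column.
  static void doImport(String partitionKey, String[] cols) throws IOException {
    for (String col : cols) {
      if (col.equals(partitionKey)) {
        throw new IOException("Partition key " + col
            + " cannot be a column to import.");
      }
    }
  }

  public static void main(String[] args) throws Exception {
    try {
      doImport("DATA_COL0", new String[] { "DATA_COL0", "DATA_COL1" });
      throw new AssertionError("should have thrown IOException"); // fail() equivalent
    } catch (IOException expected) {
      System.out.println("Got expected failure: " + expected.getMessage()); // expected; ok.
    }
  }
}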