mirror of
https://github.com/apache/sqoop.git
synced 2025-05-03 16:09:27 +08:00
SQOOP-212. Hive import for existing table does not work.
This patch fixes a bug that prevents importing data into an existing hive table with the 'hive-overwrite' argument set. From: Ahmed Radwan <ahmed@cloudera.com> git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1150043 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
cb911f34b4
commit
eac50d0e24
@ -26,8 +26,11 @@ Hive options
|
||||
If set, then import the table into Hive
|
||||
|
||||
--hive-overwrite::
|
||||
Overwrites existing table in hive.
|
||||
By default it does not overwrite existing table.
|
||||
Overwrites existing data in the hive table if it exists.
|
||||
|
||||
--create-hive-table::
|
||||
If set, then the job will fail if the target hive table exists.
|
||||
By default this property is false.
|
||||
|
||||
--hive-table (table-name)::
|
||||
When used with --hive-import, overrides the destination table name
|
||||
|
@ -32,8 +32,11 @@ Hive options
|
||||
Override $HIVE_HOME
|
||||
|
||||
--hive-overwrite::
|
||||
Overwrites existing table in hive.
|
||||
By default it does not overwrite existing table.
|
||||
Overwrites existing data in the hive table if it exists.
|
||||
|
||||
--create-hive-table::
|
||||
If set, then the job will fail if the target hive table exists.
|
||||
By default this property is false.
|
||||
|
||||
--hive-table (table-name)::
|
||||
When used with --hive-import, overrides the destination table name
|
||||
|
@ -48,10 +48,12 @@ Argument Description
|
||||
--------------------------------------------------------------------------
|
||||
+\--hive-home <dir>+ Override +$HIVE_HOME+
|
||||
+\--hive-overwrite+ Overwrite existing data in the Hive table.
|
||||
+\--create-hive-table+ If set, then the job will fail if the target hive
|
||||
table exists. By default this property is false.
|
||||
+\--hive-table <table-name>+ Sets the table name to use when importing \
|
||||
to Hive.
|
||||
+\--table+ The database table to read the \
|
||||
definition from.
|
||||
definition from.
|
||||
--------------------------------------------------------------------------
|
||||
|
||||
include::output-args.txt[]
|
||||
|
@ -26,6 +26,8 @@ Argument Description
|
||||
+\--hive-import+ Import tables into Hive (Uses Hive's \
|
||||
default delimiters if none are set.)
|
||||
+\--hive-overwrite+ Overwrite existing data in the Hive table.
|
||||
+\--create-hive-table+ If set, then the job will fail if the target hive
|
||||
table exists. By default this property is false.
|
||||
+\--hive-table <table-name>+ Sets the table name to use when importing \
|
||||
to Hive.
|
||||
+\--hive-drop-import-delims+ Drops '\n', '\r', and '\01' from string\
|
||||
|
@ -145,6 +145,8 @@ public enum IncrementalMode {
|
||||
private String hiveHome; // not serialized to metastore.
|
||||
@StoredAsProperty("hive.import") private boolean hiveImport;
|
||||
@StoredAsProperty("hive.overwrite.table") private boolean overwriteHiveTable;
|
||||
@StoredAsProperty("hive.fail.table.exists")
|
||||
private boolean failIfHiveTableExists;
|
||||
@StoredAsProperty("hive.table.name") private String hiveTableName;
|
||||
@StoredAsProperty("hive.drop.delims") private boolean hiveDropDelims;
|
||||
@StoredAsProperty("hive.partition.key") private String hivePartitionKey;
|
||||
@ -1010,6 +1012,18 @@ public void setHiveDropDelims(boolean dropHiveDelims) {
|
||||
this.hiveDropDelims = dropHiveDelims;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the user-specified option to specify sqoop's behavior during
|
||||
* target table creation if the table exists.
|
||||
*/
|
||||
public boolean doFailIfHiveTableExists() {
|
||||
return failIfHiveTableExists;
|
||||
}
|
||||
|
||||
public void setFailIfHiveTableExists(boolean fail) {
|
||||
this.failIfHiveTableExists = fail;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return location where .java files go; guaranteed to end with '/'.
|
||||
*/
|
||||
|
@ -129,7 +129,7 @@ public String getCreateTableStmt() throws IOException {
|
||||
|
||||
String [] colNames = getColumnNames();
|
||||
StringBuilder sb = new StringBuilder();
|
||||
if (options.doOverwriteHiveTable()) {
|
||||
if (options.doFailIfHiveTableExists()) {
|
||||
sb.append("CREATE TABLE `").append(outputTableName).append("` ( ");
|
||||
} else {
|
||||
sb.append("CREATE TABLE IF NOT EXISTS `");
|
||||
@ -209,8 +209,11 @@ public String getLoadDataStmt() throws IOException {
|
||||
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("LOAD DATA INPATH '");
|
||||
sb.append(finalPathStr);
|
||||
sb.append("' INTO TABLE `");
|
||||
sb.append(finalPathStr + "'");
|
||||
if (options.doOverwriteHiveTable()) {
|
||||
sb.append(" OVERWRITE");
|
||||
}
|
||||
sb.append(" INTO TABLE `");
|
||||
sb.append(outputTableName);
|
||||
sb.append('`');
|
||||
|
||||
|
@ -92,6 +92,8 @@ public abstract class BaseSqoopTool extends SqoopTool {
|
||||
public static final String HIVE_DROP_DELIMS_ARG = "hive-drop-import-delims";
|
||||
public static final String HIVE_PARTITION_KEY_ARG = "hive-partition-key";
|
||||
public static final String HIVE_PARTITION_VALUE_ARG = "hive-partition-value";
|
||||
public static final String CREATE_HIVE_TABLE_ARG =
|
||||
"create-hive-table";
|
||||
public static final String NUM_MAPPERS_ARG = "num-mappers";
|
||||
public static final String NUM_MAPPERS_SHORT_ARG = "m";
|
||||
public static final String COMPRESS_ARG = "compress";
|
||||
@ -398,6 +400,10 @@ protected RelatedOptions getHiveOptions(boolean explicitHiveImport) {
|
||||
.withDescription("Overwrite existing data in the Hive table")
|
||||
.withLongOpt(HIVE_OVERWRITE_ARG)
|
||||
.create());
|
||||
hiveOpts.addOption(OptionBuilder
|
||||
.withDescription("Fail if the target hive table exists")
|
||||
.withLongOpt(CREATE_HIVE_TABLE_ARG)
|
||||
.create());
|
||||
hiveOpts.addOption(OptionBuilder.withArgName("table-name")
|
||||
.hasArg()
|
||||
.withDescription("Sets the table name to use when importing to hive")
|
||||
@ -669,6 +675,10 @@ protected void applyHiveOptions(CommandLine in, SqoopOptions out)
|
||||
out.setOverwriteHiveTable(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(CREATE_HIVE_TABLE_ARG)) {
|
||||
out.setFailIfHiveTableExists(true);
|
||||
}
|
||||
|
||||
if (in.hasOption(HIVE_TABLE_ARG)) {
|
||||
out.setHiveTableName(in.getOptionValue(HIVE_TABLE_ARG));
|
||||
}
|
||||
|
@ -245,7 +245,7 @@ public void testCreateOverwriteHiveImport() throws IOException {
|
||||
setNumCols(3);
|
||||
String [] types = { "VARCHAR(32)", "INTEGER", "CHAR(64)" };
|
||||
String [] vals = { "'test'", "42", "'somestring'" };
|
||||
String [] extraArgs = {"--hive-overwrite"};
|
||||
String [] extraArgs = {"--hive-overwrite", "--create-hive-table"};
|
||||
runImportTest(TABLE_NAME, types, vals,
|
||||
"createOverwriteImport.q", getCreateHiveTableArgs(extraArgs),
|
||||
new CreateHiveTableTool());
|
||||
|
Loading…
Reference in New Issue
Block a user