5
0
Mirror of https://github.com/apache/sqoop.git (last synced 2025-05-21 03:10:49 +08:00)

SQOOP-912: Add ability to specify --hive-database parameter

(Jarek Jarcec Cecho via Kate Ting)
This commit is contained in:
Kate Ting 2013-04-15 12:44:32 -04:00
parent c4b0eac2d0
commit 8b3742ad69
4 changed files with 50 additions and 2 deletions

View File

@@ -145,6 +145,7 @@ public String toString() {
   @StoredAsProperty("hive.fail.table.exists")
   private boolean failIfHiveTableExists;
   @StoredAsProperty("hive.table.name") private String hiveTableName;
+  @StoredAsProperty("hive.database.name") private String hiveDatabaseName;
   @StoredAsProperty("hive.drop.delims") private boolean hiveDropDelims;
   @StoredAsProperty("hive.delims.replacement")
   private String hiveDelimsReplacement;

@@ -1618,6 +1619,14 @@ public void setHiveTableName(String name) {
     this.hiveTableName = name;
   }
/**
 * Returns the Hive database that imports should target.
 *
 * @return the configured Hive database name, or null when no
 *         --hive-database was supplied (Hive's default database applies)
 */
public String getHiveDatabaseName() {
  return hiveDatabaseName;
}

/**
 * Sets the Hive database to use when importing into Hive.
 *
 * @param name the target Hive database name
 */
public void setHiveDatabaseName(String name) {
  this.hiveDatabaseName = name;
}
  public String getHivePartitionKey() {
    return hivePartitionKey;
  }

View File

@@ -132,12 +132,16 @@ public String getCreateTableStmt() throws IOException {
     String [] colNames = getColumnNames();
     StringBuilder sb = new StringBuilder();
     if (options.doFailIfHiveTableExists()) {
-      sb.append("CREATE TABLE `").append(outputTableName).append("` ( ");
+      sb.append("CREATE TABLE `");
     } else {
       sb.append("CREATE TABLE IF NOT EXISTS `");
-      sb.append(outputTableName).append("` ( ");
     }
+    if(options.getHiveDatabaseName() != null) {
+      sb.append(options.getHiveDatabaseName()).append("`.`");
+    }
+    sb.append(outputTableName).append("` ( ");

     // Check that all explicitly mapped columns are present in result set
     for(Object column : userMapping.keySet()) {
       boolean found = false;

@@ -232,6 +236,9 @@ public String getLoadDataStmt() throws IOException {
       sb.append(" OVERWRITE");
     }
     sb.append(" INTO TABLE `");
+    if(options.getHiveDatabaseName() != null) {
+      sb.append(options.getHiveDatabaseName()).append("`.`");
+    }
     sb.append(outputTableName);
     sb.append('`');

View File

@@ -99,6 +99,7 @@ public abstract class BaseSqoopTool extends com.cloudera.sqoop.tool.SqoopTool {
   public static final String FMT_AVRODATAFILE_ARG = "as-avrodatafile";
   public static final String HIVE_IMPORT_ARG = "hive-import";
   public static final String HIVE_TABLE_ARG = "hive-table";
+  public static final String HIVE_DATABASE_ARG = "hive-database";
   public static final String HIVE_OVERWRITE_ARG = "hive-overwrite";
   public static final String HIVE_DROP_DELIMS_ARG = "hive-drop-import-delims";
   public static final String HIVE_DELIMS_REPLACEMENT_ARG =

@@ -445,6 +446,11 @@ protected RelatedOptions getHiveOptions(boolean explicitHiveImport) {
         .withDescription("Sets the table name to use when importing to hive")
         .withLongOpt(HIVE_TABLE_ARG)
         .create());
+    hiveOpts.addOption(OptionBuilder.withArgName("database-name")
+        .hasArg()
+        .withDescription("Sets the database name to use when importing to hive")
+        .withLongOpt(HIVE_DATABASE_ARG)
+        .create());
     hiveOpts.addOption(OptionBuilder
         .withDescription("Drop Hive record \\0x01 and row delimiters "
             + "(\\n\\r) from imported string fields")

@@ -814,6 +820,10 @@ protected void applyHiveOptions(CommandLine in, SqoopOptions out)
       out.setHiveTableName(in.getOptionValue(HIVE_TABLE_ARG));
     }
+    if(in.hasOption(HIVE_DATABASE_ARG)) {
+      out.setHiveDatabaseName(in.getOptionValue(HIVE_DATABASE_ARG));
+    }
     if (in.hasOption(HIVE_DROP_DELIMS_ARG)) {
       out.setHiveDropDelims(true);
     }

View File

@@ -212,4 +212,26 @@ public void testUserMappingFailWhenCantBeApplied() throws Exception {
       // Expected, ok
     }
   }
/**
 * Verifies that --hive-database is honored by both the generated
 * CREATE TABLE statement and the generated LOAD DATA statement:
 * each must qualify the table as `db`.`outputTable`.
 */
public void testHiveDatabase() throws Exception {
  String[] args = {
      "--hive-database", "db",
  };

  Configuration conf = new Configuration();
  SqoopOptions options =
      new ImportTool().parseArguments(args, null, null, false);
  TableDefWriter writer = new TableDefWriter(options,
      null, HsqldbTestServer.getTableName(), "outputTable", conf, false);

  Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
  writer.setColumnTypes(colTypes);

  String createTable = writer.getCreateTableStmt();
  assertNotNull(createTable);
  assertTrue(createTable.contains("`db`.`outputTable`"));

  String loadStmt = writer.getLoadDataStmt();
  assertNotNull(loadStmt);
  // Bug fix: the original repeated the createTable assertion here,
  // so the LOAD DATA statement was never actually checked.
  assertTrue(loadStmt.contains("`db`.`outputTable`"));
}
}