SQOOP-3423: Let user pass password to connect Hive when it set to LDAP authentication
(Denes Bodo via Boglarka Egyed)

parent 1a04d2007d
commit dfeb14534f

@@ -47,6 +47,8 @@ Argument                    Description
 +\--hs2-url+               The JDBC connection string to HiveServer2 as you would specify in Beeline. If you use this option with \
                            --hive-import then Sqoop will try to connect to HiveServer2 instead of using Hive CLI.
 +\--hs2-user+              The user for creating the JDBC connection to HiveServer2. The default is the current OS user.
++\--hs2-password+          The password for creating the JDBC connection to HiveServer2. If not specified, kerberos\
+                           authentication will be used.
 +\--hs2-keytab+            The path to the keytab file of the user connecting to HiveServer2. If you choose another \
                            HiveServer2 user (with --hs2-user) then --hs2-keytab has to be also specified otherwise it can be omitted.
 +\--external-table-dir+    Used to specify that the table is external, not managed. \
@@ -48,7 +48,7 @@ Sqoop will use +$HIVE_HOME/bin/hive+ from here.
 will not be transferred via the JDBC connection it is written directly to HDFS
 just like in case of the default hive import. As HiveServer2 provides proper
 authorization and auditing features it is recommended to use this instead of
-the default. Currently only Kerberos authentication and text file format is
+the default. Currently only Kerberos and LDAP authentication and text file format is
 supported with this option.
 
 NOTE: This function is incompatible with +\--as-avrodatafile+ and
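
To make the documented behaviour concrete, here is a hypothetical invocation (not part of the commit): the connect string, table name and credentials are placeholders, --connect, --table and --hive-import are standard Sqoop arguments, and the --hs2-* arguments are the ones described above.

    sqoop import \
        --connect jdbc:mysql://db.example.com/corp \
        --table employees \
        --hive-import \
        --hs2-url jdbc:hive2://hs2.example.com:10000/default \
        --hs2-user someuser \
        --hs2-password secret

Because a password is supplied here, Sqoop opens the HiveServer2 JDBC connection with that user/password pair (LDAP authentication on the HiveServer2 side); if --hs2-password were omitted, the Kerberos path with --hs2-keytab would be used instead, as the option table above states.
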
@@ -467,6 +467,9 @@ public String toString() {
   @StoredAsProperty("hs2.user")
   private String hs2User;
 
+  @StoredAsProperty("hs2.password")
+  private String hs2Password;
+
   @StoredAsProperty("hs2.keytab")
   private String hs2Keytab;
 
@@ -2975,10 +2978,18 @@ public String getHs2User() {
     return hs2User;
   }
 
+  public String getHs2Password() {
+    return hs2Password;
+  }
+
   public void setHs2User(String hs2User) {
     this.hs2User = hs2User;
   }
 
+  public void setHs2Password(String hs2Password) {
+    this.hs2Password = hs2Password;
+  }
+
   public String getHs2Keytab() {
     return hs2Keytab;
   }
@@ -34,8 +34,12 @@ public class HiveServer2ConnectionFactoryInitializer {
 
   public JdbcConnectionFactory createJdbcConnectionFactory(SqoopOptions sqoopOptions) {
     String connectionUsername = determineConnectionUsername(sqoopOptions);
-    JdbcConnectionFactory connectionFactory = new HiveServer2ConnectionFactory(sqoopOptions.getHs2Url(), connectionUsername);
-    if (useKerberizedConnection(sqoopOptions)) {
+    String connectionPassword = sqoopOptions.getHs2Password();
+    JdbcConnectionFactory connectionFactory = new HiveServer2ConnectionFactory(
+        sqoopOptions.getHs2Url(),
+        connectionUsername,
+        connectionPassword);
+    if (connectionPassword == null && useKerberizedConnection(sqoopOptions)) {
       KerberosAuthenticator authenticator = createKerberosAuthenticator(sqoopOptions);
       connectionFactory = new KerberizedConnectionFactoryDecorator(connectionFactory, authenticator);
     }
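
As an aside, the sketch below is not Sqoop code: it is a self-contained restatement of the selection logic this hunk introduces, with made-up class names, to highlight the key point that a supplied password switches the factory to plain user/password (LDAP) authentication, while the Kerberos decorator is applied only when no password was given.

// Illustrative only -- these are not Sqoop classes; they mirror the shape of the change.
public class Hs2AuthSelectionSketch {

  interface ConnectionFactory {}

  static final class PasswordConnectionFactory implements ConnectionFactory {
    final String url;
    final String user;
    final String password; // null when no --hs2-password was given

    PasswordConnectionFactory(String url, String user, String password) {
      this.url = url;
      this.user = user;
      this.password = password;
    }
  }

  static final class KerberizedDecorator implements ConnectionFactory {
    final ConnectionFactory delegate;

    KerberizedDecorator(ConnectionFactory delegate) {
      this.delegate = delegate;
    }
  }

  static ConnectionFactory create(String url, String user, String password, boolean kerberosConfigured) {
    ConnectionFactory factory = new PasswordConnectionFactory(url, user, password);
    // A password implies password-based (e.g. LDAP) authentication, so the
    // Kerberos wrapper is skipped even when a keytab/principal is configured.
    if (password == null && kerberosConfigured) {
      factory = new KerberizedDecorator(factory);
    }
    return factory;
  }
}
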
@@ -142,6 +142,7 @@ public abstract class BaseSqoopTool extends org.apache.sqoop.tool.SqoopTool {
   public static final String HCATALOG_HOME_ARG = "hcatalog-home";
   public static final String HS2_URL_ARG = "hs2-url";
   public static final String HS2_USER_ARG = "hs2-user";
+  public static final String HS2_PASSWORD_ARG = "hs2-password";
   public static final String HS2_KEYTAB_ARG = "hs2-keytab";
   public static final String MAPREDUCE_JOB_NAME = "mapreduce-job-name";
   public static final String NUM_MAPPERS_ARG = "num-mappers";
@@ -637,6 +638,10 @@ protected RelatedOptions getHiveOptions(boolean explicitHiveImport) {
         .withDescription("The user/principal for HiveServer2.")
         .withLongOpt(HS2_USER_ARG)
         .create());
+    hiveOpts.addOption(OptionBuilder.hasArg()
+        .withDescription("The LDAP password for HiveServer2.")
+        .withLongOpt(HS2_PASSWORD_ARG)
+        .create());
     hiveOpts.addOption(OptionBuilder
         .hasArg()
         .withDescription("The location of the keytab of the HiveServer2 user.")
@@ -1283,6 +1288,9 @@ protected void applyHiveOptions(CommandLine in, SqoopOptions out)
     if (in.hasOption(HS2_USER_ARG)) {
       out.setHs2User(in.getOptionValue(HS2_USER_ARG));
     }
+    if (in.hasOption(HS2_PASSWORD_ARG)) {
+      out.setHs2Password(in.getOptionValue(HS2_PASSWORD_ARG));
+    }
     if (in.hasOption(HS2_KEYTAB_ARG)) {
       out.setHs2Keytab(in.getOptionValue(HS2_KEYTAB_ARG));
     }
@@ -42,6 +42,8 @@ public class HiveServer2ConnectionFactoryInitializerTest {
 
   private static final String TEST_HS2_USER = "testuser";
 
+  private static final String TEST_HS2_PASSWORD = "testPass@123";
+
   private static final String TEST_HS2_KEYTAB = "testkeytab";
 
   private HiveServer2ConnectionFactoryInitializer connectionFactoryInitializer;
@@ -83,6 +85,7 @@ public void testCreateJdbcConnectionFactoryInitializesConnectionUsernameProperly
     HiveServer2ConnectionFactory connectionFactory = (HiveServer2ConnectionFactory) connectionFactoryInitializer.createJdbcConnectionFactory(sqoopOptions);
 
     assertEquals(TEST_HS2_USER, connectionFactory.getUsername());
+    assertEquals(null, connectionFactory.getPassword());
   }
 
   @Test
@@ -117,4 +120,13 @@ public void testCreateJdbcConnectionFactoryWithKerberosConfiguredInitializesDeco
     softly.assertAll();
   }
 
+  @Test
+  public void testConnectionFactoryWhenHivePasswordIsProvided() {
+    when(sqoopOptions.getHs2Password()).thenReturn(TEST_HS2_PASSWORD);
+
+    HiveServer2ConnectionFactory connectionFactory = (HiveServer2ConnectionFactory) connectionFactoryInitializer.createJdbcConnectionFactory(sqoopOptions);
+
+    assertEquals(TEST_HS2_PASSWORD, connectionFactory.getPassword());
+  }
+
 }