
SQOOP-2744: Sqoop2: Enable kerberos for HadoopMiniCluster

(Dian Fu via Jarek Jarcec Cecho)
Jarek Jarcec Cecho committed 2016-02-05 11:33:35 -08:00
parent c2180caac1
commit 2f4da466ef
15 changed files with 446 additions and 127 deletions

View File

@@ -217,6 +217,21 @@ limitations under the License.
</properties>
</configuration>
</execution>
<execution>
<id>hive-test</id>
<goals>
<goal>test</goal>
</goals>
<phase>integration-test</phase>
<configuration>
<suiteXmlFiles>
<suiteXmlFile>src/test/resources/hive-tests-suite.xml</suiteXmlFile>
</suiteXmlFiles>
<properties>
<suitename>hive-tests</suitename>
</properties>
</configuration>
</execution>
<execution>
<id>new-integration-test</id>
<goals>

View File

@@ -45,15 +45,20 @@ public class HadoopMiniClusterRunner extends HadoopRunner {
@Override
public Configuration prepareConfiguration(Configuration config)
throws Exception {
config.set("dfs.block.access.token.enable", "false");
config.set("dfs.permissions", "true");
config.set("hadoop.security.authentication", "simple");
config.set("mapred.tasktracker.map.tasks.maximum", "1");
config.set("mapred.tasktracker.reduce.tasks.maximum", "1");
config.set("mapred.submit.replication", "1");
config.set("yarn.resourcemanager.scheduler.class", "org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler");
config.set("yarn.application.classpath",
System.getProperty("java.class.path"));
config.set("dfs.datanode.address", "0.0.0.0:0");
config.set("dfs.datanode.ipc.address", "0.0.0.0:0");
config.set("dfs.datanode.http.address", "0.0.0.0:0");
config.set("dfs.datanode.https.address", "0.0.0.0:0");
config.set("dfs.namenode.http-address", "0.0.0.0:0");
config.set("dfs.namenode.https-address", "0.0.0.0:0");
return config;
}
@@ -68,7 +73,12 @@ public void start() throws Exception {
// Start DFS server
LOG.info("Starting DFS cluster...");
dfsCluster = new MiniDFSCluster(config, 1, true, null);
dfsCluster = new MiniDFSCluster.Builder(config)
.numDataNodes(1)
.format(true)
.racks(null)
.checkDataNodeAddrConfig(true)
.build();
if (dfsCluster.isClusterUp()) {
LOG.info("Started DFS cluster on port: " + dfsCluster.getNameNodePort());
} else {

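The switch from the older MiniDFSCluster constructor to the Builder, combined with the 0.0.0.0:0 addresses above, lets every daemon bind an ephemeral port. A minimal sketch of the pattern, assuming the hadoop-hdfs test artifact is on the classpath (the class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class EphemeralPortSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Port 0 asks the OS for a free port, so parallel test forks cannot collide.
    conf.set("dfs.datanode.address", "0.0.0.0:0");
    conf.set("dfs.datanode.ipc.address", "0.0.0.0:0");
    conf.set("dfs.datanode.http.address", "0.0.0.0:0");
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(1)
        .format(true)
        .checkDataNodeAddrConfig(true) // honor the 0.0.0.0:0 values above
        .build();
    try {
      // The real port is only known after startup.
      System.out.println("NameNode RPC port: " + cluster.getNameNodePort());
    } finally {
      cluster.shutdown();
    }
  }
}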
View File

@@ -71,7 +71,7 @@ abstract public Configuration prepareConfiguration(Configuration config)
* This directory might be on local filesystem in case of local mode.
*/
public String getTestDirectory() {
return "/mapreduce-job-io";
return "/tmp/mapreduce-job-io";
}
/**

View File

@@ -140,7 +140,7 @@ public String getTestName() {
* @param context TestNG context that helps get all the test methods and classes.
*/
@BeforeSuite(dependsOnMethods = "findSuiteName")
public static void startInfrastructureProviders(ITestContext context) {
public static void startInfrastructureProviders(ITestContext context) throws Exception {
// Find infrastructure provider classes to be used.
Set<Class<? extends InfrastructureProvider>> providers = new HashSet<Class<? extends InfrastructureProvider>>();
for (ITestNGMethod method : context.getSuite().getAllMethods()) {
@@ -182,6 +182,10 @@ public static void startInfrastructureProviders(ITestContext context) {
KdcInfrastructureProvider kdcProviderObject = startInfrastructureProvider(KdcInfrastructureProvider.class, conf, null);
kdc = kdcProviderObject.getInstance();
providers.remove(KdcInfrastructureProvider.class);
conf = kdc.prepareHadoopConfiguration(conf);
} else {
conf.set("dfs.block.access.token.enable", "false");
conf.set("hadoop.security.authentication", "simple");
}
// Start Hadoop second.

View File

@@ -18,7 +18,9 @@
package org.apache.sqoop.test.kdc;
import java.net.URL;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.sqoop.client.SqoopClient;
@@ -37,6 +39,11 @@ public abstract class KdcRunner {
*/
private String temporaryPath;
public abstract Configuration prepareHadoopConfiguration(Configuration config)
throws Exception;
public abstract Map<String, String> prepareSqoopConfiguration(Map<String, String> properties);
/**
* Start kdc.
*
@@ -65,12 +72,6 @@ public abstract class KdcRunner {
public abstract void authenticateWithSqoopServer(final URL url,
final DelegationTokenAuthenticatedURL.Token authToken) throws Exception;
public abstract boolean isKerberosEnabled();
public abstract String getSpnegoPrincipal();
public abstract String getSqoopServerKeytabFile();
/**
* Get temporary path.
*

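With getSpnegoPrincipal() and getSqoopServerKeytabFile() removed and the two prepare* hooks added, a KdcRunner now hands back fully prepared configurations instead of exposing raw credentials. A hedged sketch of a minimal non-Kerberos implementation under the new contract (the class name is hypothetical; compare the deleted NoKerberosKdcRunner further down):

import java.net.URL;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.test.kdc.KdcRunner;

public class SimpleAuthKdcRunner extends KdcRunner {
  @Override
  public Configuration prepareHadoopConfiguration(Configuration config) {
    // Mirror the defaults SqoopTestCase applies when no KDC provider is used.
    config.set("hadoop.security.authentication", "simple");
    config.set("dfs.block.access.token.enable", "false");
    return config;
  }

  @Override
  public Map<String, String> prepareSqoopConfiguration(Map<String, String> properties) {
    properties.put("org.apache.sqoop.security.authentication.type", "SIMPLE");
    properties.put("org.apache.sqoop.security.authentication.handler",
        "org.apache.sqoop.security.authentication.SimpleAuthenticationHandler");
    return properties;
  }

  @Override
  public void start() throws Exception { /* nothing to start */ }

  @Override
  public void stop() throws Exception { /* nothing to stop */ }

  @Override
  public void authenticateWithSqoopServer(SqoopClient client) throws Exception { /* no-op */ }

  @Override
  public void authenticateWithSqoopServer(URL url,
      DelegationTokenAuthenticatedURL.Token authToken) throws Exception { /* no-op */ }

  @Override
  public boolean isKerberosEnabled() { return false; }
}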
View File

@@ -18,11 +18,31 @@
package org.apache.sqoop.test.kdc;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.math.BigInteger;
import java.net.URL;
import java.security.GeneralSecurityException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.security.SecureRandom;
import java.security.SignatureException;
import java.security.cert.Certificate;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -31,18 +51,21 @@
import java.util.concurrent.Callable;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.x500.X500Principal;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MConnector;
import org.apache.sqoop.test.utils.HdfsUtils;
import org.apache.sqoop.test.utils.SqoopUtils;
import org.bouncycastle.x509.X509V1CertificateGenerator;
/**
* Represents a Minikdc setup. Minikdc should be only used together with
@@ -59,7 +82,75 @@ public class MiniKdcRunner extends KdcRunner {
private String sqoopClientKeytabFile;
private String spnegoPrincipal;
private String sqoopServerKeytabFile;
private String spnegoKeytabFile;
// Currently all the services (NameNode, DataNode, ResourceManager, NodeManager, JobHistoryServer,
// Hive Metastore, HiveServer2, Sqoop Server, etc.) log in with the same principal, because in the
// integration tests they all run in the same JVM and the loginUser variable in
// UserGroupInformation, which represents the currently logged-in user, is static.
private String hadoopPrincipal;
private String hadoopKeytabFile;
@Override
public Configuration prepareHadoopConfiguration(Configuration config) throws Exception {
config.set("hadoop.security.authentication", "kerberos");
// HDFS related configurations
// NameNode configurations
config.set("dfs.namenode.kerberos.principal", hadoopPrincipal);
config.set("dfs.namenode.keytab.file", hadoopKeytabFile);
config.set("dfs.namenode.kerberos.internal.spnego.principal", spnegoPrincipal);
config.set("dfs.web.authentication.kerberos.principal", spnegoPrincipal);
config.set("dfs.web.authentication.kerberos.keytab", spnegoKeytabFile);
config.set("dfs.encrypt.data.transfer", "true");
// DataNode configurations
config.set("dfs.datanode.kerberos.principal", hadoopPrincipal);
config.set("dfs.datanode.keytab.file", hadoopKeytabFile);
String sslKeystoresDir = getTemporaryPath() + "/ssl-keystore";
String sslConfDir = getClasspathDir(MiniKdcRunner.class);
FileUtils.deleteDirectory(new File(sslKeystoresDir));
FileUtils.forceMkdir(new File(sslKeystoresDir));
setupSSLConfig(sslKeystoresDir, sslConfDir, config, false, true);
config.set("dfs.https.server.keystore.resource", getSSLConfigFileName("ssl-server"));
// Configurations used by both NameNode and DataNode
config.set("dfs.block.access.token.enable", "true");
config.set("dfs.http.policy", "HTTPS_ONLY");
// Configurations used by DFSClient
config.set("dfs.data.transfer.protection", "privacy");
config.set("dfs.client.https.keystore.resource", getSSLConfigFileName("ssl-client"));
// YARN related configurations
config.set("yarn.resourcemanager.principal", hadoopPrincipal);
config.set("yarn.resourcemanager.keytab", hadoopKeytabFile);
config.set("yarn.resourcemanager.webapp.spnego-principal", spnegoPrincipal);
config.set("yarn.resourcemanager.webapp.spnego-keytab-file", spnegoKeytabFile);
config.set("yarn.nodemanager.principal", hadoopPrincipal);
config.set("yarn.nodemanager.keytab", hadoopKeytabFile);
// MapReduce related configurations
config.set("mapreduce.jobhistory.principal", hadoopPrincipal);
config.set("mapreduce.jobhistory.keytab", hadoopKeytabFile);
config.set("yarn.app.mapreduce.am.command-opts",
"-Xmx1024m -Djava.security.krb5.conf=\\\"" +
miniKdc.getKrb5conf().getCanonicalPath() + "\\\"");
config.set("mapred.child.java.opts",
"-Xmx200m -Djava.security.krb5.conf=\\\"" +
miniKdc.getKrb5conf().getCanonicalPath() + "\\\"");
return config;
}
public Map<String, String> prepareSqoopConfiguration(Map<String, String> properties) {
properties.put("org.apache.sqoop.security.authentication.type", "KERBEROS");
properties.put("org.apache.sqoop.security.authentication.kerberos.http.principal", spnegoPrincipal);
properties.put("org.apache.sqoop.security.authentication.kerberos.http.keytab", spnegoKeytabFile);
// Sqoop Server does Kerberos authentication with other services
properties.put("org.apache.sqoop.security.authentication.handler", "org.apache.sqoop.security.authentication.KerberosAuthenticationHandler");
properties.put("org.apache.sqoop.security.authentication.kerberos.principal", hadoopPrincipal);
properties.put("org.apache.sqoop.security.authentication.kerberos.keytab", hadoopKeytabFile);
return properties;
}
@Override
public void start() throws Exception {
@@ -82,17 +173,7 @@ public MiniKdc getMiniKdc() {
return miniKdc;
}
@Override
public String getSpnegoPrincipal() {
return spnegoPrincipal;
}
@Override
public String getSqoopServerKeytabFile() {
return sqoopServerKeytabFile;
}
private static class KerberosConfiguration extends Configuration {
private static class KerberosConfiguration extends javax.security.auth.login.Configuration {
private String principal;
private String keytabFile;
@@ -168,52 +249,70 @@ public Void call() throws Exception {
});
}
@Override
public boolean isKerberosEnabled() {
return true;
}
private void createPrincipals() throws Exception {
createPrincipalsForSqoopClient();
createPrincipalsForSqoopServer();
createSpnegoPrincipal();
createSqoopPrincipals();
createHadoopPrincipals();
}
private void createPrincipalsForSqoopClient() throws Exception {
String keytabDir = HdfsUtils.joinPathFragments(getTemporaryPath(), "sqoop-client");
/**
* Create the SPNEGO principal that will be used by all the HTTP servers.
*/
private void createSpnegoPrincipal() throws Exception {
String keytabDir = HdfsUtils.joinPathFragments(getTemporaryPath(), "spnego");
File keytabDirFile = new File(keytabDir);
FileUtils.deleteDirectory(keytabDirFile);
FileUtils.forceMkdir(keytabDirFile);
String userName = "sqoopclient";
File userKeytabFile = new File(keytabDirFile, userName + ".keytab");
miniKdc.createPrincipal(userKeytabFile, userName);
sqoopClientPrincipal = userName + "@" + miniKdc.getRealm();
File keytabFile = new File(keytabDirFile, "HTTP.keytab");
String host = SqoopUtils.getLocalHostName();
miniKdc.createPrincipal(keytabFile, "HTTP/" + host);
spnegoKeytabFile = keytabFile.getAbsolutePath();
spnegoPrincipal = "HTTP/" + host + "@" + miniKdc.getRealm();
}
private void createSqoopPrincipals() throws Exception {
String keytabDir = HdfsUtils.joinPathFragments(getTemporaryPath(), "sqoop");
File keytabDirFile = new File(keytabDir);
FileUtils.deleteDirectory(keytabDirFile);
FileUtils.forceMkdir(keytabDirFile);
String sqoopClientUserName = "sqoopclient";
File userKeytabFile = new File(keytabDirFile, sqoopClientUserName + ".keytab");
miniKdc.createPrincipal(userKeytabFile, sqoopClientUserName);
sqoopClientPrincipal = sqoopClientUserName + "@" + miniKdc.getRealm();
sqoopClientKeytabFile = userKeytabFile.getAbsolutePath();
}
private void createPrincipalsForSqoopServer() throws Exception {
String keytabDir = HdfsUtils.joinPathFragments(getTemporaryPath(), "sqoop-server");
private void createHadoopPrincipals() throws Exception {
String keytabDir = HdfsUtils.joinPathFragments(getTemporaryPath(), "hadoop");
File keytabDirFile = new File(keytabDir);
FileUtils.deleteDirectory(keytabDirFile);
FileUtils.forceMkdir(keytabDirFile);
String sqoopUserName = "sqoopserver";
File sqoopKeytabFile = new File(keytabDirFile, sqoopUserName + ".keytab");
// Create the Hadoop principal; it will be used by all the services.
// See SQOOP-2744 for details.
String hadoopUserName = "hadoop";
File keytabFile = new File(keytabDirFile, hadoopUserName + ".keytab");
String host = SqoopUtils.getLocalHostName();
miniKdc.createPrincipal(sqoopKeytabFile, "HTTP/" + host);
sqoopServerKeytabFile = sqoopKeytabFile.getAbsolutePath();
spnegoPrincipal = "HTTP/" + host + "@" + miniKdc.getRealm();
miniKdc.createPrincipal(keytabFile, hadoopUserName + "/" + host);
hadoopKeytabFile = keytabFile.getAbsolutePath();
hadoopPrincipal = hadoopUserName + "/" + host + "@" + miniKdc.getRealm();
}
private <T> T doAsSqoopClient(Callable<T> callable) throws Exception {
return doAs(sqoopClientPrincipal, sqoopClientKeytabFile, callable);
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private static <T> T doAs(String principal, String keytabFile, final Callable<T> callable) throws Exception {
LoginContext loginContext = null;
try {
Set<Principal> principals = new HashSet<Principal>();
principals.add(new KerberosPrincipal(principal));
Class userClass = Class.forName("org.apache.hadoop.security.User");
Constructor<?> constructor = userClass.getDeclaredConstructor(String.class);
constructor.setAccessible(true);
principals.add((Principal)constructor.newInstance(principal));
Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
loginContext = new LoginContext("", subject, null, new KerberosConfiguration(principal, keytabFile));
loginContext.login();
@@ -232,4 +331,219 @@ public T run() throws Exception {
}
}
}
@SuppressWarnings("rawtypes")
private static String getClasspathDir(Class klass) throws Exception {
String file = klass.getName();
file = file.replace('.', '/') + ".class";
URL url = Thread.currentThread().getContextClassLoader().getResource(file);
String baseDir = url.toURI().getPath();
baseDir = baseDir.substring(0, baseDir.length() - file.length() - 1);
return baseDir;
}
/**
* Performs complete setup of SSL configuration. This includes keys, certs,
* keystores, truststores, the server SSL configuration file,
* and the client SSL configuration file.
*
* @param keystoresDir String directory to save keystores
* @param sslConfDir String directory to save SSL configuration files
* @param conf Configuration
* @param useClientCert boolean true to make the client present a cert in the
* SSL handshake
* @param trustStore boolean true to create truststore, false not to create it
*/
private void setupSSLConfig(String keystoresDir, String sslConfDir,
Configuration conf, boolean useClientCert, boolean trustStore)
throws Exception {
String clientKS = keystoresDir + "/clientKS.jks";
String clientPassword = "clientP";
String serverKS = keystoresDir + "/serverKS.jks";
String serverPassword = "serverP";
String trustKS = null;
String trustPassword = "trustP";
File sslClientConfFile = new File(sslConfDir, getSSLConfigFileName("ssl-client"));
File sslServerConfFile = new File(sslConfDir, getSSLConfigFileName("ssl-server"));
Map<String, X509Certificate> certs = new HashMap<String, X509Certificate>();
if (useClientCert) {
KeyPair cKP = generateKeyPair("RSA");
X509Certificate cCert = generateCertificate("CN=localhost, O=client", cKP, 30, "SHA1withRSA");
createKeyStore(clientKS, clientPassword, "client", cKP.getPrivate(), cCert);
certs.put("client", cCert);
}
KeyPair sKP = generateKeyPair("RSA");
X509Certificate sCert = generateCertificate("CN=localhost, O=server", sKP, 30, "SHA1withRSA");
createKeyStore(serverKS, serverPassword, "server", sKP.getPrivate(), sCert);
certs.put("server", sCert);
if (trustStore) {
trustKS = keystoresDir + "/trustKS.jks";
createTrustStore(trustKS, trustPassword, certs);
}
Configuration clientSSLConf = createSSLConfig(
SSLFactory.Mode.CLIENT, clientKS, clientPassword, clientPassword, trustKS);
Configuration serverSSLConf = createSSLConfig(
SSLFactory.Mode.SERVER, serverKS, serverPassword, serverPassword, trustKS);
saveConfig(sslClientConfFile, clientSSLConf);
saveConfig(sslServerConfFile, serverSSLConf);
conf.set(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, "ALLOW_ALL");
conf.set(SSLFactory.SSL_CLIENT_CONF_KEY, sslClientConfFile.getName());
conf.set(SSLFactory.SSL_SERVER_CONF_KEY, sslServerConfFile.getName());
conf.setBoolean(SSLFactory.SSL_REQUIRE_CLIENT_CERT_KEY, useClientCert);
}
/**
* Returns an SSL configuration file name. Under parallel test
* execution, this file name is parameterized by a unique ID to ensure that
* concurrent tests don't collide on an SSL configuration file.
*
* @param base the base of the file name
* @return SSL configuration file name for base
*/
private static String getSSLConfigFileName(String base) {
String testUniqueForkId = System.getProperty("test.unique.fork.id");
String fileSuffix = testUniqueForkId != null ? "-" + testUniqueForkId : "";
return base + fileSuffix + ".xml";
}
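// Worked example, assuming the build sets -Dtest.unique.fork.id=fork-3 for
// parallel execution: getSSLConfigFileName("ssl-client") returns
// "ssl-client-fork-3.xml"; without the property it falls back to
// "ssl-client.xml", so concurrent forks never clobber each other's SSL files.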
/**
* Creates SSL configuration.
*
* @param mode SSLFactory.Mode mode to configure
* @param keystore String keystore file
* @param password String store password, or null to avoid setting store
* password
* @param keyPassword String key password, or null to avoid setting key
* password
* @param trustKS String truststore file
* @return Configuration for SSL
*/
private static Configuration createSSLConfig(SSLFactory.Mode mode,
String keystore, String password, String keyPassword, String trustKS) {
String trustPassword = "trustP";
Configuration sslConf = new Configuration(false);
if (keystore != null) {
sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
FileBasedKeyStoresFactory.SSL_KEYSTORE_LOCATION_TPL_KEY), keystore);
}
if (password != null) {
sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY), password);
}
if (keyPassword != null) {
sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
keyPassword);
}
if (trustKS != null) {
sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
FileBasedKeyStoresFactory.SSL_TRUSTSTORE_LOCATION_TPL_KEY), trustKS);
}
sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
FileBasedKeyStoresFactory.SSL_TRUSTSTORE_PASSWORD_TPL_KEY),
trustPassword);
sslConf.set(FileBasedKeyStoresFactory.resolvePropertyName(mode,
FileBasedKeyStoresFactory.SSL_TRUSTSTORE_RELOAD_INTERVAL_TPL_KEY), "1000");
return sslConf;
}
private static KeyPair generateKeyPair(String algorithm)
throws NoSuchAlgorithmException {
KeyPairGenerator keyGen = KeyPairGenerator.getInstance(algorithm);
keyGen.initialize(1024);
return keyGen.genKeyPair();
}
/**
* Create a self-signed X.509 Certificate.
*
* @param dn the X.509 Distinguished Name, e.g. "CN=Test, L=London, C=GB"
* @param pair the KeyPair
* @param days how many days from now the Certificate is valid for
* @param algorithm the signing algorithm, e.g. "SHA1withRSA"
* @return the self-signed certificate
*/
private static X509Certificate generateCertificate(String dn, KeyPair pair, int days, String algorithm)
throws CertificateEncodingException, InvalidKeyException, IllegalStateException,
NoSuchProviderException, NoSuchAlgorithmException, SignatureException {
Date from = new Date();
Date to = new Date(from.getTime() + days * 86400000L);
BigInteger sn = new BigInteger(64, new SecureRandom());
KeyPair keyPair = pair;
X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
X500Principal dnName = new X500Principal(dn);
certGen.setSerialNumber(sn);
certGen.setIssuerDN(dnName);
certGen.setNotBefore(from);
certGen.setNotAfter(to);
certGen.setSubjectDN(dnName);
certGen.setPublicKey(keyPair.getPublic());
certGen.setSignatureAlgorithm(algorithm);
X509Certificate cert = certGen.generate(pair.getPrivate());
return cert;
}
private static void createKeyStore(String filename,
String password, String alias,
Key privateKey, Certificate cert)
throws GeneralSecurityException, IOException {
KeyStore ks = KeyStore.getInstance("JKS");
ks.load(null, null); // initialize
ks.setKeyEntry(alias, privateKey, password.toCharArray(),
new Certificate[]{cert});
saveKeyStore(ks, filename, password);
}
private static <T extends Certificate> void createTrustStore(
String filename, String password, Map<String, T> certs)
throws GeneralSecurityException, IOException {
KeyStore ks = KeyStore.getInstance("JKS");
ks.load(null, null); // initialize
for (Map.Entry<String, T> cert : certs.entrySet()) {
ks.setCertificateEntry(cert.getKey(), cert.getValue());
}
saveKeyStore(ks, filename, password);
}
private static void saveKeyStore(KeyStore ks, String filename,
String password)
throws GeneralSecurityException, IOException {
FileOutputStream out = new FileOutputStream(filename);
try {
ks.store(out, password.toCharArray());
} finally {
out.close();
}
}
/**
* Saves configuration to a file.
*
* @param file File to save
* @param conf Configuration contents to write to file
* @throws IOException if there is an I/O error saving the file
*/
private static void saveConfig(File file, Configuration conf)
throws IOException {
Writer writer = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
try {
conf.writeXml(writer);
} finally {
writer.close();
}
}
}

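Taken together, the runner's lifecycle is: boot MiniKdc, mint the SPNEGO, sqoopclient, and hadoop principals, then hand prepared configurations to the miniclusters. A hedged usage sketch (setTemporaryPath is an assumption inferred from KdcRunner's temporaryPath field; the harness class is illustrative):

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.test.kdc.MiniKdcRunner;

public class SecuredMiniClusterBootstrap {
  public static void main(String[] args) throws Exception {
    MiniKdcRunner kdc = new MiniKdcRunner();
    kdc.setTemporaryPath("/tmp/sqoop-integration"); // assumed setter
    kdc.start(); // boots MiniKdc and creates the principals

    // Hadoop side: Kerberos principals, keytabs, SSL keystores, HTTPS_ONLY.
    Configuration hadoopConf = kdc.prepareHadoopConfiguration(new Configuration());

    // Sqoop side: KERBEROS authentication type plus KerberosAuthenticationHandler.
    Map<String, String> sqoopProps =
        kdc.prepareSqoopConfiguration(new HashMap<String, String>());

    // ... start the Hadoop and Sqoop miniclusters with these configs ...
    kdc.stop();
  }
}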
View File

@@ -1,66 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.test.kdc;
import java.net.URL;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL.Token;
import org.apache.sqoop.client.SqoopClient;
/**
* This class enables running tests without kerberos enabled.
*/
public class NoKerberosKdcRunner extends KdcRunner {
@Override
public void start() throws Exception {
// Do nothing
}
@Override
public void stop() throws Exception {
// Do nothing
}
@Override
public void authenticateWithSqoopServer(SqoopClient client) throws Exception {
// Do nothing
}
@Override
public void authenticateWithSqoopServer(URL url, Token authToken)
throws Exception {
// Do nothing
}
@Override
public boolean isKerberosEnabled() {
return false;
}
@Override
public String getSpnegoPrincipal() {
return null;
}
@Override
public String getSqoopServerKeytabFile() {
return null;
}
}

View File

@@ -209,18 +209,14 @@ protected Map<String, String> getExecutionEngineConfiguration() {
protected Map<String, String> getSecurityConfiguration() {
Map<String, String> properties = new HashMap<String, String>();
if (kdc != null && kdc.isKerberosEnabled()) {
// Sqoop Server is kerberos enabled
properties.put("org.apache.sqoop.security.authentication.type", "KERBEROS");
properties.put("org.apache.sqoop.security.authentication.kerberos.http.principal", kdc.getSpnegoPrincipal());
properties.put("org.apache.sqoop.security.authentication.kerberos.http.keytab", kdc.getSqoopServerKeytabFile());
if (kdc != null) {
properties = kdc.prepareSqoopConfiguration(properties);
} else {
properties.put("org.apache.sqoop.security.authentication.type", "SIMPLE");
// Sqoop Server does simple authentication with other services
properties.put("org.apache.sqoop.security.authentication.handler", "org.apache.sqoop.security.authentication.SimpleAuthenticationHandler");
}
// Sqoop Server does simple authentication with other services
properties.put("org.apache.sqoop.security.authentication.handler", "org.apache.sqoop.security.authentication.SimpleAuthenticationHandler");
/**
* Because we share a JVM with Hadoop during unit testing, the
* proxy user configuration is also shared with Hadoop.
@@ -232,6 +228,8 @@ protected Map<String, String> getSecurityConfiguration() {
String user = System.getProperty("user.name");
properties.put("org.apache.sqoop.authentication.proxyuser." + user + ".groups", "*");
properties.put("org.apache.sqoop.authentication.proxyuser." + user + ".hosts", "*");
properties.put("org.apache.sqoop.authentication.proxyuser." + "hadoop" + ".groups", "*");
properties.put("org.apache.sqoop.authentication.proxyuser." + "hadoop" + ".hosts", "*");
return properties;
}

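The two new proxyuser entries exist because, per the comment in MiniKdcRunner above, every service in the shared test JVM logs in as the hadoop principal, so the Sqoop server must be allowed to impersonate the real test user. A hedged sketch of the mechanism these rules unlock, using Hadoop's UserGroupInformation API ("alice" is an illustrative end user):

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

public class ImpersonationSketch {
  public static void main(String[] args) throws Exception {
    // The shared-JVM services are logged in as hadoop/<host>@REALM ...
    UserGroupInformation serviceUgi = UserGroupInformation.getLoginUser();
    // ... but act on behalf of the end user; this only succeeds when the
    // proxy-user rules allow it (Sqoop mirrors Hadoop's hadoop.proxyuser.*
    // convention with the org.apache.sqoop.authentication.proxyuser.* keys).
    UserGroupInformation proxyUgi =
        UserGroupInformation.createProxyUser("alice", serviceUgi);
    proxyUgi.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        // HDFS and Sqoop operations here execute as "alice".
        return null;
      }
    });
  }
}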
View File

@@ -43,6 +43,7 @@ public class OutputDirectoryTest extends SqoopTestCase {
public void testOutputDirectoryIsAFile() throws Exception {
createAndLoadTableCities();
hdfsClient.delete(new Path(getMapreduceDirectory()), true);
hdfsClient.createNewFile(new Path(getMapreduceDirectory()));
// RDBMS link

View File

@@ -30,7 +30,6 @@
import org.apache.sqoop.test.infrastructure.providers.DatabaseInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.HadoopInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.HiveInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.KdcInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.SqoopInfrastructureProvider;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
@@ -42,7 +41,7 @@
import java.util.List;
@Test(groups = {"slow", "no-real-cluster"})
@Infrastructure(dependencies = {KdcInfrastructureProvider.class, HadoopInfrastructureProvider.class, HiveInfrastructureProvider.class, SqoopInfrastructureProvider.class, DatabaseInfrastructureProvider.class})
@Infrastructure(dependencies = {HadoopInfrastructureProvider.class, HiveInfrastructureProvider.class, SqoopInfrastructureProvider.class, DatabaseInfrastructureProvider.class})
public class FromRDBMSToKiteHiveTest extends SqoopTestCase {
private String testName;
@@ -103,6 +102,8 @@ public void createLinks() {
kiteLink = getClient().createLink("kite-connector");
kiteLink.getConnectorLinkConfig().getStringInput("linkConfig.authority")
.setValue(getInfrastructureProvider(HiveInfrastructureProvider.class).getHiveMetastore().getAuthority());
kiteLink.getConnectorLinkConfig().getStringInput("linkConfig.confDir")
.setValue(getInfrastructureProvider(SqoopInfrastructureProvider.class).getInstance().getConfigurationPath());
saveLink(kiteLink);
}

View File

@@ -22,6 +22,7 @@
import org.apache.sqoop.core.ConfigurationConstants;
import org.apache.sqoop.test.infrastructure.Infrastructure;
import org.apache.sqoop.test.infrastructure.SqoopTestCase;
import org.apache.sqoop.test.infrastructure.providers.HadoopInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.KdcInfrastructureProvider;
import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
@@ -34,7 +35,7 @@
import java.util.Map;
@Test(groups = "no-real-cluster")
@Infrastructure(dependencies = {KdcInfrastructureProvider.class})
@Infrastructure(dependencies = {KdcInfrastructureProvider.class, HadoopInfrastructureProvider.class})
public class BlacklistedConnectorTest extends SqoopTestCase {
private SqoopMiniCluster sqoopMiniCluster;

View File

@@ -26,6 +26,7 @@
import org.apache.sqoop.test.infrastructure.Infrastructure;
import org.apache.sqoop.test.infrastructure.SqoopTestCase;
import org.apache.sqoop.test.infrastructure.providers.DatabaseInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.HadoopInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.KdcInfrastructureProvider;
import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
@@ -41,7 +42,7 @@
import java.util.Map;
@Test(groups = "no-real-cluster")
@Infrastructure(dependencies = {KdcInfrastructureProvider.class, DatabaseInfrastructureProvider.class})
@Infrastructure(dependencies = {KdcInfrastructureProvider.class, DatabaseInfrastructureProvider.class, HadoopInfrastructureProvider.class})
public class ClasspathTest extends SqoopTestCase {
private static final String TEST_CONNECTOR_JAR_NAME = "test-connector.jar";

View File

@@ -30,6 +30,7 @@
import org.apache.sqoop.model.MLink;
import org.apache.sqoop.test.infrastructure.Infrastructure;
import org.apache.sqoop.test.infrastructure.SqoopTestCase;
import org.apache.sqoop.test.infrastructure.providers.HadoopInfrastructureProvider;
import org.apache.sqoop.test.infrastructure.providers.KdcInfrastructureProvider;
import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
@@ -40,7 +41,7 @@
import org.testng.annotations.Test;
@Test(groups = "no-real-cluster")
@Infrastructure(dependencies = {KdcInfrastructureProvider.class})
@Infrastructure(dependencies = {KdcInfrastructureProvider.class, HadoopInfrastructureProvider.class})
public class ConnectorClasspathIsolationTest extends SqoopTestCase {
private static final String TEST_FROM_CONNECTOR_JAR_NAME = "test-from-connector.jar";

View File

@@ -0,0 +1,32 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!DOCTYPE suite SYSTEM "http://testng.org/testng-1.0.dtd" >
<suite name="HiveTests" verbose="2" parallel="false">
<listeners>
<listener class-name="org.apache.sqoop.test.testng.SqoopTestListener" />
</listeners>
<test name="HiveTests">
<packages>
<package name="org.apache.sqoop.integration.connector.hive"/>
</packages>
</test>
</suite>

View File

@@ -26,7 +26,13 @@ limitations under the License.
<test name="ConnectorTests">
<packages>
<package name="org.apache.sqoop.integration.connector.*"/>
<package name="org.apache.sqoop.integration.connector.*">
<!--
Exclude Hive tests for the time being: there are currently blocking issues
(SQOOP-2756) that make it impossible to run Hive tests in a Kerberos environment.
-->
<exclude name="org.apache.sqoop.integration.connector.hive"></exclude>
</package>
</packages>
</test>
</suite>