
SQOOP-2541: Sqoop2: Provide test infrastructure base class for upgrade tests

(Dian Fu via Jarek Jarcec Cecho)
Committed by Jarek Jarcec Cecho on 2016-01-04 19:03:02 -08:00
parent 23781db25f
commit 073da06dd4
2 changed files with 37 additions and 32 deletions

org/apache/sqoop/test/infrastructure/SqoopTestCase.java

@@ -208,7 +208,7 @@ public static void startInfrastructureProviders(ITestContext context) {
    * @param <T>
    * @return
    */
-  private static <T extends InfrastructureProvider> T startInfrastructureProvider(Class<T> providerClass, Configuration hadoopConfiguration, KdcRunner kdc) {
+  protected static <T extends InfrastructureProvider> T startInfrastructureProvider(Class<T> providerClass, Configuration hadoopConfiguration, KdcRunner kdc) {
     T providerObject;
     try {
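Widening startInfrastructureProvider from private to protected lets a subclass spin up an extra provider on demand instead of declaring everything up front. A minimal sketch under stated assumptions: only the method signature above is taken from this diff; DatabaseInfrastructureProvider and the null KdcRunner are assumptions.

    // Hypothetical on-demand provider startup from a subclass of this base
    // class; passing null for the KdcRunner is assumed to mean "no Kerberos".
    protected DatabaseInfrastructureProvider startDatabaseProvider() {
      return startInfrastructureProvider(
          DatabaseInfrastructureProvider.class, getHadoopConf(), null);
    }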
@@ -354,17 +354,7 @@ public DelegationTokenAuthenticatedURL.Token getAuthToken() {
   @BeforeMethod
   public void init() throws Exception {
-    String serverUrl = getSqoopServerUrl();
-    if (serverUrl != null) {
-      client = new SqoopClient(serverUrl);
-
-      KdcInfrastructureProvider kdcProvider = getInfrastructureProvider(KdcInfrastructureProvider.class);
-      if (kdcProvider != null) {
-        kdcProvider.getInstance().authenticateWithSqoopServer(client);
-        kdcProvider.getInstance().authenticateWithSqoopServer(new URL(serverUrl), authToken);
-      }
-    }
+    initSqoopClient(getSqoopServerUrl());

     if (getInfrastructureProvider(HadoopInfrastructureProvider.class) != null) {
       hdfsClient = FileSystem.get(getInfrastructureProvider(HadoopInfrastructureProvider.class).getHadoopConfiguration());
@@ -376,6 +366,18 @@ public void init() throws Exception {
     }
   }

+  protected void initSqoopClient(String serverUrl) throws Exception {
+    if (serverUrl != null) {
+      client = new SqoopClient(serverUrl);
+
+      KdcInfrastructureProvider kdcProvider = getInfrastructureProvider(KdcInfrastructureProvider.class);
+      if (kdcProvider != null) {
+        kdcProvider.getInstance().authenticateWithSqoopServer(client);
+        kdcProvider.getInstance().authenticateWithSqoopServer(new URL(serverUrl), authToken);
+      }
+    }
+  }
+
   /**
    * Create link with asserts to make sure that it was created correctly.
    *
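Extracting the client bootstrap into initSqoopClient means the Kerberos handshake travels together with the client construction, so a test that starts its own server can re-run the whole sequence against that server's URL. A sketch of the intended call pattern, mirroring the second file of this commit:

    // Inside a SqoopTestCase subclass that owns its own mini cluster:
    @BeforeMethod(dependsOnMethods = { "init" })
    public void startServerAndClient() throws Exception {
      sqoopMiniCluster.start();
      // Builds the SqoopClient and, when a KDC provider is configured,
      // performs the Kerberos handshake against this URL.
      initSqoopClient(sqoopMiniCluster.getServerUrl());
    }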
@@ -638,4 +640,14 @@ protected String getTemporaryPath() {
   protected String getSqoopMiniClusterTemporaryPath() {
     return getInfrastructureProvider(SqoopInfrastructureProvider.class).getRootPath();
   }
+
+  protected Configuration getHadoopConf() {
+    Configuration hadoopConf = null;
+    if (getInfrastructureProvider(HadoopInfrastructureProvider.class) != null) {
+      hadoopConf = getInfrastructureProvider(HadoopInfrastructureProvider.class).getHadoopConfiguration();
+    } else {
+      hadoopConf = new Configuration();
+    }
+    return hadoopConf;
+  }
 }
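getHadoopConf falls back to a fresh Configuration when no HadoopInfrastructureProvider is registered, so callers never need a null check and the same code path works with or without a Hadoop mini cluster. A usage sketch, assuming JettySqoopMiniCluster takes the same (path, configuration) constructor shape as the DerbySqoopMiniCluster used below:

    // Works whether or not the test declared a Hadoop provider.
    SqoopMiniCluster cluster = new JettySqoopMiniCluster(
        HdfsUtils.joinPathFragments(getTemporaryPath(), "sqoop-mini-cluster"),
        getHadoopConf());
    cluster.start();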

org/apache/sqoop/integration/repository/derby/upgrade/DerbyRepositoryUpgradeTest.java

@@ -18,11 +18,13 @@
 package org.apache.sqoop.integration.repository.derby.upgrade;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.sqoop.client.SqoopClient;
 import org.apache.sqoop.model.MJob;
 import org.apache.sqoop.model.MLink;
+import org.apache.sqoop.test.infrastructure.Infrastructure;
+import org.apache.sqoop.test.infrastructure.SqoopTestCase;
+import org.apache.sqoop.test.infrastructure.providers.KdcInfrastructureProvider;
 import org.apache.sqoop.test.minicluster.JettySqoopMiniCluster;
-import org.apache.sqoop.test.testcases.JettyTestCase;
+import org.apache.sqoop.test.minicluster.SqoopMiniCluster;
 import org.apache.sqoop.test.utils.CompressionUtils;
 import org.apache.sqoop.test.utils.HdfsUtils;
 import org.testng.ITestContext;
@@ -30,7 +32,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
-import org.apache.log4j.Logger;

 import java.io.InputStream;
 import java.util.HashMap;
 import java.util.Map;
@@ -53,12 +54,14 @@
  * methods describing content of the repository (what links/jobs it have, ...).
  *
  */
-public abstract class DerbyRepositoryUpgradeTest extends JettyTestCase {
-
-  private static final Logger LOG = Logger.getLogger(DerbyRepositoryUpgradeTest.class);
+@Infrastructure(dependencies = {KdcInfrastructureProvider.class})
+public abstract class DerbyRepositoryUpgradeTest extends SqoopTestCase {

   protected Map<Long, String> jobIdToNameMap;
   protected Map<Long, String> linkIdToNameMap;

+  private SqoopMiniCluster sqoopMiniCluster;
+
   /**
    * Custom Sqoop mini cluster that points derby repository to real on-disk structures.
    */
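The @Infrastructure annotation is the declarative side of SqoopTestCase: it names the providers the base class brings up for the suite. Only the KDC provider is declared here, because the test manages its own Sqoop server and resolves the Hadoop configuration through getHadoopConf(). A hypothetical test that also wanted a live HDFS would widen the list; only the annotation syntax and provider classes come from this commit, the class name is made up:

    @Infrastructure(dependencies = {KdcInfrastructureProvider.class, HadoopInfrastructureProvider.class})
    public abstract class SomeOtherUpgradeTest extends SqoopTestCase {
    }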
@@ -131,17 +134,7 @@ public String getTemporaryJettyPath() {
     return HdfsUtils.joinPathFragments(getTemporaryPath(), getClass().getCanonicalName(), getTestName());
   }

-  @Override
-  public void startSqoop() throws Exception {
-    // Do nothing so that Sqoop isn't started before Suite.
-  }
-
-  @Override
-  public void stopSqoop() throws Exception {
-    // Do nothing so that Sqoop isn't stopped after Suite.
-  }
-
-  @BeforeMethod
+  @BeforeMethod(dependsOnMethods = { "init" })
   public void startSqoopMiniCluster(ITestContext context) throws Exception {
     // Prepare older repository structures
     InputStream tarballStream = getClass().getResourceAsStream(getPathToRepositoryTarball());
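Replacing the startSqoop/stopSqoop no-op overrides with dependsOnMethods = { "init" } moves the ordering problem into TestNG itself: the base class's init() is guaranteed to finish before the mini cluster comes up, and startSqoopMiniCluster is skipped when init() fails. A self-contained illustration of that mechanism, with made-up names:

    import org.testng.annotations.BeforeMethod;

    public class OrderingDemo {
      @BeforeMethod
      public void init() {
        // Base-class-style setup; always runs first.
      }

      // Runs after init() before every test method; skipped if init() fails.
      @BeforeMethod(dependsOnMethods = { "init" })
      public void startServer() {
        // Server startup goes here.
      }
    }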
@@ -149,13 +142,13 @@ public void startSqoopMiniCluster(ITestContext context) throws Exception {
     CompressionUtils.untarStreamToDirectory(tarballStream, getRepositoryPath());

     // And use them for new Derby repo instance
-    setCluster(new DerbySqoopMiniCluster(getRepositoryPath(), getTemporaryJettyPath() + "/sqoop-mini-cluster", hadoopCluster.getConfiguration()));
+    sqoopMiniCluster = new DerbySqoopMiniCluster(getRepositoryPath(), getTemporaryJettyPath() + "/sqoop-mini-cluster", getHadoopConf());

     // Start server
-    getCluster().start();
+    sqoopMiniCluster.start();

     // Initialize Sqoop Client API
-    setClient(new SqoopClient(getServerUrl()));
+    initSqoopClient(sqoopMiniCluster.getServerUrl());

     jobIdToNameMap = new HashMap<Long, String>();
     for(MJob job : getClient().getJobs()) {
@@ -170,7 +163,7 @@ public void startSqoopMiniCluster(ITestContext context) throws Exception {
   @AfterMethod
   public void stopSqoopMiniCluster() throws Exception {
-    getCluster().stop();
+    sqoopMiniCluster.stop();
   }

   @Test
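Concrete subclasses supply the version-specific pieces: the repository tarball to unpack and assertions about the links and jobs that must survive the upgrade. A sketch of what one might look like; the class name, tarball path, and counts are hypothetical, while getPathToRepositoryTarball(), the id-to-name maps, and getClient() come from the code above:

    import static org.testng.Assert.assertEquals;

    import org.testng.annotations.Test;

    public class DerbyRepositoryUpgradeFrom1_99_3Test extends DerbyRepositoryUpgradeTest {
      @Override
      public String getPathToRepositoryTarball() {
        return "/repositories/derby/derby-repository-1.99.3.tar.gz"; // assumed resource layout
      }

      @Test
      public void testLinksAndJobsSurvivedUpgrade() throws Exception {
        // startSqoopMiniCluster() has already unpacked the old repository,
        // started the server against it, and filled the id-to-name maps.
        assertEquals(jobIdToNameMap.size(), 2);            // illustrative count
        assertEquals(getClient().getLinks().size(), 2);    // illustrative count
      }
    }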