5
0
mirror of https://github.com/apache/sqoop.git synced 2025-05-08 13:40:28 +08:00

SQOOP-2004: Sqoop2: Make annotations run always

(Abraham Elmahrek via Jarek Jarcec Cecho)
This commit is contained in:
Jarek Jarcec Cecho 2015-01-15 07:41:48 -08:00
parent 3f8cd0a7cd
commit 819b13802a
44 changed files with 65 additions and 64 deletions

View File

@@ -53,7 +53,7 @@ public class TestSqoopClient {
SqoopResourceRequests resourceRequests;
SqoopClient client;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
resourceRequests = mock(SqoopResourceRequests.class);
client = new SqoopClient("my-cool-server");

View File

@@ -27,7 +27,7 @@ public class TestHostAndPortValidator {
AbstractValidator<String> validator = new HostAndPortValidator();
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
validator.reset();
assertEquals(0, validator.getMessages().size());

View File

@@ -37,7 +37,7 @@ public GenericJdbcExecutorTest() {
GenericJdbcTestConstants.URL, null, null);
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
if(executor.existTable(emptyTable)) {
executor.executeUpdate("DROP TABLE " + emptyTable);

View File

@@ -54,7 +54,7 @@ public TestExtractor() {
tableName = getClass().getSimpleName().toUpperCase();
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
GenericJdbcTestConstants.URL, null, null);
@@ -73,7 +73,7 @@ public void setUp() {
}
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() {
executor.close();
}

View File

@@ -59,7 +59,7 @@ public TestFromInitializer() {
tableColumns = "ICOL,VCOL";
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
GenericJdbcTestConstants.URL, null, null);
@@ -108,7 +108,7 @@ public Schema getSchema(String name) {
;
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() {
executor.close();
}

View File

@@ -42,7 +42,7 @@ public class TestGenericJdbcConnectorUpgrader {
private GenericJdbcConnectorUpgrader upgrader;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
upgrader = new GenericJdbcConnectorUpgrader();
}

View File

@@ -56,7 +56,7 @@ public TestLoader(int numberOfRows) {
this.tableName = getClass().getSimpleName().toUpperCase();
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
GenericJdbcTestConstants.URL, null, null);
@@ -70,7 +70,7 @@ public void setUp() {
}
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() {
executor.close();
}

View File

@@ -57,7 +57,7 @@ public TestToInitializer() {
tableColumns = "ICOL,VCOL";
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
executor = new GenericJdbcExecutor(GenericJdbcTestConstants.DRIVER,
GenericJdbcTestConstants.URL, null, null);
@@ -74,7 +74,7 @@ public void setUp() {
}
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() {
executor.close();
}

View File

@@ -75,7 +75,7 @@ public static Object[][] data() {
return parameters.toArray(new Object[0][]);
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
FileUtils.mkdirs(inputDirectory);
switch (this.outputFileType) {
@@ -89,7 +89,7 @@ public void setUp() throws Exception {
}
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() throws IOException {
FileUtils.delete(inputDirectory);
}

View File

@@ -85,10 +85,10 @@ public static Object[][] data() {
return parameters.toArray(new Object[0][]);
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() throws IOException {
FileUtils.delete(outputDirectory);
}

View File

@@ -60,7 +60,7 @@ public TestPartitioner(ToFormat outputFileType, Class<? extends CompressionCodec
this.compressionClass = compressionClass;
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
partitioner = new HdfsPartitioner();
FileUtils.mkdirs(inputDirectory);
@@ -76,7 +76,7 @@ public void setUp() throws Exception {
}
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() throws IOException {
FileUtils.delete(inputDirectory);
}

View File

@@ -40,7 +40,7 @@ public class TestKafkaLoader {
private static KafkaLoader loader;
private static String TOPIC = "mytopic";
@BeforeClass
@BeforeClass(alwaysRun = true)
public static void setup() throws IOException {
testUtil.prepare();
List<String> topics = new ArrayList<String>(1);
@@ -49,7 +49,7 @@ public static void setup() throws IOException {
loader = new KafkaLoader();
}
@AfterClass
@AfterClass(alwaysRun = true)
public static void tearDown() throws IOException {
testUtil.tearDown();
}

View File

@@ -60,7 +60,7 @@ public IObjectFactory getObjectFactory() {
return new org.powermock.modules.testng.PowerMockObjectFactory();
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
initMocks(this);
when(datasetMock.newWriter()).thenReturn(writerMock);
@@ -73,7 +73,7 @@ public void setUp() {
executor = new KiteDatasetExecutor(datasetMock);
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() {
executor.closeWriter();
executor.closeReader();

View File

@@ -56,7 +56,7 @@ public void writeRecord(Object obj) {
}
};
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
initMocks(this);

View File

@@ -37,7 +37,7 @@ public class TestKiteFromInitializer extends PowerMockTestCase {
private KiteFromInitializer initializer;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
initMocks(this);
mockStatic(Datasets.class);

View File

@@ -40,7 +40,7 @@ public class TestKiteLoader {
@org.mockito.Mock
private KiteDatasetExecutor executorMock;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
initMocks(this);

View File

@@ -52,7 +52,7 @@ public class TestKiteToDestroyer extends PowerMockTestCase {
@org.mockito.Mock
private KiteDatasetExecutor executorMock;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
initMocks(this);
mockStatic(KiteDatasetExecutor.class);

View File

@@ -41,7 +41,7 @@ public class TestKiteToInitializer extends PowerMockTestCase {
private KiteToInitializer initializer;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
initMocks(this);
mockStatic(Datasets.class);

View File

@@ -67,7 +67,7 @@ public class TestAVROIntermediateDataFormat {
private final static org.joda.time.LocalTime time = new org.joda.time.LocalTime(12, 59, 59);
private final static org.joda.time.LocalDate date = new org.joda.time.LocalDate(2014, 10, 01);
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
createAvroIDF();
}

View File

@@ -53,7 +53,7 @@ public class TestCSVIntermediateDataFormat {
private CSVIntermediateDataFormat dataFormat;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
}

View File

@@ -52,7 +52,7 @@ public class TestJSONIntermediateDataFormat {
private final static String dateTime = "'2014-10-01 12:00:00.000'";
private final static String time = "'12:59:59'";
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
createJSONIDF();
}

View File

@@ -29,7 +29,7 @@ public class TestLocationMatcher {
private LocationMatcher matcher;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
matcher = null;
}

View File

@@ -29,7 +29,7 @@ public class TestNameMatcher {
private NameMatcher matcher;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
matcher = null;
}

View File

@@ -26,7 +26,7 @@
public class TestSqoopConfiguration {
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
// Unset any configuration dir if it is set by another test
System.getProperties().remove(ConfigurationConstants.SYSPROP_CONFIG_DIR);

View File

@@ -39,7 +39,7 @@ public class TestDriverConfigUpgrader {
DriverUpgrader upgrader;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void initializeUpgrader() {
upgrader = new DriverUpgrader();
}

View File

@@ -48,7 +48,7 @@ public class TestJobManager {
private RepositoryManager repositoryManagerMock;
private Repository jdbcRepoMock;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
jobManager = new JobManager();
connectorMgrMock = mock(ConnectorManager.class);

View File

@@ -33,7 +33,7 @@ public class TestJobRequest {
private JobRequest jobRequest;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void initializeSubmissionRequest() {
jobRequest = new JobRequest();
}

View File

@@ -76,7 +76,7 @@ public class TestJdbcRepository {
private ConnectorConfigurableUpgrader connectorUpgraderMock;
private DriverUpgrader driverUpgraderMock;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
repoTransactionMock = mock(JdbcRepositoryTransaction.class);
connectorMgrMock = mock(ConnectorManager.class);

View File

@@ -44,7 +44,7 @@ public class TestSqoopWritable {
private SqoopWritable writable;
private IntermediateDataFormat<?> idfMock;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
idfMock = mock(IntermediateDataFormat.class);
writable = new SqoopWritable(idfMock);

View File

@@ -44,7 +44,7 @@ public class TestMRConfigurationUtils {
Job job;
JobConf jobConfSpy;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
setUpHadoopJob();
setUpHadoopJobConf();

View File

@@ -155,7 +155,7 @@ private Schema getSchema() {
return new Schema("test").addColumn(new Text("t"));
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() {
conf = new Configuration();
conf.setIfUnset(MRJobConstants.TO_INTERMEDIATE_DATA_FORMAT,

View File

@@ -60,14 +60,14 @@ abstract public class DerbyTestCase {
private Connection connection;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
// Create link to the database
Class.forName(DERBY_DRIVER).newInstance();
connection = DriverManager.getConnection(getStartJdbcUrl());
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() throws Exception {
// Close active link
if (connection != null) {

View File

@@ -34,7 +34,7 @@ public class TestConnectorHandling extends DerbyTestCase {
DerbyRepositoryHandler handler;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
super.setUp();
handler = new DerbyRepositoryHandler();

View File

@@ -34,7 +34,7 @@ public class TestDriverHandling extends DerbyTestCase {
private static final Object CURRENT_DRIVER_VERSION = "1";
DerbyRepositoryHandler handler;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
super.setUp();
handler = new DerbyRepositoryHandler();

View File

@@ -48,7 +48,7 @@ public class TestInputTypes extends DerbyTestCase {
DerbyRepositoryHandler handler;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
super.setUp();

View File

@@ -48,7 +48,7 @@ public class TestJobHandling extends DerbyTestCase {
DerbyRepositoryHandler handler;
Connection derbyConnection;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
super.setUp();
derbyConnection = getDerbyDatabaseConnection();

View File

@@ -42,7 +42,7 @@ public class TestLinkHandling extends DerbyTestCase {
DerbyRepositoryHandler handler;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
super.setUp();

View File

@@ -30,7 +30,7 @@ public class TestRepositoryUpgrade extends DerbyTestCase {
DerbyRepositoryHandler handler;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
super.setUp();
handler = new TestDerbyRepositoryHandler();

View File

@@ -38,7 +38,7 @@ public class TestSubmissionHandling extends DerbyTestCase {
DerbyRepositoryHandler handler;
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
super.setUp();

View File

@@ -21,8 +21,8 @@
import org.apache.sqoop.common.test.db.PostgreSQLProvider;
import org.testng.SkipException;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
* Abstract class with convenience methods for testing postgresql repository.
@@ -33,13 +33,13 @@ abstract public class PostgresqlTestCase {
public static PostgresqlTestUtils utils;
public PostgresqlRepositoryHandler handler;
@Test
public static void setUpClass() {
@BeforeClass(alwaysRun = true)
public void setUpClass() {
provider = new PostgreSQLProvider();
utils = new PostgresqlTestUtils(provider);
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
try {
provider.start();
@@ -51,7 +51,7 @@ public void setUp() throws Exception {
handler.createOrUpgradeRepository(provider.getConnection());
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void tearDown() throws Exception {
provider.dropSchema("sqoop");
provider.stop();

View File

@@ -75,7 +75,7 @@ public void finished(MSubmission submission) {
}
};
@BeforeClass
@BeforeClass(alwaysRun = true)
public static void startHadoop() throws Exception {
// Start Hadoop Clusters
hadoopCluster = HadoopRunnerFactory.getHadoopCluster(System.getProperties(), HadoopMiniClusterRunner.class);
@@ -88,14 +88,14 @@ public static void startHadoop() throws Exception {
LOG.debug("HDFS Client: " + hdfsClient);
}
@BeforeClass
@BeforeClass(alwaysRun = true)
public static void startProvider() throws Exception {
provider = DatabaseProviderFactory.getProvider(System.getProperties());
LOG.info("Starting database provider: " + provider.getClass().getName());
provider.start();
}
@AfterClass
@AfterClass(alwaysRun = true)
public static void stopProvider() {
LOG.info("Stopping database provider: " + provider.getClass().getName());
provider.stop();

View File

@@ -38,13 +38,13 @@ public class KafkaConnectorTestCase extends ConnectorTestCase {
private static TestUtil testUtil = TestUtil.getInstance();
private static final String TOPIC = "mytopic";
@BeforeClass
@BeforeClass(alwaysRun = true)
public static void startKafka() throws IOException {
// starts Kafka server and its dependent zookeeper
testUtil.prepare();
}
@AfterClass
@AfterClass(alwaysRun = true)
public static void stopKafka() throws IOException {
testUtil.tearDown();
}

View File

@@ -88,7 +88,7 @@ abstract public class TomcatTestCase {
*/
private SqoopClient client;
@BeforeClass
@BeforeClass(alwaysRun = true)
public static void startHadoop() throws Exception {
// Start Hadoop Clusters
hadoopCluster = HadoopRunnerFactory.getHadoopCluster(System.getProperties(), HadoopLocalRunner.class);
@@ -101,12 +101,12 @@ public static void startHadoop() throws Exception {
LOG.debug("HDFS Client: " + hdfsClient);
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void findMethodName(Method method) {
name = method.getName();
}
@BeforeMethod
@BeforeMethod(alwaysRun = true)
public void startServer() throws Exception {
// Get and set temporary path in hadoop cluster.
tmpPath = HdfsUtils.joinPathFragments(TMP_PATH_BASE, getClass().getName(), name);
@@ -122,12 +122,12 @@ public void startServer() throws Exception {
client = new SqoopClient(getServerUrl());
}
@AfterMethod
@AfterMethod(alwaysRun = true)
public void stopServer() throws Exception {
cluster.stop();
}
@AfterClass
@AfterClass(alwaysRun = true)
public static void stopHadoop() throws Exception {
hadoopCluster.stop();
}

View File

@@ -34,6 +34,7 @@
/**
*
*/
@Test(groups = "slow")
public class PartitionerTest extends ConnectorTestCase implements ITest {
/**