5
0
mirror of https://github.com/apache/sqoop.git synced 2025-05-16 00:41:23 +08:00

SQOOP-3091: Clean up expected exception logic in tests - Part I (Boglarka Egyed via Attila Szabo)
This commit is contained in:
Attila Szabo 2017-01-11 12:11:25 +01:00
parent 0bd850396e
commit 7c091a3313
7 changed files with 233 additions and 178 deletions

View File

@ -37,6 +37,7 @@
import java.util.ArrayList;
import java.util.List;
import junit.framework.JUnit4TestAdapter;
import org.apache.avro.Conversions;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
@ -49,12 +50,22 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Test that we can export Avro Data Files from HDFS into databases.
*/
@RunWith(JUnit4.class)
public class TestAvroExport extends ExportJobTestCase {
@Rule
public ExpectedException thrown = ExpectedException.none();
/**
* @return an argv for the CodeGenTool to use when creating tables to export.
*/
@ -342,6 +353,7 @@ protected void assertColMinAndMax(String colName, ColumnGenerator generator)
assertColValForRowId(maxId, colName, expectedMax);
}
@Test
public void testSupportedAvroTypes() throws IOException, SQLException {
GenericData.get().addLogicalTypeConversion(new Conversions.DecimalConversion());
@ -383,6 +395,7 @@ public void testSupportedAvroTypes() throws IOException, SQLException {
}
}
@Test
public void testPathPatternInExportDir() throws IOException, SQLException {
final int TOTAL_RECORDS = 10;
@ -403,6 +416,7 @@ public void testPathPatternInExportDir() throws IOException, SQLException {
verifyExport(TOTAL_RECORDS);
}
@Test
public void testNullableField() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1 * 10;
@ -421,6 +435,7 @@ public void testNullableField() throws IOException, SQLException {
assertColMinAndMax(forIdx(1), gen1);
}
@Test
public void testAvroRecordsNotSupported() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1;
@ -434,15 +449,12 @@ public void testAvroRecordsNotSupported() throws IOException, SQLException {
ColumnGenerator gen = colGenerator(record, schema, null, "VARCHAR(64)");
createAvroFile(0, TOTAL_RECORDS, gen);
createTable(gen);
try {
thrown.expect(Exception.class);
runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
fail("Avro records can not be exported.");
} catch (Exception e) {
// expected
assertTrue(true);
}
}
@Test
public void testMissingDatabaseFields() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1;
@ -458,6 +470,7 @@ public void testMissingDatabaseFields() throws IOException, SQLException {
}
// Test Case for Issue [SQOOP-2846]
@Test
public void testAvroWithUpsert() throws IOException, SQLException {
String[] argv = { "--update-key", "ID", "--update-mode", "allowinsert" };
final int TOTAL_RECORDS = 2;
@ -465,15 +478,13 @@ public void testAvroWithUpsert() throws IOException, SQLException {
// Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
createAvroFile(0, TOTAL_RECORDS, null);
createTableWithInsert();
try {
thrown.expect(Exception.class);
runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
} catch (Exception e) {
// expected
assertTrue(true);
}
}
// Test Case for Issue [SQOOP-2846]
@Test
public void testAvroWithUpdateKey() throws IOException, SQLException {
String[] argv = { "--update-key", "ID" };
final int TOTAL_RECORDS = 1;
@ -484,6 +495,8 @@ public void testAvroWithUpdateKey() throws IOException, SQLException {
runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
verifyExport(getMsgPrefix() + "0");
}
@Test
public void testMissingAvroFields() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1;
@ -492,15 +505,12 @@ public void testMissingAvroFields() throws IOException, SQLException {
ColumnGenerator gen = colGenerator(null, null, null, "VARCHAR(64)");
createAvroFile(0, TOTAL_RECORDS, gen);
createTable(gen);
try {
thrown.expect(Exception.class);
runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
fail("Missing Avro field.");
} catch (Exception e) {
// expected
assertTrue(true);
}
}
@Test
public void testSpecifiedColumnsAsAvroFields() throws IOException, SQLException {
final int TOTAL_RECORDS = 10;
ColumnGenerator[] gens = new ColumnGenerator[] {
@ -523,4 +533,9 @@ public void testSpecifiedColumnsAsAvroFields() throws IOException, SQLException
assertColValForRowId(9, "col3", null);
}
//workaround: ant kept falling back to JUnit3
/**
 * Adapts this JUnit 4 test class so JUnit 3 runners (ant) can execute it.
 */
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestAvroExport.class);
}
}

View File

@ -24,6 +24,7 @@
import java.util.List;
import java.util.Map;
import junit.framework.JUnit4TestAdapter;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
@ -33,13 +34,23 @@
import com.cloudera.sqoop.manager.ManagerFactory;
import com.cloudera.sqoop.metastore.JobData;
import com.cloudera.sqoop.tool.ImportTool;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Test the ConnFactory implementation and its ability to delegate to multiple
* different ManagerFactory implementations using reflection.
*/
@RunWith(JUnit4.class)
public class TestConnFactory extends TestCase {
@Rule
public ExpectedException thrown = ExpectedException.none();
@Test
public void testCustomFactory() throws IOException {
Configuration conf = new Configuration();
conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY,
@ -52,20 +63,19 @@ public void testCustomFactory() throws IOException {
assertTrue("Expected a DummyManager", manager instanceof DummyManager);
}
public void testExceptionForNoManager() {
@Test
public void testExceptionForNoManager() throws IOException {
Configuration conf = new Configuration();
conf.set(ConnFactory.FACTORY_CLASS_NAMES_KEY, EmptyFactory.class.getName());
ConnFactory factory = new ConnFactory(conf);
try {
thrown.expect(IOException.class);
factory.getManager(
new JobData(new SqoopOptions(), new ImportTool()));
fail("factory.getManager() expected to throw IOException");
} catch (IOException ioe) {
// Expected this. Test passes.
}
}
@Test
public void testMultipleManagers() throws IOException {
Configuration conf = new Configuration();
// The AlwaysDummyFactory is second in this list. Nevertheless, since
@ -185,4 +195,9 @@ public void importTable(ImportJobContext context) {
public void release() {
}
}
//workaround: ant kept falling back to JUnit3
/**
 * Adapts this JUnit 4 test class so JUnit 3 runners (ant) can execute it.
 */
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestConnFactory.class);
}
}

View File

@ -27,6 +27,7 @@
import java.sql.ResultSet;
import java.sql.SQLException;
import junit.framework.JUnit4TestAdapter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -34,13 +35,22 @@
import com.cloudera.sqoop.testutil.CommonArgs;
import com.cloudera.sqoop.testutil.ExportJobTestCase;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Test that we can update a copy of data in the database,
* based on newer data in HDFS.
*/
@RunWith(JUnit4.class)
public class TestExportUpdate extends ExportJobTestCase {
@Rule
public ExpectedException thrown = ExpectedException.none();
@Override
protected String getTablePrefix() {
return "UPDATE_TABLE_";
@ -405,6 +415,7 @@ private void runUpdate(int numMappers, String updateCol) throws IOException {
"--update-key", updateCol));
}
@Test
public void testBasicUpdate() throws Exception {
// Test that we can do a single-task single-file update.
// This updates the entire database.
@ -424,6 +435,7 @@ public void testBasicUpdate() throws Exception {
* and then modifies a subset of the rows via update.
* @throws Exception
*/
@Test
public void testMultiKeyUpdate() throws Exception {
createMultiKeyTable(3);
@ -461,6 +473,7 @@ public void testMultiKeyUpdate() throws Exception {
* and then modifies a subset of the rows via update.
* @throws Exception
*/
@Test
public void testMultiKeyUpdateMultipleFilesNoUpdate() throws Exception {
createMultiKeyTable(4);
@ -504,6 +517,7 @@ public void testMultiKeyUpdateMultipleFilesNoUpdate() throws Exception {
* and then modifies a subset of the rows via update.
* @throws Exception
*/
@Test
public void testMultiKeyUpdateMultipleFilesFullUpdate() throws Exception {
createMultiKeyTable(4);
@ -542,7 +556,7 @@ public void testMultiKeyUpdateMultipleFilesFullUpdate() throws Exception {
new int[] { 3, 2 }, 3, 2, "3bar2");
}
@Test
public void testEmptyTable() throws Exception {
// Test that an empty table will "accept" updates that modify
// no rows; no new data is injected into the database.
@ -552,6 +566,7 @@ public void testEmptyTable() throws Exception {
verifyRowCount(0);
}
@Test
public void testEmptyFiles() throws Exception {
// An empty input file results in no changes to a db table.
populateDatabase(10);
@ -564,6 +579,7 @@ public void testEmptyFiles() throws Exception {
verifyRow("A", "9", "9", "foo9", "9");
}
@Test
public void testStringCol() throws Exception {
// Test that we can do modifications based on the string "B" column.
populateDatabase(10);
@ -575,6 +591,7 @@ public void testStringCol() throws Exception {
verifyRow("B", "'foo9'", "18", "foo9", "18");
}
@Test
public void testLastCol() throws Exception {
// Test that we can do modifications based on the third int column.
populateDatabase(10);
@ -586,6 +603,7 @@ public void testLastCol() throws Exception {
verifyRow("C", "9", "18", "foo18", "9");
}
@Test
public void testMultiMaps() throws Exception {
// Test that we can handle multiple map tasks.
populateDatabase(20);
@ -600,6 +618,7 @@ public void testMultiMaps() throws Exception {
verifyRow("A", "19", "19", "foo38", "38");
}
@Test
public void testSubsetUpdate() throws Exception {
// Update only a few rows in the middle of the table.
populateDatabase(10);
@ -619,6 +638,7 @@ public void testSubsetUpdate() throws Exception {
verifyRow("A", "7", "7", "foo14", "14");
}
@Test
public void testSubsetUpdate2() throws Exception {
// Update only some of the rows in the db. Also include some
// updates that do not affect actual rows in the table.
@ -647,6 +667,7 @@ public void testSubsetUpdate2() throws Exception {
*
* @throws Exception
*/
@Test
public void testUpdateColumnSubset() throws Exception {
populateDatabase(4);
createUpdateFiles(1, 3, 0);
@ -675,15 +696,18 @@ public void testUpdateColumnSubset() throws Exception {
*
* @throws Exception
*/
@Test
public void testUpdateColumnNotInColumns() throws Exception {
populateDatabase(1);
try {
thrown.expect(IOException.class);
runExport(getArgv(true, 2, 2, "-m", "1",
"--update-key", "A", "--columns", "B"));
fail("Expected IOException");
} catch (IOException e) {
assertTrue(true);
}
//workaround: ant kept falling back to JUnit3
/**
 * Adapts this JUnit 4 test class so JUnit 3 runners (ant) can execute it.
 */
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestExportUpdate.class);
}
}

View File

@ -20,10 +20,16 @@
import com.cloudera.sqoop.testutil.ExportJobTestCase;
import com.google.common.collect.Lists;
import junit.framework.JUnit4TestAdapter;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.kitesdk.data.*;
import java.io.IOException;
@ -41,8 +47,12 @@
/**
* Test that we can export Parquet Data Files from HDFS into databases.
*/
@RunWith(JUnit4.class)
public class TestParquetExport extends ExportJobTestCase {
@Rule
public ExpectedException thrown = ExpectedException.none();
/**
* @return an argv for the CodeGenTool to use when creating tables to export.
*/
@ -318,6 +328,7 @@ protected void assertColMinAndMax(String colName, ColumnGenerator generator)
assertColValForRowId(maxId, colName, expectedMax);
}
@Test
public void testSupportedParquetTypes() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1 * 10;
@ -351,6 +362,7 @@ public void testSupportedParquetTypes() throws IOException, SQLException {
}
}
@Test
public void testNullableField() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1 * 10;
@ -369,6 +381,7 @@ public void testNullableField() throws IOException, SQLException {
assertColMinAndMax(forIdx(1), gen1);
}
@Test
public void testParquetRecordsNotSupported() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1;
@ -382,15 +395,12 @@ public void testParquetRecordsNotSupported() throws IOException, SQLException {
ColumnGenerator gen = colGenerator(record, schema, null, "VARCHAR(64)");
createParquetFile(0, TOTAL_RECORDS, gen);
createTable(gen);
try {
thrown.expect(Exception.class);
runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
fail("Parquet records can not be exported.");
} catch (Exception e) {
// expected
assertTrue(true);
}
}
@Test
public void testMissingDatabaseFields() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1;
@ -405,6 +415,7 @@ public void testMissingDatabaseFields() throws IOException, SQLException {
verifyExport(TOTAL_RECORDS);
}
@Test
public void testParquetWithUpdateKey() throws IOException, SQLException {
String[] argv = { "--update-key", "ID" };
final int TOTAL_RECORDS = 1;
@ -415,6 +426,7 @@ public void testParquetWithUpdateKey() throws IOException, SQLException {
}
// Test Case for Issue [SQOOP-2846]
@Test
public void testParquetWithUpsert() throws IOException, SQLException {
String[] argv = { "--update-key", "ID", "--update-mode", "allowinsert" };
final int TOTAL_RECORDS = 2;
@ -422,13 +434,12 @@ public void testParquetWithUpsert() throws IOException, SQLException {
// Schema.create(Schema.Type.STRING), null, "VARCHAR(64)");
createParquetFile(0, TOTAL_RECORDS, null);
createTableWithInsert();
try {
thrown.expect(Exception.class);
runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
} catch (Exception e) {
// expected
assertTrue(true);
}
}
@Test
public void testMissingParquetFields() throws IOException, SQLException {
String[] argv = {};
final int TOTAL_RECORDS = 1;
@ -437,13 +448,14 @@ public void testMissingParquetFields() throws IOException, SQLException {
ColumnGenerator gen = colGenerator(null, null, null, "VARCHAR(64)");
createParquetFile(0, TOTAL_RECORDS, gen);
createTable(gen);
try {
thrown.expect(Exception.class);
runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + 1)));
fail("Missing Parquet field.");
} catch (Exception e) {
// expected
assertTrue(true);
}
//workaround: ant kept falling back to JUnit3
/**
 * Adapts this JUnit 4 test class so JUnit 3 runners (ant) can execute it.
 */
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestParquetExport.class);
}
}

View File

@ -21,6 +21,7 @@
import java.util.Properties;
import com.cloudera.sqoop.tool.BaseSqoopTool;
import junit.framework.JUnit4TestAdapter;
import junit.framework.TestCase;
import org.apache.commons.lang.ArrayUtils;
@ -31,7 +32,9 @@
import com.cloudera.sqoop.testutil.HsqldbTestServer;
import org.junit.Before;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@ -48,6 +51,9 @@ public class TestSqoopOptions extends TestCase {
private Properties originalSystemProperties;
@Rule
public ExpectedException thrown = ExpectedException.none();
@Before
public void setup() {
originalSystemProperties = System.getProperties();
@ -66,22 +72,14 @@ public void testNormalChar() throws Exception {
@Test
public void testEmptyString() throws Exception {
  // The ExpectedException rule asserts that toChar rejects an empty string.
  // The former try/catch wrapper must not remain: catching the exception here
  // would swallow it before the rule sees it, failing the test unconditionally.
  thrown.expect(SqoopOptions.InvalidOptionsException.class);
  SqoopOptions.toChar("");
}
@Test
public void testNullString() throws Exception {
  // The ExpectedException rule asserts that toChar rejects a null argument;
  // the leftover try/catch (which swallowed the exception and defeated the
  // rule) and the dead fail() call are removed.
  thrown.expect(SqoopOptions.InvalidOptionsException.class);
  SqoopOptions.toChar(null);
}
@Test
@ -134,22 +132,14 @@ public void testWhitespaceToChar() throws Exception {
@Test
public void testUnknownEscape1() throws Exception {
  // An unrecognized escape sequence ("\Q") must be rejected. Rely solely on
  // the ExpectedException rule; the residual try/catch would swallow the
  // exception and make the rule fail the test.
  thrown.expect(SqoopOptions.InvalidOptionsException.class);
  SqoopOptions.toChar("\\Q");
}
@Test
public void testUnknownEscape2() throws Exception {
  // "\nn" (escape followed by trailing characters) must be rejected. Rely
  // solely on the ExpectedException rule; the residual try/catch would
  // swallow the exception and make the rule fail the test.
  thrown.expect(SqoopOptions.InvalidOptionsException.class);
  SqoopOptions.toChar("\\nn");
}
@Test
@ -184,22 +174,14 @@ public void testOctalChar2() throws Exception {
@Test
public void testErrOctalChar() throws Exception {
  // A malformed octal escape ("\095" — 9 is not an octal digit) must raise
  // NumberFormatException. The leftover try/catch swallowed it and defeated
  // the ExpectedException rule, so it is removed along with the dead fail().
  thrown.expect(NumberFormatException.class);
  SqoopOptions.toChar("\\095");
}
@Test
public void testErrHexChar() throws Exception {
  // A malformed hex escape ("\0x9K5" — K is not a hex digit) must raise
  // NumberFormatException. Rely solely on the ExpectedException rule; the
  // residual try/catch would swallow the exception before the rule sees it.
  thrown.expect(NumberFormatException.class);
  SqoopOptions.toChar("\\0x9K5");
}
private SqoopOptions parse(String [] argv) throws Exception {
@ -258,12 +240,8 @@ public void testBadNumMappers1() throws Exception {
"x",
};
try {
thrown.expect(SqoopOptions.InvalidOptionsException.class);
parse(args);
fail("Expected InvalidOptionsException");
} catch (SqoopOptions.InvalidOptionsException ioe) {
// expected.
}
}
@Test
@ -273,12 +251,8 @@ public void testBadNumMappers2() throws Exception {
"x",
};
try {
thrown.expect(SqoopOptions.InvalidOptionsException.class);
parse(args);
fail("Expected InvalidOptionsException");
} catch (SqoopOptions.InvalidOptionsException ioe) {
// expected.
}
}
@Test
@ -719,12 +693,9 @@ public void testDeleteTargetDirWithAppend() throws Exception {
"--append",
"--delete-target-dir",
};
try {
thrown.expect(SqoopOptions.InvalidOptionsException.class);
validateImportOptions(extraArgs);
fail("Expected InvalidOptionsException");
} catch(SqoopOptions.InvalidOptionsException ioe) {
// Expected
}
}
//test incompatability of --delete-target-dir with incremental import
@ -734,12 +705,9 @@ public void testDeleteWithIncrementalImport() throws Exception {
"--incremental", "append",
"--delete-target-dir",
};
try {
thrown.expect(SqoopOptions.InvalidOptionsException.class);
validateImportOptions(extraArgs);
fail("Expected InvalidOptionsException");
} catch(SqoopOptions.InvalidOptionsException ioe) {
// Expected
}
}
// test that hbase bulk load import with table name and target dir
@ -761,12 +729,9 @@ public void testHBaseBulkLoadMissingHbaseTable() throws Exception {
String [] extraArgs = {
longArgument(BaseSqoopTool.HBASE_BULK_LOAD_ENABLED_ARG),
longArgument(BaseSqoopTool.TARGET_DIR_ARG), "./test"};
try {
thrown.expect(SqoopOptions.InvalidOptionsException.class);
validateImportOptions(extraArgs);
fail("Expected InvalidOptionsException");
} catch (SqoopOptions.InvalidOptionsException ioe) {
// Expected
}
}
private static String longArgument(String argument) {
@ -796,11 +761,13 @@ public void testResetToOneMapperAndSplitBy() throws Exception {
"--split-by",
"col0",
};
try {
thrown.expect(SqoopOptions.InvalidOptionsException.class);
validateImportOptions(extraArgs);
fail("Expected InvalidOptionsException");
} catch (SqoopOptions.InvalidOptionsException ioe) {
// Expected
}
//workaround: ant kept falling back to JUnit3
/**
 * Adapts this JUnit 4 test class so JUnit 3 runners (ant) can execute it.
 */
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestSqoopOptions.class);
}
}

View File

@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.ArrayList;
import junit.framework.JUnit4TestAdapter;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@ -32,15 +33,24 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Test that --target-dir works.
*/
@RunWith(JUnit4.class)
public class TestTargetDir extends ImportJobTestCase {
public static final Log LOG = LogFactory
.getLog(TestTargetDir.class.getName());
@Rule
public ExpectedException thrown = ExpectedException.none();
/**
* Create the argv to pass to Sqoop.
*
@ -70,9 +80,8 @@ protected String getTableName() {
}
/** test invalid argument exception if several output options. */
@Test
public void testSeveralOutputsIOException() throws IOException {
try {
ArrayList args = getOutputArgv(true);
args.add("--warehouse-dir");
args.add(getWarehouseDir());
@ -80,17 +89,13 @@ public void testSeveralOutputsIOException() throws IOException {
args.add(getWarehouseDir());
String[] argv = (String[]) args.toArray(new String[0]);
thrown.expect(IOException.class);
runImport(argv);
fail("warehouse-dir & target-dir were set and run "
+ "without problem reported");
} catch (IOException e) {
// expected
}
}
/** test target-dir contains imported files. */
@Test
public void testTargetDir() throws IOException {
try {
@ -123,9 +128,8 @@ public void testTargetDir() throws IOException {
/** test target-dir breaks if already existing
* (only allowed in append mode). */
@Test
public void testExistingTargetDir() throws IOException {
try {
String targetDir = getWarehouseDir() + "/tempTargetDir";
ArrayList args = getOutputArgv(true);
@ -140,12 +144,13 @@ public void testExistingTargetDir() throws IOException {
}
String[] argv = (String[]) args.toArray(new String[0]);
thrown.expect(IOException.class);
runImport(argv);
fail("Existing target-dir run without problem report");
} catch (IOException e) {
// expected
}
//workaround: ant kept falling back to JUnit3
/**
 * Adapts this JUnit 4 test class so JUnit 3 runners (ant) can execute it.
 */
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestTargetDir.class);
}
}

View File

@ -29,6 +29,7 @@
import java.sql.Time;
import java.sql.Types;
import junit.framework.JUnit4TestAdapter;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.manager.GenericJdbcManager;
@ -39,11 +40,18 @@
import com.cloudera.sqoop.SqoopOptions;
import com.cloudera.sqoop.TestExport;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* We'll use H2 as a database as the version of HSQLDB we currently depend on
* (1.8) doesn't include support for stored procedures.
*/
@RunWith(JUnit4.class)
public class TestExportUsingProcedure extends TestExport {
private static final String PROCEDURE_NAME = "INSERT_PROCEDURE";
/**
@ -55,6 +63,17 @@ public class TestExportUsingProcedure extends TestExport {
private String[] types;
private Connection connection;
@Rule
public ExpectedException thrown = ExpectedException.none();
@Rule
public TestName testName = new TestName();
/**
 * Returns the currently running test method's name via the JUnit 4
 * {@code TestName} rule, preserving the JUnit 3-style getName() contract.
 */
@Override
public String getName() {
return testName.getMethodName();
}
@Override
@Before
public void setUp() {
@ -193,31 +212,24 @@ public void tearDown() {
// TEST OVERRIDES
@Override
public void testMultiMapTextExportWithStaging() throws IOException,
SQLException {
try {
@Test
public void testMultiMapTextExportWithStaging() throws IOException, SQLException {
thrown.expect(IOException.class);
super.testMultiMapTextExportWithStaging();
fail("staging tables not compatible with --call");
} catch (IOException e) {
// expected
}
}
@Override
public void testMultiTransactionWithStaging() throws IOException,
SQLException {
try {
@Test
public void testMultiTransactionWithStaging() throws IOException, SQLException {
thrown.expect(IOException.class);
super.testMultiTransactionWithStaging();
fail("staging tables not compatible with --call");
} catch (IOException e) {
// expected
}
}
/**
* H2 renames the stored procedure arguments P1, P2, ..., Pn.
*/
@Override
@Test
public void testColumnsExport() throws IOException, SQLException {
// H2 renames the stored-procedure arguments to P1, P2, ..., Pn,
// so pass those generated column names to the parent test.
super.testColumnsExport("P1,P2,P3,P4");
}
@ -326,4 +338,9 @@ public void set(PreparedStatement on) throws SQLException {
});
}
//workaround: ant kept falling back to JUnit3
/**
 * Adapts this JUnit 4 test class so JUnit 3 runners (ant) can execute it.
 */
public static junit.framework.Test suite() {
return new JUnit4TestAdapter(TestExportUsingProcedure.class);
}
}