SQOOP-3092: Clean up expected exception logic in tests - Part II
(Boglarka Egyed via Attila Szabo)

commit d006bc7515
parent 7c091a3313
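The same pattern is applied in all seven test classes below: each hand-rolled try / fail() / catch block is replaced by JUnit 4's ExpectedException rule, @Test and @RunWith(JUnit4.class) annotations are added, and a static suite() bridge keeps ant's JUnit 3 runner working. A minimal before/after sketch of the exception-expectation change (the class and method names here are illustrative, not from the patch):

import static org.junit.Assert.fail;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectedExceptionPatternTest {

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  // Before: the test catches the exception itself and fails if it never fires.
  @Test
  public void oldStyle() {
    try {
      methodThatShouldThrow();
      fail("Expected IllegalStateException");
    } catch (IllegalStateException expected) {
      // expected; ok.
    }
  }

  // After: the rule verifies that the rest of the test method throws.
  @Test
  public void newStyle() {
    thrown.expect(IllegalStateException.class);
    methodThatShouldThrow();
  }

  private void methodThatShouldThrow() {
    throw new IllegalStateException("boom");
  }
}

Since the exception is no longer caught inside the method, the expected type moves from the catch clause into the method's throws clause; that is why several signatures in the hunks below gain InvalidOptionsException or IOException.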
TestHiveImport.java

@@ -26,6 +26,7 @@
 import java.util.Arrays;
 import java.util.List;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -34,6 +35,7 @@
 import org.apache.hadoop.fs.Path;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
 
 import com.cloudera.sqoop.SqoopOptions;
@@ -46,6 +48,9 @@
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.tool.SqoopTool;
 import org.apache.commons.cli.ParseException;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 import org.kitesdk.data.Dataset;
 import org.kitesdk.data.DatasetReader;
 import org.kitesdk.data.Datasets;
@@ -53,11 +58,15 @@
 /**
  * Test HiveImport capability after an import to HDFS.
  */
+@RunWith(JUnit4.class)
 public class TestHiveImport extends ImportJobTestCase {
 
   public static final Log LOG = LogFactory.getLog(
       TestHiveImport.class.getName());
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Before
   public void setUp() {
     super.setUp();
@@ -406,20 +415,16 @@ public void testAppendHiveImportAsParquet() throws IOException {
    * Test hive create and --as-parquetfile options validation.
    */
   @Test
-  public void testCreateHiveImportAsParquet() throws ParseException {
+  public void testCreateHiveImportAsParquet() throws ParseException, InvalidOptionsException {
     final String TABLE_NAME = "CREATE_HIVE_IMPORT_AS_PARQUET";
     setCurTableName(TABLE_NAME);
     setNumCols(3);
     String [] extraArgs = {"--as-parquetfile", "--create-hive-table"};
     ImportTool tool = new ImportTool();
 
-    try {
+    thrown.expect(InvalidOptionsException.class);
     tool.validateOptions(tool.parseArguments(getArgv(false, extraArgs), null,
         null, true));
-      fail("Expected InvalidOptionsException");
-    } catch (InvalidOptionsException ex) {
-      /* success */
-    }
   }
 
 
@@ -449,7 +454,7 @@ public void testNumeric() throws IOException {
 
   /** If bin/hive returns an error exit status, we should get an IOException. */
   @Test
-  public void testHiveExitFails() {
+  public void testHiveExitFails() throws IOException {
     // The expected script is different than the one which would be generated
     // by this, so we expect an IOException out.
     final String TABLE_NAME = "FAILING_HIVE_IMPORT";
@@ -457,14 +462,10 @@ public void testHiveExitFails() {
     setNumCols(2);
     String [] types = { "NUMERIC", "CHAR(64)" };
     String [] vals = { "3.14159", "'foo'" };
-    try {
+    thrown.expect(IOException.class);
     runImportTest(TABLE_NAME, types, vals, "failingImport.q",
         getArgv(false, null), new ImportTool());
-      // If we get here, then the run succeeded -- which is incorrect.
-      fail("FAILING_HIVE_IMPORT test should have thrown IOException");
-    } catch (IOException ioe) {
-      // expected; ok.
-    }
   }
 
   /** Test that we can set delimiters how we want them. */
@@ -585,7 +586,7 @@ public void testFieldWithHiveDelimsReplacement() throws IOException,
    * Test hive drop and replace option validation.
    */
   @Test
-  public void testHiveDropAndReplaceOptionValidation() throws ParseException {
+  public void testHiveDropAndReplaceOptionValidation() throws ParseException, InvalidOptionsException {
     LOG.info("Testing conflicting Hive delimiter drop/replace options");
 
     setNumCols(3);
@@ -593,13 +594,10 @@ public void testHiveDropAndReplaceOptionValidation() throws ParseException {
         "--"+BaseSqoopTool.HIVE_DROP_DELIMS_ARG, };
 
     ImportTool tool = new ImportTool();
-    try {
+    thrown.expect(InvalidOptionsException.class);
     tool.validateOptions(tool.parseArguments(getArgv(false, moreArgs), null,
         null, true));
-      fail("Expected InvalidOptionsException");
-    } catch (InvalidOptionsException ex) {
-      /* success */
-    }
   }
 
   /**
@@ -627,7 +625,7 @@ public void testImportHiveWithPartitions() throws IOException,
    * IOException.
    * */
   @Test
-  public void testImportWithBadPartitionKey() {
+  public void testImportWithBadPartitionKey() throws IOException {
     final String TABLE_NAME = "FAILING_PARTITION_HIVE_IMPORT";
 
     LOG.info("Doing import of single row into " + TABLE_NAME + " table");
@@ -654,30 +652,23 @@ public void testImportWithBadPartitionKey() {
     };
 
     // Test hive-import with the 1st args.
-    try {
+    thrown.expect(IOException.class);
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs1), new ImportTool());
-      fail(TABLE_NAME + " test should have thrown IOException");
-    } catch (IOException ioe) {
-      // expected; ok.
-    }
 
     // Test hive-import with the 2nd args.
-    try {
+    thrown.expect(IOException.class);
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getArgv(false, moreArgs2), new ImportTool());
-      fail(TABLE_NAME + " test should have thrown IOException");
-    } catch (IOException ioe) {
-      // expected; ok.
-    }
 
     // Test create-hive-table with the 1st args.
-    try {
+    thrown.expect(IOException.class);
     runImportTest(TABLE_NAME, types, vals, "partitionImport.q",
         getCreateTableArgv(false, moreArgs1), new CreateHiveTableTool());
-      fail(TABLE_NAME + " test should have thrown IOException");
-    } catch (IOException ioe) {
-      // expected; ok.
-    }
   }
+
+  //workaround: ant kept falling back to JUnit3
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestHiveImport.class);
+  }
 }
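Each class also gains the same three-line suite() hook, with the in-tree comment "workaround: ant kept falling back to JUnit3". JUnit4TestAdapter wraps the annotation-driven tests in the junit.framework.Test interface that a JUnit 3 style runner discovers reflectively via the static suite() method. A rough sketch of that discovery mechanism (this runner is illustrative, not ant's actual <junit> task):

import junit.framework.JUnit4TestAdapter;
import junit.framework.TestResult;

public class Junit3StyleRunner {
  public static void main(String[] args) throws Exception {
    // A JUnit 3 style runner looks up the static suite() method reflectively
    // and runs whatever junit.framework.Test it returns -- here, an adapter
    // that delegates to the JUnit 4 annotations.
    junit.framework.Test suite =
        (junit.framework.Test) Class.forName(args[0])
            .getMethod("suite").invoke(null);

    TestResult result = new TestResult();
    suite.run(result);
    System.out.println("Ran " + result.runCount()
        + " tests, failures: " + result.failureCount()
        + ", errors: " + result.errorCount());
  }
}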
TestTableDefWriter.java

@@ -20,6 +20,7 @@
 import java.util.Map;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
@@ -30,26 +31,33 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.tool.ImportTool;
 import com.cloudera.sqoop.testutil.HsqldbTestServer;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
 import java.sql.Types;
 
 /**
  * Test Hive DDL statement generation.
  */
+@RunWith(JUnit4.class)
 public class TestTableDefWriter extends TestCase {
 
   public static final Log LOG = LogFactory.getLog(
       TestTableDefWriter.class.getName());
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   // Test getHiveOctalCharCode and expect an IllegalArgumentException.
   private void expectExceptionInCharCode(int charCode) {
-    try {
+    thrown.expect(IllegalArgumentException.class);
     TableDefWriter.getHiveOctalCharCode(charCode);
-      fail("Expected IllegalArgumentException");
-    } catch (IllegalArgumentException iae) {
-      // Expected; ok.
-    }
   }
 
+  @Test
   public void testHiveOctalCharCode() {
     assertEquals("\\000", TableDefWriter.getHiveOctalCharCode(0));
     assertEquals("\\001", TableDefWriter.getHiveOctalCharCode(1));
@@ -61,6 +69,7 @@ public void testHiveOctalCharCode() {
     expectExceptionInCharCode(254);
   }
 
+  @Test
   public void testDifferentTableNames() throws Exception {
     Configuration conf = new Configuration();
     SqoopOptions options = new SqoopOptions();
@@ -83,6 +92,7 @@ public void testDifferentTableNames() throws Exception {
     assertTrue(loadData.indexOf("/inputTable'") != -1);
   }
 
+  @Test
   public void testDifferentTargetDirs() throws Exception {
     String targetDir = "targetDir";
     String inputTable = "inputTable";
@@ -111,6 +121,7 @@ public void testDifferentTargetDirs() throws Exception {
     assertTrue(loadData.indexOf("/" + targetDir + "'") != -1);
   }
 
+  @Test
   public void testPartitions() throws Exception {
     String[] args = {
       "--hive-partition-key", "ds",
@@ -137,6 +148,7 @@ public void testPartitions() throws Exception {
     assertTrue(loadData.endsWith(" PARTITION (ds='20110413')"));
   }
 
+  @Test
   public void testLzoSplitting() throws Exception {
     String[] args = {
       "--compress",
@@ -165,6 +177,7 @@ public void testLzoSplitting() throws Exception {
         createTable);
   }
 
+  @Test
   public void testUserMapping() throws Exception {
     String[] args = {
       "--map-column-hive", "id=STRING,value=INTEGER",
@@ -191,6 +204,7 @@ public void testUserMapping() throws Exception {
     assertFalse(createTable.contains("`value` STRING"));
   }
 
+  @Test
   public void testUserMappingFailWhenCantBeApplied() throws Exception {
     String[] args = {
       "--map-column-hive", "id=STRING,value=INTEGER",
@@ -205,14 +219,11 @@ public void testUserMappingFailWhenCantBeApplied() throws Exception {
     colTypes.put("id", Types.INTEGER);
     writer.setColumnTypes(colTypes);
 
-    try {
+    thrown.expect(IllegalArgumentException.class);
     String createTable = writer.getCreateTableStmt();
-      fail("Expected failure on non applied mapping.");
-    } catch(IllegalArgumentException iae) {
-      // Expected, ok
-    }
   }
 
+  @Test
   public void testHiveDatabase() throws Exception {
     String[] args = {
       "--hive-database", "db",
@@ -234,4 +245,9 @@ public void testHiveDatabase() throws Exception {
     assertNotNull(loadStmt);
     assertTrue(createTable.contains("`db`.`outputTable`"));
   }
+
+  //workaround: ant kept falling back to JUnit3
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestTableDefWriter.class);
+  }
 }
TestCodecMap.java

@@ -20,28 +20,40 @@
 import java.io.IOException;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 
 import junit.framework.TestCase;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test looking up codecs by name.
 */
+@RunWith(JUnit4.class)
 public class TestCodecMap extends TestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   private void verifyCodec(Class<?> c, String codecName)
       throws UnsupportedCodecException {
     CompressionCodec codec = CodecMap.getCodec(codecName, new Configuration());
     assertEquals(codec.getClass(), c);
   }
 
+  @Test
   public void testGetCodecNames() {
     // gzip is picked up from Hadoop defaults
     assertTrue(CodecMap.getCodecNames().contains("gzip"));
   }
 
+  @Test
   public void testGetCodec() throws IOException {
     verifyCodec(GzipCodec.class, "gzip");
     verifyCodec(GzipCodec.class, "Gzip");
@@ -52,15 +64,13 @@ public void testGetCodec() throws IOException {
     verifyCodec(GzipCodec.class, "org.apache.hadoop.io.compress.GzipCodec");
   }
 
+  @Test
   public void testGetShortName() throws UnsupportedCodecException {
     verifyShortName("gzip", "org.apache.hadoop.io.compress.GzipCodec");
     verifyShortName("default", "org.apache.hadoop.io.compress.DefaultCodec");
-    try {
+    thrown.expect(IOException.class);
     verifyShortName("NONE", "bogus");
-      fail("Expected IOException");
-    } catch (UnsupportedCodecException e) {
-      // Exception is expected
-    }
   }
 
   private void verifyShortName(String expected, String codecName)
@@ -69,12 +79,14 @@ private void verifyShortName(String expected, String codecName)
         CodecMap.getCodecShortNameByName(codecName, new Configuration()));
   }
 
-  public void testUnrecognizedCodec() {
-    try {
+  @Test
+  public void testUnrecognizedCodec() throws UnsupportedCodecException {
+    thrown.expect(UnsupportedCodecException.class);
     CodecMap.getCodec("bogus", new Configuration());
-      fail("'bogus' codec should throw exception");
-    } catch (UnsupportedCodecException e) {
-      // expected
-    }
   }
+
+  //workaround: ant kept falling back to JUnit3
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestCodecMap.class);
+  }
 }
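A detail worth noting in the testGetShortName hunk above: the rule is armed with IOException.class while the call raises UnsupportedCodecException, the type the old catch clause named. ExpectedException matches with instanceof semantics rather than exact class equality, so the broader expectation still passes, presumably because Sqoop declares UnsupportedCodecException as a subclass of IOException. A self-contained sketch of that supertype matching (the nested exception class is a stand-in, not Sqoop's):

import java.io.IOException;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class SupertypeMatchTest {

  // Hypothetical subclass standing in for Sqoop's UnsupportedCodecException.
  static class UnsupportedCodecException extends IOException {
    UnsupportedCodecException(String msg) { super(msg); }
  }

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Test
  public void subtypeSatisfiesExpectedSupertype() throws IOException {
    // Expecting the supertype...
    thrown.expect(IOException.class);
    // ...is satisfied by a thrown subtype, because the rule matches with
    // an instanceof-style check rather than exact class equality.
    throw new UnsupportedCodecException("bogus codec");
  }
}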
TestLobFile.java

@@ -27,6 +27,7 @@
 import java.io.Writer;
 import java.nio.CharBuffer;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
@@ -34,10 +35,17 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test the LobFile reader/writer implementation.
 */
+@RunWith(JUnit4.class)
 public class TestLobFile extends TestCase {
 
   public static final Log LOG = LogFactory.getLog(
@@ -57,6 +65,10 @@ public class TestLobFile extends TestCase {
   private Configuration conf;
   private FileSystem fs;
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Before
   public void setUp() throws Exception {
     conf = new Configuration();
     conf.set("fs.default.name", "file:///");
@@ -130,12 +142,8 @@ private void verifyClobFile(Path p, String... expectedRecords)
 
     reader.close();
 
-    try {
+    thrown.expect(IOException.class);
     reader.next();
-      fail("Expected IOException calling next after close");
-    } catch (IOException ioe) {
-      // expected this.
-    }
 
     // A second close shouldn't hurt anything. This should be a no-op.
     reader.close();
@@ -148,15 +156,18 @@ private void runClobFileTest(Path p, String codec,
     fs.delete(p, false);
   }
 
+  @Test
   public void testEmptyRecord() throws Exception {
     runClobFileTest(new Path(TEMP_BASE_DIR, "empty.lob"), null);
   }
 
+  @Test
   public void testSingleRecord() throws Exception {
     runClobFileTest(new Path(TEMP_BASE_DIR, "single.lob"),
         null, "this is a single record!");
   }
 
+  @Test
   public void testMultiRecords() throws Exception {
     runClobFileTest(new Path(TEMP_BASE_DIR, "multi.lob"),
         CodecMap.NONE,
@@ -165,6 +176,7 @@ public void testMultiRecords() throws Exception {
         "yet one more record graces this file.");
   }
 
+  @Test
   public void testMultiIndexSegments() throws Exception {
     // Test that we can use multiple IndexSegments.
     runClobFileTest(new Path(TEMP_BASE_DIR, "multi-index.lob"),
@@ -231,6 +243,7 @@ private void runLineAndRecordTest(Path p, String firstLine,
     assertFalse(reader.isRecordAvailable());
   }
 
+  @Test
   public void testVeryShortRead() throws Exception {
     // Read only a small fraction of a record, ensure that we can
     // read the next record, even when we've left more than a 16-byte
@@ -250,6 +263,7 @@ public void testVeryShortRead() throws Exception {
 
   }
 
+  @Test
   public void testIncompleteOverread() throws Exception {
     // Read most of the first record so that we partially consume the
     // next record start mark; make sure we realign properly.
@@ -266,6 +280,7 @@ public void testIncompleteOverread() throws Exception {
         RECORD3);
   }
 
+  @Test
   public void testSeekToRecord() throws Exception {
     // Seek past the first two records and read the third.
 
@@ -342,6 +357,7 @@ private void verifyNextRecord(LobFile.Reader reader, long expectedId,
     assertEquals(expectedRecord, finalRecord);
   }
 
+  @Test
   public void testManySeeks() throws Exception {
     // Test that we can do gymnastics with seeking between records.
 
@@ -505,6 +521,7 @@ private void verifyBlobRecords(Path p, int numRecords,
     reader.close();
   }
 
+  @Test
   public void testBinaryRecords() throws Exception {
     // Write a BLOB file and read it all back.
 
@@ -523,6 +540,7 @@ public void testBinaryRecords() throws Exception {
     verifyBlobRecords(p, NUM_RECORDS, RECORD_LEN, RECORD_LEN);
   }
 
+  @Test
   public void testOverLengthBinaryRecord() throws Exception {
     // Write a record with a declared length shorter than the
     // actual length, and read it back.
@@ -556,6 +574,7 @@ private void runCompressedTest(String codec) throws Exception {
     runClobFileTest(p, codec, records);
   }
 
+  @Test
   public void testCompressedFile() throws Exception {
     // Test all the various compression codecs.
 
@@ -564,15 +583,13 @@ public void testCompressedFile() throws Exception {
     runCompressedTest(CodecMap.NONE);
     runCompressedTest(CodecMap.DEFLATE);
 
-    try {
-      // We expect this to throw UnsupportedCodecException
-      // because this class is not included in our package.
+    thrown.expect(UnsupportedCodecException.class);
     runCompressedTest(CodecMap.LZO);
-      fail("Expected unsupported codec exception for lzo");
-    } catch (UnsupportedCodecException uce) {
-      // We pass.
-      LOG.info("Got unsupported codec exception for lzo; expected -- good.");
-    }
   }
+
+  //workaround: ant kept falling back to JUnit3
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestLobFile.class);
+  }
 }
 
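One behavioral subtlety in the verifyClobFile hunk: under the old try/catch, execution continued past the expected failure and went on to exercise the "second close is a no-op" check; with the rule, the expected IOException from reader.next() propagates straight out of the helper, so the statements after it are presumably never reached. When assertions must still run after the throwing call, catching locally remains the workable pattern, sketched here with a hypothetical stand-in for LobFile.Reader:

import static org.junit.Assert.assertTrue;

import java.io.IOException;

public class OrderingSketch {

  // Hypothetical stand-in for LobFile.Reader: next() fails once closed.
  interface Reader {
    boolean next() throws IOException;
    void close() throws IOException;
  }

  // If later assertions must still run, catching locally (as the old
  // code did) preserves them; ExpectedException would end the test at
  // the first expected throw.
  static void verifyCloseBehavior(Reader reader) throws IOException {
    reader.close();

    boolean threw = false;
    try {
      reader.next();
    } catch (IOException expected) {
      threw = true; // next() after close must fail
    }
    assertTrue("Expected IOException calling next after close", threw);

    // Still reachable: a second close should be a harmless no-op.
    reader.close();
  }
}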
TestHCatalogBasic.java

@@ -18,6 +18,7 @@
 
 package org.apache.sqoop.hcat;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
 
 import org.junit.Before;
@@ -25,14 +26,23 @@
 import com.cloudera.sqoop.SqoopOptions;
 import com.cloudera.sqoop.tool.ExportTool;
 import com.cloudera.sqoop.tool.ImportTool;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test basic HCatalog related features.
 */
+@RunWith(JUnit4.class)
 public class TestHCatalogBasic extends TestCase {
   private static ImportTool importTool;
   private static ExportTool exportTool;
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @Before
   @Override
   public void setUp() {
@@ -49,6 +59,7 @@ private SqoopOptions parseExportArgs(String[] argv) throws Exception {
     return opts;
   }
 
+  @Test
   public void testHCatalogHomeWithImport() throws Exception {
     String[] args = {
       "--hcatalog-home",
@@ -58,6 +69,7 @@ public void testHCatalogHomeWithImport() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatalogHomeWithExport() throws Exception {
     String[] args = {
       "--hcatalog-home",
@@ -67,6 +79,7 @@ public void testHCatalogHomeWithExport() throws Exception {
     SqoopOptions opts = parseExportArgs(args);
   }
 
+  @Test
   public void testHCatalogImport() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -76,6 +89,7 @@ public void testHCatalogImport() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatalogExport() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -85,6 +99,7 @@ public void testHCatalogExport() throws Exception {
     SqoopOptions opts = parseExportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithTargetDir() throws Exception {
     String[] args = {
       "--connect",
@@ -96,15 +111,13 @@ public void testHCatImportWithTargetDir() throws Exception {
       "--target-dir",
      "/target/dir",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithWarehouseDir() throws Exception {
     String[] args = {
       "--connect",
@@ -116,15 +129,13 @@ public void testHCatImportWithWarehouseDir() throws Exception {
       "--warehouse-dir",
      "/target/dir",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithHiveImport() throws Exception {
     String[] args = {
       "--connect",
@@ -135,15 +146,13 @@ public void testHCatImportWithHiveImport() throws Exception {
       "table",
       "--hive-import",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatExportWithExportDir() throws Exception {
     String[] args = {
       "--connect",
@@ -155,15 +164,13 @@ public void testHCatExportWithExportDir() throws Exception {
       "--export-dir",
      "/export/dir",
     };
-    try {
     SqoopOptions opts = parseExportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     exportTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatExportWithParquetFile() throws Exception {
     String[] args = {
       "--connect",
@@ -174,15 +181,13 @@ public void testHCatExportWithParquetFile() throws Exception {
       "table",
       "--as-parquetfile",
     };
-    try {
     SqoopOptions opts = parseExportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     exportTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithSequenceFile() throws Exception {
     String[] args = {
       "--connect",
@@ -193,15 +198,13 @@ public void testHCatImportWithSequenceFile() throws Exception {
       "table",
       "--as-sequencefile",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithParquetFile() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -215,15 +218,13 @@ public void testHCatImportWithParquetFile() throws Exception {
       "table",
       "--as-parquetfile",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithAvroFile() throws Exception {
     String[] args = {
       "--connect",
@@ -234,14 +235,13 @@ public void testHCatImportWithAvroFile() throws Exception {
       "table",
       "--as-avrodatafile",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithCreateTable() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -251,6 +251,7 @@ public void testHCatImportWithCreateTable() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithDropAndCreateTable() throws Exception {
     String[] args = {
       "--connect",
@@ -265,6 +266,7 @@ public void testHCatImportWithDropAndCreateTable() throws Exception {
     importTool.validateOptions(opts);
   }
 
+  @Test
   public void testHCatImportWithCreateTableAndDropAndCreateTable()
       throws Exception {
     String[] args = {
@@ -278,14 +280,12 @@ public void testHCatImportWithCreateTableAndDropAndCreateTable()
       "--drop-and-create-hcatalog-table",
     };
     SqoopOptions opts = parseImportArgs(args);
-    try {
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithStorageStanza() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -296,6 +296,7 @@ public void testHCatImportWithStorageStanza() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithDatabase() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -306,6 +307,7 @@ public void testHCatImportWithDatabase() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithPartKeys() throws Exception {
     String[] args = {
       "--hcatalog-table",
@@ -318,6 +320,7 @@ public void testHCatImportWithPartKeys() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
   }
 
+  @Test
   public void testHCatImportWithOnlyHCatKeys() throws Exception {
     String[] args = {
       "--connect",
@@ -329,15 +332,13 @@ public void testHCatImportWithOnlyHCatKeys() throws Exception {
       "--hcatalog-partition-keys",
       "k1,k2",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithMismatchedKeysAndVals() throws Exception {
     String[] args = {
       "--connect",
@@ -351,14 +352,13 @@ public void testHCatImportWithMismatchedKeysAndVals() throws Exception {
       "--hcatalog-partition-values",
       "v1",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithEmptyKeysAndVals() throws Exception {
     String[] args = {
       "--connect",
@@ -372,14 +372,13 @@ public void testHCatImportWithEmptyKeysAndVals() throws Exception {
       "--hcatalog-partition-values",
       ",v1",
     };
-    try {
     SqoopOptions opts = parseImportArgs(args);
+
+    thrown.expect(SqoopOptions.InvalidOptionsException.class);
     importTool.validateOptions(opts);
-      fail("Expected InvalidOptionsException");
-    } catch (SqoopOptions.InvalidOptionsException ioe) {
-      // expected.
-    }
   }
 
+  @Test
   public void testHCatImportWithBothHCatAndHivePartOptions() throws Exception {
     String[] args = {
       "--connect",
@@ -400,4 +399,9 @@ public void testHCatImportWithBothHCatAndHivePartOptions() throws Exception {
     SqoopOptions opts = parseImportArgs(args);
     importTool.validateOptions(opts);
   }
+
+  //workaround: ant kept falling back to JUnit3
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestHCatalogBasic.class);
+  }
 }
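Note where the expectation is armed in these HCatalog tests: parseImportArgs(args) moves out of the old try block, and thrown.expect(...) is set only immediately before validateOptions(opts). Arming the rule as late as possible means an unexpected failure during argument parsing still fails the test instead of accidentally satisfying the expectation. A compressed sketch of the placement (the helpers and option type are illustrative stand-ins, not Sqoop's API):

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class LateArmingSketchTest {

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Test
  public void validationRejectsConflictingOptions() throws Exception {
    // Setup runs un-guarded: if parsing itself blows up with the expected
    // exception type, the test should fail, not pass.
    Options opts = parse("--conflicting", "--flags");

    // Arm the rule only for the call that is supposed to fail.
    thrown.expect(IllegalArgumentException.class);
    validate(opts);
  }

  // Illustrative stand-ins for parseImportArgs/validateOptions.
  static class Options {}
  private Options parse(String... args) { return new Options(); }
  private void validate(Options opts) {
    throw new IllegalArgumentException("conflicting options");
  }
}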
TestTextSplitter.java

@@ -23,15 +23,24 @@
 
 import com.cloudera.sqoop.mapreduce.db.TextSplitter;
 
+import junit.framework.JUnit4TestAdapter;
 import junit.framework.TestCase;
-import junit.framework.Test;
 import org.apache.sqoop.validation.ValidationException;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
 /**
  * Test that the TextSplitter implementation creates a sane set of splits.
 */
+@RunWith(JUnit4.class)
 public class TestTextSplitter extends TestCase {
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   public String formatArray(Object [] ar) {
     StringBuilder sb = new StringBuilder();
     sb.append("[");
@@ -70,30 +79,35 @@ public void assertArrayEquals(Object [] expected, Object [] actual) {
     }
   }
 
+  @Test
   public void testStringConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     BigDecimal emptyBigDec = splitter.stringToBigDecimal("");
     assertEquals(BigDecimal.ZERO, emptyBigDec);
   }
 
+  @Test
   public void testBigDecConvertEmpty() {
     TextSplitter splitter = new TextSplitter();
     String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO);
     assertEquals("", emptyStr);
   }
 
+  @Test
   public void testConvertA() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A"));
     assertEquals("A", out);
   }
 
+  @Test
   public void testConvertZ() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z"));
     assertEquals("Z", out);
   }
 
+  @Test
   public void testConvertThreeChars() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(
@@ -101,6 +115,7 @@ public void testConvertThreeChars() {
     assertEquals("abc", out);
   }
 
+  @Test
   public void testConvertStr() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(
@@ -108,6 +123,7 @@ public void testConvertStr() {
     assertEquals("big str", out);
   }
 
+  @Test
   public void testConvertChomped() {
     TextSplitter splitter = new TextSplitter();
     String out = splitter.bigDecimalToString(
@@ -115,6 +131,7 @@ public void testConvertChomped() {
     assertEquals("AVeryLon", out);
   }
 
+  @Test
   public void testAlphabetSplit() throws SQLException, ValidationException {
     // This should give us 25 splits, one per letter.
     TextSplitter splitter = new TextSplitter();
@@ -125,17 +142,15 @@ public void testAlphabetSplit() throws SQLException, ValidationException {
     assertArrayEquals(expected, splits.toArray(new String [0]));
   }
 
-  public void testAlphabetSplitWhenMinStringGreaterThanMaxString() throws SQLException {
+  @Test
+  public void testAlphabetSplitWhenMinStringGreaterThanMaxString() throws SQLException, ValidationException {
     TextSplitter splitter = new TextSplitter();
-    try {
+    thrown.expect(ValidationException.class);
     splitter.split(4, "Z", "A", "");
-      fail();
-    } catch (ValidationException e) {
-      // expected
-      assertTrue(true);
-    }
   }
 
+  @Test
   public void testCommonPrefix() throws SQLException, ValidationException {
     // Splits between 'Hand' and 'Hardy'
     TextSplitter splitter = new TextSplitter();
@@ -148,6 +163,7 @@ public void testCommonPrefix() throws SQLException, ValidationException {
     assertEquals(6, splits.size());
   }
 
+  @Test
   public void testNChar() throws SQLException {
     // Splits between 'Hand' and 'Hardy'
     NTextSplitter splitter = new NTextSplitter();
@@ -156,4 +172,9 @@ public void testNChar() throws SQLException {
     assertEquals(false, splitter2.isUseNCharStrings());
   }
 
+  //workaround: ant kept falling back to JUnit3
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TestTextSplitter.class);
+  }
+
 }
TextSplitterHadoopConfIntegrationTest.java

@@ -20,6 +20,7 @@
 import java.sql.ResultSet;
 import java.util.List;
 
+import junit.framework.JUnit4TestAdapter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
@@ -29,24 +30,33 @@
 import com.cloudera.sqoop.testutil.MockResultSet;
 
 import junit.framework.TestCase;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
 
+@RunWith(JUnit4.class)
 public class TextSplitterHadoopConfIntegrationTest extends TestCase {
   private static final String TEXT_COL_NAME = "text_col_name";
 
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Test
   public void testDefaultValueOfUnsetBooleanParam() throws Exception {
     Configuration conf = Job.getInstance().getConfiguration();
     TextSplitter splitter = new TextSplitter();
     ResultSet rs = new MockResultSet();
-    try {
+    String containedByExpectedExceptionMessage = TextSplitter.ALLOW_TEXT_SPLITTER_PROPERTY;
+
+    thrown.expect(ValidationException.class);
+    thrown.expectMessage(containedByExpectedExceptionMessage);
     splitter.split(conf, rs, TEXT_COL_NAME);
-      fail();
-    } catch (ValidationException e) {
-      // expected to throw ValidationException with a message about the
-      // "i-know-what-i-am-doing" prop
-      assertTrue(e.getMessage().contains(TextSplitter.ALLOW_TEXT_SPLITTER_PROPERTY));
-    }
   }
 
+  @Test
   public void testBooleanParamValue() throws Exception {
     Configuration conf = Job.getInstance().getConfiguration();
     conf.set(TextSplitter.ALLOW_TEXT_SPLITTER_PROPERTY, "true");
@@ -55,5 +65,10 @@ public void testBooleanParamValue() throws Exception {
     List<InputSplit> splits = splitter.split(conf, rs, TEXT_COL_NAME);
     assertFalse(splits.isEmpty());
   }
+
+  //workaround: ant kept falling back to JUnit3
+  public static junit.framework.Test suite() {
+    return new JUnit4TestAdapter(TextSplitterHadoopConfIntegrationTest.class);
+  }
 }
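This last file is the one place in the patch that uses expectMessage. ExpectedException.expectMessage(String) performs a substring match on the thrown exception's message, which is exactly what the old assertTrue(e.getMessage().contains(...)) verified. A minimal sketch of the equivalence (the property string is illustrative):

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class ExpectMessageSketchTest {

  // Hypothetical stand-in for TextSplitter.ALLOW_TEXT_SPLITTER_PROPERTY.
  private static final String PROP = "i.know.what.i.am.doing";

  @Rule
  public ExpectedException thrown = ExpectedException.none();

  @Test
  public void messageSubstringIsEnough() {
    // expectMessage(String) passes when the thrown exception's message
    // merely contains the given text, mirroring the old
    // assertTrue(e.getMessage().contains(PROP)) check.
    thrown.expect(IllegalStateException.class);
    thrown.expectMessage(PROP);

    throw new IllegalStateException(
        "Generating splits for a textual column is risky; set " + PROP);
  }
}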