Mirror of https://github.com/apache/sqoop.git
SQOOP-2021: Sqoop2: Generic JDBC Connector extractor uses wrong nulls
(Veena Basavaraj via Abraham Elmahrek)
commit cc61ed5344
parent 4ac3c3862b
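In short: the generic JDBC extractor replaced every SQL NULL returned by ResultSet.getObject() with the string constant SQL_NULL_VALUE ("null"), so downstream consumers could not tell a real NULL from the literal text "null". The fix writes a genuine Java null into the extracted record array, removes the now-unused constant, and adds a TestExtractor case that asserts nullable columns come back as real nulls. A minimal standalone sketch of the behavioral difference (not Sqoop code; class and variable names are illustrative):

// Standalone illustration of the null-handling change; only the
// before/after assignment mirrors the diff below.
public class NullHandlingSketch {
  public static void main(String[] args) {
    Object valueFromJdbc = null;  // what ResultSet.getObject() yields for SQL NULL

    // Old behavior: SQL NULL was replaced by the string "null"
    Object oldRecordValue = (valueFromJdbc == null) ? "null" : valueFromJdbc;
    // New behavior: SQL NULL stays a real null in the extracted record
    Object newRecordValue = (valueFromJdbc == null) ? null : valueFromJdbc;

    System.out.println(oldRecordValue == null);  // false -> indistinguishable from the text "null"
    System.out.println(newRecordValue == null);  // true  -> NULL is detectable downstream
  }
}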
GenericJdbcConnectorConstants.java
@@ -53,8 +53,6 @@ public final class GenericJdbcConnectorConstants {
 
   public static final String SUBQUERY_ALIAS = "SQOOP_SUBQUERY_ALIAS";
 
-  public static final String SQL_NULL_VALUE = "null";
-
   private GenericJdbcConnectorConstants() {
     // Disable explicit object creation
   }
GenericJdbcExtractor.java
@@ -20,7 +20,6 @@
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
-import java.util.List;
 
 import org.apache.log4j.Logger;
 import org.apache.sqoop.common.SqoopException;
@@ -68,12 +67,12 @@ public void extract(ExtractorContext context, LinkConfiguration linkConfig,
       while (resultSet.next()) {
         Object[] array = new Object[columnCount];
         for (int i = 0; i < columnCount; i++) {
-          // check type of the column
-          Column schemaColumn = schemaColumns[i];
           if(resultSet.getObject(i + 1) == null) {
-            array[i] = GenericJdbcConnectorConstants.SQL_NULL_VALUE ;
+            array[i] = null ;
             continue;
           }
+          // check type of the column
+          Column schemaColumn = schemaColumns[i];
           switch (schemaColumn.getType()) {
           case DATE:
             // convert the sql date to JODA time as prescribed the Sqoop IDF spec
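Besides swapping SQL_NULL_VALUE for a real null, the change also moves the schema-column lookup (and its comment) below the null check, so a NULL value short-circuits with continue before any type-specific conversion, such as the DATE-to-Joda conversion, is attempted.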
TestExtractor.java
@@ -17,6 +17,7 @@
  */
 package org.apache.sqoop.connector.jdbc;
 
+import static org.testng.AssertJUnit.assertNull;
 import org.apache.sqoop.common.MutableContext;
 import org.apache.sqoop.common.MutableMapContext;
 import org.apache.sqoop.common.SqoopException;
@@ -42,6 +43,7 @@
 public class TestExtractor {
 
   private final String tableName;
+  private final String nullDataTableName;
 
   private GenericJdbcExecutor executor;
 
@@ -52,6 +54,7 @@ public class TestExtractor {
 
   public TestExtractor() {
     tableName = getClass().getSimpleName().toUpperCase();
+    nullDataTableName = getClass().getSimpleName().toUpperCase() + "NULL";
   }
 
   @BeforeMethod(alwaysRun = true)
@@ -174,14 +177,50 @@ public void testIncorrectSchemaColumnSize() throws Exception {
         "SELECT SQOOP_SUBQUERY_ALIAS.ICOL,SQOOP_SUBQUERY_ALIAS.VCOL FROM " + "(SELECT * FROM "
             + executor.delimitIdentifier(tableName) + " WHERE ${CONDITIONS}) SQOOP_SUBQUERY_ALIAS");
 
-    GenericJdbcPartition partition;
+    GenericJdbcPartition partition = new GenericJdbcPartition();
 
     Extractor extractor = new GenericJdbcExtractor();
     DummyWriter writer = new DummyWriter();
     Schema schema = new Schema("TestIncorrectColumns");
     ExtractorContext extractorContext = new ExtractorContext(context, writer, schema);
 
-    partition = new GenericJdbcPartition();
+    partition.setConditions("-50 <= ICOL AND ICOL < -16");
+    extractor.extract(extractorContext, linkConfig, jobConfig, partition);
+
+  }
+
+  @Test
+  public void testNullValueExtracted() throws Exception {
+
+    if (!executor.existTable(nullDataTableName)) {
+      executor.executeUpdate("CREATE TABLE " + executor.delimitIdentifier(nullDataTableName)
+          + "(ICOL INTEGER PRIMARY KEY, DCOL DOUBLE, VCOL VARCHAR(20), DATECOL DATE)");
+
+      for (int i = 0; i < NUMBER_OF_ROWS; i++) {
+        int value = i;
+        String sql = "INSERT INTO " + executor.delimitIdentifier(nullDataTableName) + " VALUES(" + value + ",null,null,null)";
+        executor.executeUpdate(sql);
+      }
+    }
+    MutableContext context = new MutableMapContext();
+
+    LinkConfiguration linkConfig = new LinkConfiguration();
+
+    linkConfig.linkConfig.jdbcDriver = GenericJdbcTestConstants.DRIVER;
+    linkConfig.linkConfig.connectionString = GenericJdbcTestConstants.URL;
+
+    FromJobConfiguration jobConfig = new FromJobConfiguration();
+    context.setString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_FROM_DATA_SQL,
+        "SELECT * FROM " + executor.delimitIdentifier(nullDataTableName) + " WHERE ${CONDITIONS}");
+
+    Extractor extractor = new GenericJdbcExtractor();
+    DummyNullDataWriter writer = new DummyNullDataWriter();
+    Schema schema = new Schema("TestExtractor");
+    schema.addColumn(new FixedPoint("c1",2L, true)).addColumn(new Decimal("c2")).addColumn(new Text("c3")).addColumn(new Date("c4"));
+
+    ExtractorContext extractorContext = new ExtractorContext(context, writer, schema);
+
+    GenericJdbcPartition partition = new GenericJdbcPartition();
     partition.setConditions("-50 <= ICOL AND ICOL < -16");
     extractor.extract(extractorContext, linkConfig, jobConfig, partition);
 
@@ -219,4 +258,27 @@ public void writeRecord(Object content) {
       fail("This method should not be invoked.");
     }
   }
+
+  public class DummyNullDataWriter extends DataWriter {
+
+    @Override
+    public void writeArrayRecord(Object[] array) {
+      for (int i = 0; i < array.length; i++) {
+        // primary key cant be null
+        if (i > 0) {
+          assertNull(array[i]);
+        }
+      }
+    }
+
+    @Override
+    public void writeStringRecord(String text) {
+      fail("This method should not be invoked.");
+    }
+
+    @Override
+    public void writeRecord(Object content) {
+      fail("This method should not be invoked.");
+    }
+  }
 }
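The new DummyNullDataWriter receives each extracted row and asserts that every column other than the integer primary key arrives as a real null. A hedged sketch of how a consumer of the extracted records can now branch on NULLs (illustrative only, not part of the patch):

// Hypothetical consumer of an extracted record array, assuming real nulls:
static String render(Object[] record) {
  StringBuilder sb = new StringBuilder();
  for (Object field : record) {
    // A null field now unambiguously means SQL NULL, not the text "null".
    sb.append(field == null ? "\\N" : field.toString()).append(',');
  }
  return sb.toString();
}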