diff --git a/build.xml b/build.xml
index 21fd4063..1be4e464 100644
--- a/build.xml
+++ b/build.xml
@@ -125,9 +125,9 @@
-
-
-
+
+
+
@@ -150,7 +150,7 @@
-
+
diff --git a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
index a93114f6..ac8758bf 100644
--- a/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
+++ b/src/java/org/apache/sqoop/mapreduce/HBaseImportJob.java
@@ -155,10 +155,25 @@ protected void jobSetup(Job job) throws IOException, ImportException {
       throw new ImportException(
           "Import to HBase error: Column family not specified");
     }
+    Method m = null;
+    try {
+      m = HBaseConfiguration.class.getMethod("merge",
+        Configuration.class, Configuration.class);
+    } catch (NoSuchMethodException nsme) {
+    }

-    // Add HBase configuration files to this conf object.
-    Configuration newConf = HBaseConfiguration.create(conf);
-    HBaseConfiguration.merge(conf, newConf);
+    if (m != null) {
+      // Add HBase configuration files to this conf object.
+
+      Configuration newConf = HBaseConfiguration.create(conf);
+      try {
+        m.invoke(null, conf, newConf);
+      } catch (Exception e) {
+        throw new ImportException(e);
+      }
+    } else {
+      HBaseConfiguration.addHbaseResources(conf);
+    }

     HBaseAdmin admin = new HBaseAdmin(conf);
diff --git a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
index 613ee7ad..06141549 100644
--- a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
+++ b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetFTPRecordReader.java
@@ -201,7 +201,7 @@ public void setUp() throws IOException {
     conf.setClass(DBConfiguration.INPUT_CLASS_PROPERTY, DummySqoopRecord.class,
         DBWritable.class);

-    Job job = Job.getInstance(conf);
+    Job job = new Job(conf);
     mfDIS = new MainframeDatasetInputSplit();
     mfDIS.addDataset("test1");
     mfDIS.addDataset("test2");
diff --git a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
index 70958e09..e386fb0b 100644
--- a/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
+++ b/src/test/org/apache/sqoop/mapreduce/mainframe/TestMainframeDatasetInputFormat.java
@@ -96,7 +96,7 @@ public void testRetrieveDatasets() throws IOException {
     String dsName = "dsName1";
     conf.set(MainframeConfiguration.MAINFRAME_INPUT_DATASET_NAME, dsName);

-    Job job = Job.getInstance(conf);
+    Job job = new Job(conf);
     format.getSplits(job);
     List<InputSplit> splits = new ArrayList<InputSplit>();
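Note on the HBaseImportJob hunk above: HBaseConfiguration.merge(Configuration, Configuration) only exists in newer HBase releases, so a direct call would not compile against an older HBase on the classpath. The patch therefore probes for the method reflectively and falls back to HBaseConfiguration.addHbaseResources(conf). A minimal standalone sketch of the same probe-then-invoke pattern follows; only the HBaseConfiguration calls come from the patch, and the CompatShim class name is hypothetical.

import java.lang.reflect.Method;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

/** Illustrative sketch only; not part of the patch. */
public final class CompatShim {
  private CompatShim() { }

  /** Pull HBase resource settings into conf on both old and new HBase. */
  public static void mergeHBaseResources(Configuration conf) throws Exception {
    Method merge = null;
    try {
      // Probe for the static merge(Configuration, Configuration) method.
      merge = HBaseConfiguration.class.getMethod("merge",
          Configuration.class, Configuration.class);
    } catch (NoSuchMethodException nsme) {
      // Older HBase: no merge(); use the fallback below.
    }
    if (merge != null) {
      // Newer HBase: build a conf with HBase resources, then merge it back.
      Configuration newConf = HBaseConfiguration.create(conf);
      merge.invoke(null, conf, newConf); // static method, so target is null
    } else {
      // Older HBase: addHbaseResources() loads the HBase resources directly.
      HBaseConfiguration.addHbaseResources(conf);
    }
  }
}

Leaving the Method null on NoSuchMethodException keeps the decision at runtime, so the same jar works against whichever HBase version is present.
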
diff --git a/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java b/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
index b8c4538e..924c1163 100644
--- a/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
+++ b/src/test/org/apache/sqoop/mapreduce/sqlserver/SqlServerUpsertOutputFormatTest.java
@@ -1,13 +1,15 @@
 package org.apache.sqoop.mapreduce.sqlserver;

 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.lang.reflect.Constructor;

 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.lib.db.DBConfiguration;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
-import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 import org.apache.sqoop.manager.SQLServerManager;
 import org.apache.sqoop.mapreduce.ExportJobBase;
 import org.apache.sqoop.mapreduce.sqlserver.SqlServerUpsertOutputFormat.SqlServerUpsertRecordWriter;
@@ -19,7 +21,8 @@ public class SqlServerUpsertOutputFormatTest {
   @Test
   public void Merge_statement_is_parameterized_correctly() throws Exception {
     Configuration conf = new Configuration();
-    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY, org.hsqldb.jdbcDriver.class.getName());
+    conf.set(DBConfiguration.DRIVER_CLASS_PROPERTY,
+      org.hsqldb.jdbcDriver.class.getName());
     conf.set(DBConfiguration.URL_PROPERTY, "jdbc:hsqldb:.");
     conf.set(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY, "");
     conf.set(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, "");
@@ -27,18 +30,49 @@ public void Merge_statement_is_parameterized_correctly() throws Exception {
     String[] columnNames = { "FirstColumn", "SecondColumn", "ThirdColumn" };
     String[] updateKeyColumns = { "FirstColumn" };
     conf.set(DBConfiguration.OUTPUT_TABLE_NAME_PROPERTY, tableName);
-    conf.set(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY, StringUtils.join(columnNames, ','));
-    conf.set(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY, StringUtils.join(updateKeyColumns, ','));
+    conf.set(DBConfiguration.OUTPUT_FIELD_NAMES_PROPERTY,
+      StringUtils.join(columnNames, ','));
+    conf.set(ExportJobBase.SQOOP_EXPORT_UPDATE_COL_KEY,
+      StringUtils.join(updateKeyColumns, ','));
     conf.set(SQLServerManager.TABLE_HINTS_PROP, "NOLOCK");
     conf.set(SQLServerManager.IDENTITY_INSERT_PROP, "true");
-    TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
-    SqlServerUpsertOutputFormat outputFormat = new SqlServerUpsertOutputFormat();
-    SqlServerUpsertRecordWriter recordWriter = outputFormat.new SqlServerUpsertRecordWriter(context);
-    assertEquals("SET IDENTITY_INSERT #myTable ON " +
-      "MERGE INTO #myTable AS _target USING ( VALUES ( ?, ?, ? ) ) AS _source ( FirstColumn, SecondColumn, ThirdColumn ) ON _source.FirstColumn = _target.FirstColumn" +
-      " WHEN MATCHED THEN UPDATE SET _target.SecondColumn = _source.SecondColumn, _target.ThirdColumn = _source.ThirdColumn" +
-      " WHEN NOT MATCHED THEN INSERT ( FirstColumn, SecondColumn, ThirdColumn ) VALUES " +
-      "( _source.FirstColumn, _source.SecondColumn, _source.ThirdColumn ) " +
-      "OPTION (NOLOCK);", recordWriter.getUpdateStatement());
+    TaskAttemptContext context = null;
+    Class cls = null;
+    try {
+      cls = Class
+        .forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
+    } catch (ClassNotFoundException cnfe) {
+      // Not Hadoop 2.x
+    }
+    if (cls == null) {
+      try {
+        // Hadoop 1.x name (no .task subpackage).
+        cls = Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
+      } catch (ClassNotFoundException cnfe) {
+        // Neither variant found; assertNotNull below will fail.
+      }
+    }
+    assertNotNull(cls);
+    Constructor c = cls.getConstructor(Configuration.class,
+      TaskAttemptID.class);
+    context = (TaskAttemptContext) c.newInstance(conf, new TaskAttemptID());
+    SqlServerUpsertOutputFormat outputFormat =
+      new SqlServerUpsertOutputFormat();
+    SqlServerUpsertRecordWriter recordWriter =
+      outputFormat.new SqlServerUpsertRecordWriter(context);
) )" + + " AS _source ( FirstColumn, SecondColumn, ThirdColumn ) ON " + + "_source.FirstColumn = _target.FirstColumn" + + " WHEN MATCHED THEN UPDATE SET _target.SecondColumn = " + + "_source.SecondColumn, _target.ThirdColumn = _source.ThirdColumn" + + " WHEN NOT MATCHED THEN INSERT ( FirstColumn, SecondColumn, " + + " ThirdColumn ) VALUES " + + "( _source.FirstColumn, _source.SecondColumn, _source.ThirdColumn ) " + + "OPTION (NOLOCK);", recordWriter.getUpdateStatement()); } }