diff --git a/COMPILING.txt b/COMPILING.txt
index 5993d1db..ddbed30a 100644
--- a/COMPILING.txt
+++ b/COMPILING.txt
@@ -55,9 +55,9 @@ If you want to build everything (including the documentation), type
+ant package+. This will appear in the
+build/sqoop-(version)/+ directory.
-Sqoop is built against the latest Hadoop distribution available from Cloudera.
-These dependencies are obtained via IVY which downloads the necessary binaries
-from Cloudera maven repository.
+By default, this version of Sqoop is built against Hadoop 0.23, which is
+available from the Apache Maven repository. These dependencies are obtained
+via Ivy, which downloads the necessary binaries.
== Testing Sqoop
@@ -274,3 +274,12 @@ will allow you to edit Sqoop sources in Eclipse with all the library
dependencies correctly resolved. To compile the jars, you should still
use ant.
+
+== Using a specific version of Hadoop
+
+Sqoop now defaults to building against Hadoop 0.23, available from the Apache
+Maven repository. To build against Hadoop 0.20 instead, for example, run:
+
+++++
+ant test -Dhadoopversion=20
+++++
diff --git a/build.xml b/build.xml
index 5e87a151..f8a6e0d2 100644
--- a/build.xml
+++ b/build.xml
@@ -24,6 +24,13 @@
xmlns:artifact="urn:maven-artifact-ant"
xmlns:ivy="antlib:org.apache.ivy.ant">
+
+  <!-- Hadoop version to build against. The default, 23, resolves the
+       Apache Hadoop 0.23 artifacts; pass -Dhadoopversion=20 on the
+       ant command line to build against Hadoop 0.20 instead.
+  -->
+  <property name="hadoopversion" value="23" />
+
@@ -150,10 +157,22 @@
-->
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -184,13 +203,6 @@
-
-
-
-
-
-
-
@@ -1062,20 +1074,20 @@
-
+
-
+
-
+
@@ -1083,7 +1095,7 @@
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
sync="true" />
+ conf="hadoop${hadoopversion}test" />
diff --git a/ivy.xml b/ivy.xml
index d686c764..0a40c225 100644
--- a/ivy.xml
+++ b/ivy.xml
@@ -36,14 +36,13 @@ under the License.
-
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ rev="${hadoop-core.version}" conf="hadoop20->default"/>
+ rev="${hadoop-core.version}" conf="hadoop20test->default"/>
+ rev="${hbase.version}" conf="hbase->default">
@@ -121,9 +137,12 @@ under the License.
+
+
+
diff --git a/ivy/libraries.properties b/ivy/libraries.properties
index dbbcb3b9..19e2ed70 100644
--- a/ivy/libraries.properties
+++ b/ivy/libraries.properties
@@ -28,15 +28,14 @@ commons-io.version=1.4
commons-lang.version=2.4
commons-logging.version=1.0.4
-# Cloudera Distribution dependency version
-hadoop-core.cloudera.version=0.20.2-cdh3u1
-
+hadoop-core.version=0.20.2-cdh3u1
+hadoop-common.version=0.23.0-SNAPSHOT
hbase.version=0.90.3-cdh3u1
zookeeper.version=3.3.3-cdh3u1
hsqldb.version=1.8.0.10
-ivy.version=2.0.0-rc2
+ivy.version=2.1.0
junit.version=4.5
diff --git a/src/test/com/cloudera/sqoop/TestCompression.java b/src/test/com/cloudera/sqoop/TestCompression.java
index 0030373d..048b87d0 100644
--- a/src/test/com/cloudera/sqoop/TestCompression.java
+++ b/src/test/com/cloudera/sqoop/TestCompression.java
@@ -163,6 +163,7 @@ public void runTextCompressionTest(CompressionCodec codec, int expectedNum)
if (codec == null) {
codec = new GzipCodec();
+ ReflectionUtils.setConf(codec, getConf());
}
Path p = new Path(getDataFilePath().toString()
+ codec.getDefaultExtension());
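
Note on the `ReflectionUtils.setConf` fix above: a codec instantiated directly
with `new` never receives a Hadoop `Configuration`, whereas codecs obtained
through Hadoop's reflection/factory path are configured automatically. A
minimal sketch of the two paths (illustrative only; `CodecConfSketch` is not
part of the patch):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CodecConfSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // Factory-style creation: newInstance() calls setConf() on any
    // Configurable object, so the codec comes back fully initialized.
    CompressionCodec viaFactory =
        ReflectionUtils.newInstance(GzipCodec.class, conf);

    // Direct construction: the codec's Configuration stays null until
    // setConf() is called explicitly, which is what the patch adds.
    GzipCodec direct = new GzipCodec();
    ReflectionUtils.setConf(direct, conf);

    System.out.println(viaFactory.getDefaultExtension()); // ".gz"
    System.out.println(direct.getDefaultExtension());     // ".gz"
  }
}
```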
diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
index 36d7f3d2..3f5899e9 100644
--- a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
+++ b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.VersionInfo;
import org.junit.After;
import org.junit.Before;
@@ -120,6 +121,9 @@ private void startMaster() throws Exception {
@Override
@Before
public void setUp() {
+    // The HBase mini-cluster these tests rely on is only expected to
+    // work against Hadoop 0.20, so skip setup on other versions.
+    if (!isHadoop20()) {
+      return;
+    }
HBaseTestCase.recordTestBuildDataProperty();
try {
startMaster();
@@ -143,6 +147,9 @@ public void shutdown() throws Exception {
@Override
@After
public void tearDown() {
+ if (!isHadoop20()) {
+ return;
+ }
try {
shutdown();
} catch (Exception e) {
@@ -173,4 +180,8 @@ protected void verifyHBaseCell(String tableName, String rowKey,
table.close();
}
}
+
+  /** @return true when running against a Hadoop 0.20.x build. */
+  protected boolean isHadoop20() {
+    return VersionInfo.getVersion().startsWith("0.20");
+  }
}
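
The new guard keys off `org.apache.hadoop.util.VersionInfo`, which reports the
version baked into the Hadoop jars on the classpath rather than anything about
a running cluster. A small sketch of how the check behaves (the version
strings are examples):

```java
import org.apache.hadoop.util.VersionInfo;

public class VersionCheckSketch {
  public static void main(String[] args) {
    // Reads build metadata from the Hadoop jar on the classpath,
    // e.g. "0.20.2-cdh3u1" or "0.23.0-SNAPSHOT".
    String version = VersionInfo.getVersion();

    // The patch's check: true for any 0.20.x build.
    boolean isHadoop20 = version.startsWith("0.20");

    System.out.println(version + " -> isHadoop20=" + isHadoop20);
  }
}
```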
diff --git a/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java b/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
index d5e15e8b..2108e28f 100644
--- a/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
+++ b/src/test/com/cloudera/sqoop/hbase/TestHBaseImport.java
@@ -29,6 +29,9 @@ public class TestHBaseImport extends HBaseTestCase {
@Test
public void testBasicUsage() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
// Create the HBase table in Sqoop as we run the job.
String [] argv = getArgv(true, "BasicUsage", "BasicColFam", true, null);
String [] types = { "INT", "INT" };
@@ -40,6 +43,9 @@ public void testBasicUsage() throws IOException {
@Test
public void testMissingTableFails() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
// Test that if the table doesn't exist, we fail unless we
// explicitly create the table.
String [] argv = getArgv(true, "MissingTable", "MissingFam", false, null);
@@ -56,6 +62,9 @@ public void testMissingTableFails() throws IOException {
@Test
public void testOverwriteSucceeds() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
// Test that we can create a table and then import immediately
// back on top of it without problem.
String [] argv = getArgv(true, "OverwriteT", "OverwriteF", true, null);
@@ -71,6 +80,9 @@ public void testOverwriteSucceeds() throws IOException {
@Test
public void testStrings() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
String [] argv = getArgv(true, "stringT", "stringF", true, null);
String [] types = { "INT", "VARCHAR(32)" };
String [] vals = { "0", "'abc'" };
@@ -81,6 +93,9 @@ public void testStrings() throws IOException {
@Test
public void testNulls() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
String [] argv = getArgv(true, "nullT", "nullF", true, null);
String [] types = { "INT", "INT", "INT" };
String [] vals = { "0", "42", "null" };
@@ -96,6 +111,9 @@ public void testNulls() throws IOException {
@Test
public void testExitFailure() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
String [] types = { "INT", "INT", "INT" };
String [] vals = { "0", "42", "43" };
createTableWithColTypes(types, vals);
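
Each test above repeats the early-return guard because a plain `return` from
`setUp()` does not prevent JUnit from running the test bodies. One possible
alternative, not used by this patch, is JUnit's `Assume` mechanism, which
aborts both setup and the tests when the assumption fails, keeping the check
in one place without counting the skipped tests as failures:

```java
import static org.junit.Assume.assumeTrue;

import org.apache.hadoop.util.VersionInfo;
import org.junit.Before;
import org.junit.Test;

public class AssumeGuardSketch {
  @Before
  public void setUp() {
    // If the assumption fails, JUnit skips every test in this class;
    // no per-test guard is needed.
    assumeTrue(VersionInfo.getVersion().startsWith("0.20"));
    // ... start the HBase mini-cluster here ...
  }

  @Test
  public void testSomething() {
    // Runs only against Hadoop 0.20.x builds.
  }
}
```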
diff --git a/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java b/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
index 74eff505..705dcae0 100644
--- a/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
+++ b/src/test/com/cloudera/sqoop/hbase/TestHBaseQueryImport.java
@@ -29,6 +29,9 @@ public class TestHBaseQueryImport extends HBaseTestCase {
@Test
public void testImportFromQuery() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
String [] types = { "INT", "INT", "INT" };
String [] vals = { "0", "42", "43" };
createTableWithColTypes(types, vals);
@@ -47,6 +50,9 @@ public void testImportFromQuery() throws IOException {
@Test
public void testExitFailure() throws IOException {
+ if (!isHadoop20()) {
+ return;
+ }
String [] types = { "INT", "INT", "INT" };
String [] vals = { "0", "42", "43" };
createTableWithColTypes(types, vals);
diff --git a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java b/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
index 7528d333..7b3ddead 100644
--- a/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
+++ b/src/test/com/cloudera/sqoop/mapreduce/db/TestDataDrivenDBInputFormat.java
@@ -35,6 +35,7 @@
import org.apache.hadoop.mapreduce.lib.db.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.VersionInfo;
/**
* Test aspects of DataDrivenDBInputFormat.
@@ -82,11 +83,17 @@ private void initialize(String driverClassName, String url)
}
public void setUp() throws Exception {
+ if (!isHadoop20()) {
+ return;
+ }
initialize(DRIVER_CLASS, DB_URL);
super.setUp();
}
public void tearDown() throws Exception {
+ if (!isHadoop20()) {
+ return;
+ }
super.tearDown();
shutdown();
}
@@ -165,6 +172,9 @@ public void map(Object k, Object v, Context c)
}
public void testDateSplits() throws Exception {
+ if (!isHadoop20()) {
+ return;
+ }
Statement s = connection.createStatement();
final String DATE_TABLE = "datetable";
final String COL = "foo";
@@ -219,4 +229,8 @@ public void testDateSplits() throws Exception {
s.close();
}
}
+
+  /** @return true when running against a Hadoop 0.20.x build. */
+  protected boolean isHadoop20() {
+    return VersionInfo.getVersion().startsWith("0.20");
+  }
}
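
Finally, note that `isHadoop20()` is now defined twice, in `HBaseTestCase` and
in `TestDataDrivenDBInputFormat`. If more tests become version-sensitive,
hoisting the check into a shared helper would keep the copies from drifting; a
sketch, assuming a hypothetical `com.cloudera.sqoop.testutil.HadoopVersionUtil`
that is not part of the patch:

```java
package com.cloudera.sqoop.testutil;

import org.apache.hadoop.util.VersionInfo;

/**
 * Hypothetical shared home for the Hadoop version check that this
 * patch duplicates across two test classes.
 */
public final class HadoopVersionUtil {

  private HadoopVersionUtil() {
    // Utility class; not instantiable.
  }

  /** @return true when the Hadoop jars on the classpath are 0.20.x. */
  public static boolean isHadoop20() {
    return VersionInfo.getVersion().startsWith("0.20");
  }
}
```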