diff --git a/ivy.xml b/ivy.xml index af4f0118..11156131 100644 --- a/ivy.xml +++ b/ivy.xml @@ -68,9 +68,9 @@ under the License. - - diff --git a/ivy/libraries.properties b/ivy/libraries.properties index ec0d2e99..a4589150 100644 --- a/ivy/libraries.properties +++ b/ivy/libraries.properties @@ -29,10 +29,10 @@ commons-lang.version=2.4 commons-logging.version=1.0.4 # Cloudera Distribution dependency version -hadoop-core.cloudera.version=0.20.2-737 +hadoop-core.cloudera.version=0.20.2-cdh3u1 -hbase.version=0.89.20100924-28 -zookeeper.version=3.3.1+7 +hbase.version=0.90.3-cdh3u1 +zookeeper.version=3.3.3-cdh3u1 hsqldb.version=1.8.0.10 diff --git a/pom.xml b/pom.xml index 5ce184b5..da0def82 100644 --- a/pom.xml +++ b/pom.xml @@ -82,9 +82,9 @@ limitations under the License. 0.20.203.0 --> - 0.20.2-737 + 0.20.2-cdh3u1 - 0.90.3 + 0.90.3-cdh3u1 @@ -107,22 +107,90 @@ limitations under the License. org.apache.avro avro ${avroVersion} + + + org.slf4j + slf4j-api + + + org.mortbay.jetty + jetty + + + org.jboss.netty + netty + + + org.apache.velocity + velocity + + org.apache.avro avro-mapred ${avroVersion} + + + org.slf4j + slf4j-api + + + org.mortbay.jetty + jetty + + + org.jboss.netty + netty + + + org.apache.velocity + velocity + + - com.cloudera.hadoop + org.apache.hadoop hadoop-core ${hadoopVersion} + + + org.codehaus.jackson + jackson-core-asl + + org.apache.hbase hbase ${hbaseVersion} + + + org.apache.avro + avro + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-json + + + com.sun.jersey + jersey-server + + + org.apache.thrift + thrift + + + log4j + log4j + + @@ -143,7 +211,7 @@ limitations under the License. FIXME we must depends on org.apache ! 
org.apache.hadoop --> - com.cloudera.hadoop + org.apache.hadoop hadoop-test ${hadoopVersion} test diff --git a/src/java/com/cloudera/sqoop/util/AppendUtils.java b/src/java/com/cloudera/sqoop/util/AppendUtils.java index 219ba3d7..2a516163 100644 --- a/src/java/com/cloudera/sqoop/util/AppendUtils.java +++ b/src/java/com/cloudera/sqoop/util/AppendUtils.java @@ -42,7 +42,8 @@ public class AppendUtils { private static final SimpleDateFormat DATE_FORM = new SimpleDateFormat( "ddHHmmssSSS"); - private static final String TEMP_IMPORT_ROOT = System.getProperty( "sqoop.test.import.rootDir", "_sqoop"); + private static final String TEMP_IMPORT_ROOT = + System.getProperty("sqoop.test.import.rootDir", "_sqoop"); private static final int PARTITION_DIGITS = 5; private static final String FILEPART_SEPARATOR = "-"; diff --git a/src/test/com/cloudera/sqoop/TestAvroImport.java b/src/test/com/cloudera/sqoop/TestAvroImport.java index c4d4d205..abbc05d1 100644 --- a/src/test/com/cloudera/sqoop/TestAvroImport.java +++ b/src/test/com/cloudera/sqoop/TestAvroImport.java @@ -93,13 +93,17 @@ public void testAvroImport() throws IOException { assertEquals("INTFIELD1", fields.get(0).name()); assertEquals(Schema.Type.UNION, fields.get(0).schema().getType()); - assertEquals(Schema.Type.INT, fields.get(0).schema().getTypes().get(0).getType()); - assertEquals(Schema.Type.NULL, fields.get(0).schema().getTypes().get(1).getType()); + assertEquals(Schema.Type.INT, + fields.get(0).schema().getTypes().get(0).getType()); + assertEquals(Schema.Type.NULL, + fields.get(0).schema().getTypes().get(1).getType()); assertEquals("INTFIELD2", fields.get(1).name()); assertEquals(Schema.Type.UNION, fields.get(1).schema().getType()); - assertEquals(Schema.Type.INT, fields.get(1).schema().getTypes().get(0).getType()); - assertEquals(Schema.Type.NULL, fields.get(1).schema().getTypes().get(1).getType()); + assertEquals(Schema.Type.INT, + fields.get(1).schema().getTypes().get(0).getType()); + assertEquals(Schema.Type.NULL, 
+ fields.get(1).schema().getTypes().get(1).getType()); GenericRecord record1 = reader.next(); assertEquals(1, record1.get("INTFIELD1")); diff --git a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java index c8a975ac..8f5e1901 100644 --- a/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java +++ b/src/test/com/cloudera/sqoop/hbase/HBaseTestCase.java @@ -47,6 +47,21 @@ */ public abstract class HBaseTestCase extends ImportJobTestCase { + /* + * This is to restore test.build.data system property which gets reset + * when HBase tests are run. Since other tests in Sqoop also depend upon + * this property, they can fail if they are run subsequently in the same VM. + */ + private static String testBuildDataProperty = ""; + + private static void recordTestBuildDataProperty() { + testBuildDataProperty = System.getProperty("test.build.data", ""); + } + + private static void restoreTestBuildDataProperty() { + System.setProperty("test.build.data", testBuildDataProperty); + } + public static final Log LOG = LogFactory.getLog( HBaseTestCase.class.getName()); @@ -105,6 +120,7 @@ private void startMaster() throws Exception { @Override @Before public void setUp() { + HBaseTestCase.recordTestBuildDataProperty(); try { startMaster(); } catch (Exception e) { @@ -133,7 +149,7 @@ public void tearDown() { LOG.warn("Error shutting down HBase minicluster: " + StringUtils.stringifyException(e)); } - + HBaseTestCase.restoreTestBuildDataProperty(); super.tearDown(); } diff --git a/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java b/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java index deff3e18..99079a3c 100644 --- a/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java +++ b/src/test/com/cloudera/sqoop/mapreduce/TestImportJob.java @@ -58,7 +58,8 @@ public void testFailedImportDueToIOException() throws IOException { Configuration conf = new Configuration(); - LogFactory.getLog( getClass() ).info( " getWarehouseDir() " + 
getWarehouseDir() ); + LogFactory.getLog(getClass()).info( + " getWarehouseDir() " + getWarehouseDir()); // Make the output dir exist so we know the job will fail via IOException. Path outputPath = new Path(new Path(getWarehouseDir()), getTableName());