Mirror of https://github.com/apache/sqoop.git (synced 2025-05-04 04:31:18 +08:00)
SQOOP-309. Update Sqoop dependency versions.
git-svn-id: https://svn.apache.org/repos/asf/incubator/sqoop/trunk@1158378 13f79535-47bb-0310-9956-ffa450edef68
parent: fb3aa233bd
commit: fe9f6c723a
ivy.xml (4 changed lines)
@@ -68,9 +68,9 @@ under the License.
   </publications>
   <dependencies>
     <!-- Dependencies for Cloudera's Distribution for Hadoop -->
-    <dependency org="com.cloudera.hadoop" name="hadoop-core"
+    <dependency org="org.apache.hadoop" name="hadoop-core"
       rev="${hadoop-core.cloudera.version}" conf="cloudera->default"/>
-    <dependency org="com.cloudera.hadoop" name="hadoop-test"
+    <dependency org="org.apache.hadoop" name="hadoop-test"
       rev="${hadoop-core.cloudera.version}" conf="clouderatest->default"/>

     <!-- Common dependencies for Sqoop -->
@@ -29,10 +29,10 @@ commons-lang.version=2.4
 commons-logging.version=1.0.4

 # Cloudera Distribution dependency version
-hadoop-core.cloudera.version=0.20.2-737
+hadoop-core.cloudera.version=0.20.2-cdh3u1

-hbase.version=0.89.20100924-28
-zookeeper.version=3.3.1+7
+hbase.version=0.90.3-cdh3u1
+zookeeper.version=3.3.3-cdh3u1

 hsqldb.version=1.8.0.10

pom.xml (76 changed lines)
@@ -82,9 +82,9 @@ limitations under the License.
     <hadoopVersion>0.20.203.0</hadoopVersion>
     -->
     <!-- FIXME Cloudera Distribution dependency version -->
-    <hadoopVersion>0.20.2-737</hadoopVersion>
+    <hadoopVersion>0.20.2-cdh3u1</hadoopVersion>

-    <hbaseVersion>0.90.3</hbaseVersion>
+    <hbaseVersion>0.90.3-cdh3u1</hbaseVersion>
   </properties>

   <dependencies>
@@ -107,22 +107,90 @@ limitations under the License.
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
       <version>${avroVersion}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.jboss.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.velocity</groupId>
+          <artifactId>velocity</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro-mapred</artifactId>
       <version>${avroVersion}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.jboss.netty</groupId>
+          <artifactId>netty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.velocity</groupId>
+          <artifactId>velocity</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
-      <groupId>com.cloudera.hadoop</groupId>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-core</artifactId>
       <version>${hadoopVersion}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>

     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase</artifactId>
       <version>${hbaseVersion}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.thrift</groupId>
+          <artifactId>thrift</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>

     <dependency>
@@ -143,7 +211,7 @@ limitations under the License.
       FIXME we must depends on org.apache !
       <groupId>org.apache.hadoop</groupId>
       -->
-      <groupId>com.cloudera.hadoop</groupId>
+      <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-test</artifactId>
       <version>${hadoopVersion}</version>
       <scope>test</scope>
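
The new exclusions above presumably keep transitive copies of slf4j, Jetty, Netty, and Velocity (via Avro), Jackson (via Hadoop), and Avro, Jersey, Thrift, and log4j (via HBase) from shadowing the versions Sqoop pins itself. When checking that such an exclusion really took effect, one quick technique is to ask the JVM where it loaded a class from. A hedged sketch of that check (not part of this commit; class name hypothetical):

public class WhichJar {
    public static void main(String[] args) throws ClassNotFoundException {
        // Resolve a class and report the jar it was loaded from; useful for
        // confirming that an excluded transitive dependency is gone from the
        // runtime classpath.
        String name = args.length > 0 ? args[0] : "org.slf4j.LoggerFactory";
        Class<?> c = Class.forName(name);
        java.security.CodeSource src = c.getProtectionDomain().getCodeSource();
        System.out.println(name + " -> "
            + (src == null ? "bootstrap/unknown" : src.getLocation()));
    }
}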
@@ -42,7 +42,8 @@ public class AppendUtils {

   private static final SimpleDateFormat DATE_FORM = new SimpleDateFormat(
       "ddHHmmssSSS");
-  private static final String TEMP_IMPORT_ROOT = System.getProperty( "sqoop.test.import.rootDir", "_sqoop");
+  private static final String TEMP_IMPORT_ROOT =
+      System.getProperty("sqoop.test.import.rootDir", "_sqoop");

   private static final int PARTITION_DIGITS = 5;
   private static final String FILEPART_SEPARATOR = "-";
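
The reformatted constant also shows the idiom this change preserves: a JVM system property, when set, overrides the hard-coded _sqoop default for the temporary import root. A minimal standalone sketch of that pattern (the demo class is illustrative, not Sqoop code):

public class TempRootDemo {
    // Same idiom as TEMP_IMPORT_ROOT above: read an override from a system
    // property, falling back to the "_sqoop" default when it is unset.
    private static final String TEMP_IMPORT_ROOT =
        System.getProperty("sqoop.test.import.rootDir", "_sqoop");

    public static void main(String[] args) {
        // Run with -Dsqoop.test.import.rootDir=/tmp/sqoop-imports to override.
        System.out.println("temporary import root: " + TEMP_IMPORT_ROOT);
    }
}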
@@ -93,13 +93,17 @@ public void testAvroImport() throws IOException {

   assertEquals("INTFIELD1", fields.get(0).name());
   assertEquals(Schema.Type.UNION, fields.get(0).schema().getType());
-  assertEquals(Schema.Type.INT, fields.get(0).schema().getTypes().get(0).getType());
-  assertEquals(Schema.Type.NULL, fields.get(0).schema().getTypes().get(1).getType());
+  assertEquals(Schema.Type.INT,
+      fields.get(0).schema().getTypes().get(0).getType());
+  assertEquals(Schema.Type.NULL,
+      fields.get(0).schema().getTypes().get(1).getType());

   assertEquals("INTFIELD2", fields.get(1).name());
   assertEquals(Schema.Type.UNION, fields.get(1).schema().getType());
-  assertEquals(Schema.Type.INT, fields.get(1).schema().getTypes().get(0).getType());
-  assertEquals(Schema.Type.NULL, fields.get(1).schema().getTypes().get(1).getType());
+  assertEquals(Schema.Type.INT,
+      fields.get(1).schema().getTypes().get(0).getType());
+  assertEquals(Schema.Type.NULL,
+      fields.get(1).schema().getTypes().get(1).getType());

   GenericRecord record1 = reader.next();
   assertEquals(1, record1.get("INTFIELD1"));
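
These assertions encode Avro's convention for nullable columns: each imported column's field schema is a UNION of the base type and NULL, with the base type first. A small sketch of that schema shape, assuming an Avro 1.x jar on the classpath (the demo class is not Sqoop code):

import java.util.Arrays;

import org.apache.avro.Schema;

public class NullableUnionDemo {
    public static void main(String[] args) {
        // A nullable INT column is modeled as the union [INT, NULL],
        // which is exactly what the assertions above check per field.
        Schema nullableInt = Schema.createUnion(Arrays.asList(
            Schema.create(Schema.Type.INT),
            Schema.create(Schema.Type.NULL)));

        System.out.println(nullableInt.getType());                   // the union itself
        System.out.println(nullableInt.getTypes().get(0).getType()); // branch 0: INT
        System.out.println(nullableInt.getTypes().get(1).getType()); // branch 1: NULL
    }
}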
@@ -47,6 +47,21 @@
  */
 public abstract class HBaseTestCase extends ImportJobTestCase {

+  /*
+   * This is to restore the test.build.data system property, which gets reset
+   * when HBase tests are run. Since other tests in Sqoop also depend upon
+   * this property, they can fail if they are run subsequently in the same VM.
+   */
+  private static String testBuildDataProperty = "";
+
+  private static void recordTestBuildDataProperty() {
+    testBuildDataProperty = System.getProperty("test.build.data", "");
+  }
+
+  private static void restoreTestBuidlDataProperty() {
+    System.setProperty("test.build.data", testBuildDataProperty);
+  }
+
   public static final Log LOG = LogFactory.getLog(
       HBaseTestCase.class.getName());
@@ -105,6 +120,7 @@ private void startMaster() throws Exception {
   @Override
   @Before
   public void setUp() {
+    HBaseTestCase.recordTestBuildDataProperty();
     try {
       startMaster();
     } catch (Exception e) {
@@ -133,7 +149,7 @@ public void tearDown() {
       LOG.warn("Error shutting down HBase minicluster: "
           + StringUtils.stringifyException(e));
     }
+    HBaseTestCase.restoreTestBuidlDataProperty();
     super.tearDown();
   }

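
Taken together, the three HBaseTestCase hunks bracket every test with a record/restore pair, so the HBase minicluster cannot leak a modified test.build.data value into tests that run later in the same JVM. A generic sketch of the same save/restore idiom, assuming JUnit 4 (the class below is illustrative, not from this commit):

import org.junit.After;
import org.junit.Before;

public abstract class SystemPropertyGuard {
    private String saved;

    @Before
    public void saveProperty() {
        // Capture the value before the test body (or a minicluster) resets it.
        saved = System.getProperty("test.build.data", "");
    }

    @After
    public void restoreProperty() {
        // Put the original value back so later tests in the same JVM see
        // the environment they expect.
        System.setProperty("test.build.data", saved);
    }
}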
@@ -58,7 +58,8 @@ public void testFailedImportDueToIOException() throws IOException {

   Configuration conf = new Configuration();

-  LogFactory.getLog( getClass() ).info( " getWarehouseDir() " + getWarehouseDir() );
+  LogFactory.getLog(getClass()).info(
+      " getWarehouseDir() " + getWarehouseDir());

   // Make the output dir exist so we know the job will fail via IOException.
   Path outputPath = new Path(new Path(getWarehouseDir()), getTableName());