5 stars · 0 forks — mirror of https://github.com/apache/sqoop.git, synced 2025-05-06 11:09:26 +08:00

SQOOP-2238: Sqoop2: Parquet jars missing

(Abraham Elmahrek via Jarek Jarcec Cecho)
Committed by Jarek Jarcec Cecho on 2015-03-29 13:20:22 -07:00
parent 3ba34e250d
commit a18d27e371
2 changed files with 20 additions and 0 deletions

View File

@ -59,6 +59,14 @@ public Set<String> getJars(InitializerContext context,
jars.add(ClassUtils.jarForClass("org.kitesdk.data.Datasets"));
jars.add(ClassUtils.jarForClass("com.fasterxml.jackson.databind.JsonNode"));
jars.add(ClassUtils.jarForClass("com.fasterxml.jackson.core.TreeNode"));
jars.add(ClassUtils.jarForClass("parquet.hadoop.metadata.CompressionCodecName"));
jars.add(ClassUtils.jarForClass("parquet.format.CompressionCodec"));
jars.add(ClassUtils.jarForClass("parquet.avro.AvroParquetWriter"));
jars.add(ClassUtils.jarForClass("parquet.column.ParquetProperties"));
jars.add(ClassUtils.jarForClass("parquet.Version"));
jars.add(ClassUtils.jarForClass("parquet.org.codehaus.jackson.type.TypeReference"));
jars.add(ClassUtils.jarForClass("parquet.bytes.CapacityByteArrayOutputStream"));
jars.add(ClassUtils.jarForClass("parquet.encoding.Generator"));
if (fromJobConfig.fromJobConfig.uri.startsWith("dataset:hive")) {
// @TODO(Abe): Remove a deps that aren't used?
jars.add(ClassUtils.jarForClass("org.apache.hadoop.hive.conf.HiveConf"));
@ -72,6 +80,7 @@ public Set<String> getJars(InitializerContext context,
jars.add(ClassUtils.jarForClass("org.datanucleus.query.compiler.JavaQueryCompiler"));
jars.add(ClassUtils.jarForClass("org.datanucleus.query.typesafe.TypesafeSubquery"));
jars.add(ClassUtils.jarForClass("org.datanucleus.store.rdbms.sql.SQLStatement"));
jars.add(ClassUtils.jarForClass("parquet.hive.serde.ParquetHiveSerDe"));
}
return jars;
}

View File

@ -65,6 +65,16 @@ public Set<String> getJars(InitializerContext context,
if (FileFormat.CSV.equals(toJobConfig.toJobConfig.fileFormat)) {
jars.add(ClassUtils.jarForClass("au.com.bytecode.opencsv.CSVWriter"));
}
if (FileFormat.PARQUET.equals(toJobConfig.toJobConfig.fileFormat)) {
jars.add(ClassUtils.jarForClass("parquet.hadoop.metadata.CompressionCodecName"));
jars.add(ClassUtils.jarForClass("parquet.format.CompressionCodec"));
jars.add(ClassUtils.jarForClass("parquet.avro.AvroParquetWriter"));
jars.add(ClassUtils.jarForClass("parquet.column.ParquetProperties"));
jars.add(ClassUtils.jarForClass("parquet.Version"));
jars.add(ClassUtils.jarForClass("parquet.org.codehaus.jackson.type.TypeReference"));
jars.add(ClassUtils.jarForClass("parquet.bytes.CapacityByteArrayOutputStream"));
jars.add(ClassUtils.jarForClass("parquet.encoding.Generator"));
}
if (toJobConfig.toJobConfig.uri.startsWith("dataset:hive")) {
// @TODO(Abe): Remove a deps that aren't used?
jars.add(ClassUtils.jarForClass("org.apache.hadoop.hive.conf.HiveConf"));
@ -78,6 +88,7 @@ public Set<String> getJars(InitializerContext context,
jars.add(ClassUtils.jarForClass("org.datanucleus.query.compiler.JavaQueryCompiler"));
jars.add(ClassUtils.jarForClass("org.datanucleus.query.typesafe.TypesafeSubquery"));
jars.add(ClassUtils.jarForClass("org.datanucleus.store.rdbms.sql.SQLStatement"));
jars.add(ClassUtils.jarForClass("parquet.hive.serde.ParquetHiveSerDe"));
}
return jars;
}