SQOOP-2238: Sqoop2: Parquet jars missing
(Abraham Elmahrek via Jarek Jarcec Cecho)
parent 3ba34e250d
commit a18d27e371
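The change adds the Parquet runtime jars to the jar sets returned by the Kite connector's getJars(...) initializers (the Kite connector, judging by the org.kitesdk classes and "dataset:" URIs), so that Parquet reads and writes no longer fail with missing classes at job execution time. Two source files change, four hunks in total. First, the FROM-side initializer: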
@@ -59,6 +59,14 @@ public Set<String> getJars(InitializerContext context,
     jars.add(ClassUtils.jarForClass("org.kitesdk.data.Datasets"));
     jars.add(ClassUtils.jarForClass("com.fasterxml.jackson.databind.JsonNode"));
     jars.add(ClassUtils.jarForClass("com.fasterxml.jackson.core.TreeNode"));
+    jars.add(ClassUtils.jarForClass("parquet.hadoop.metadata.CompressionCodecName"));
+    jars.add(ClassUtils.jarForClass("parquet.format.CompressionCodec"));
+    jars.add(ClassUtils.jarForClass("parquet.avro.AvroParquetWriter"));
+    jars.add(ClassUtils.jarForClass("parquet.column.ParquetProperties"));
+    jars.add(ClassUtils.jarForClass("parquet.Version"));
+    jars.add(ClassUtils.jarForClass("parquet.org.codehaus.jackson.type.TypeReference"));
+    jars.add(ClassUtils.jarForClass("parquet.bytes.CapacityByteArrayOutputStream"));
+    jars.add(ClassUtils.jarForClass("parquet.encoding.Generator"));
     if (fromJobConfig.fromJobConfig.uri.startsWith("dataset:hive")) {
       // @TODO(Abe): Remove a deps that aren't used?
       jars.add(ClassUtils.jarForClass("org.apache.hadoop.hive.conf.HiveConf"));
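Every added line leans on ClassUtils.jarForClass, which maps a class name to the path of the jar that provides it. The Sqoop implementation is not part of this diff; the following is a minimal sketch of the usual lookup technique, with the class name JarLocator and the details as illustrative assumptions:

import java.net.URL;

/**
 * A minimal sketch of the jar-lookup technique behind a helper like
 * ClassUtils.jarForClass. The real Sqoop implementation is not shown in
 * this commit; names and details here are illustrative assumptions.
 */
public final class JarLocator {

  public static String jarForClass(String className) {
    try {
      Class<?> klass = Class.forName(className);
      String resource = klass.getName().replace('.', '/') + ".class";
      ClassLoader loader = klass.getClassLoader();
      if (loader == null) {
        // Bootstrap classes have no dedicated loader; fall back.
        loader = ClassLoader.getSystemClassLoader();
      }
      URL url = loader.getResource(resource);
      if (url != null && "jar".equals(url.getProtocol())) {
        // A jar URL looks like: jar:file:/path/to/lib.jar!/pkg/Name.class
        String path = url.getPath();
        return path.substring(0, path.indexOf('!'));
      }
    } catch (ClassNotFoundException e) {
      // The dependency is absent from the classpath; report that as null.
    }
    return null;
  }

  public static void main(String[] args) {
    // Prints the jar providing the Kite API when it is on the classpath,
    // or null otherwise.
    System.out.println(jarForClass("org.kitesdk.data.Datasets"));
  }
}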
@@ -72,6 +80,7 @@ public Set<String> getJars(InitializerContext context,
       jars.add(ClassUtils.jarForClass("org.datanucleus.query.compiler.JavaQueryCompiler"));
       jars.add(ClassUtils.jarForClass("org.datanucleus.query.typesafe.TypesafeSubquery"));
       jars.add(ClassUtils.jarForClass("org.datanucleus.store.rdbms.sql.SQLStatement"));
+      jars.add(ClassUtils.jarForClass("parquet.hive.serde.ParquetHiveSerDe"));
     }
     return jars;
   }
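The second file applies the same jar list to the TO-side initializer, where it is additionally guarded by the configured output file format: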
@@ -65,6 +65,16 @@ public Set<String> getJars(InitializerContext context,
     if (FileFormat.CSV.equals(toJobConfig.toJobConfig.fileFormat)) {
       jars.add(ClassUtils.jarForClass("au.com.bytecode.opencsv.CSVWriter"));
     }
+    if (FileFormat.PARQUET.equals(toJobConfig.toJobConfig.fileFormat)) {
+      jars.add(ClassUtils.jarForClass("parquet.hadoop.metadata.CompressionCodecName"));
+      jars.add(ClassUtils.jarForClass("parquet.format.CompressionCodec"));
+      jars.add(ClassUtils.jarForClass("parquet.avro.AvroParquetWriter"));
+      jars.add(ClassUtils.jarForClass("parquet.column.ParquetProperties"));
+      jars.add(ClassUtils.jarForClass("parquet.Version"));
+      jars.add(ClassUtils.jarForClass("parquet.org.codehaus.jackson.type.TypeReference"));
+      jars.add(ClassUtils.jarForClass("parquet.bytes.CapacityByteArrayOutputStream"));
+      jars.add(ClassUtils.jarForClass("parquet.encoding.Generator"));
+    }
     if (toJobConfig.toJobConfig.uri.startsWith("dataset:hive")) {
       // @TODO(Abe): Remove a deps that aren't used?
       jars.add(ClassUtils.jarForClass("org.apache.hadoop.hive.conf.HiveConf"));
@@ -78,6 +88,7 @@ public Set<String> getJars(InitializerContext context,
       jars.add(ClassUtils.jarForClass("org.datanucleus.query.compiler.JavaQueryCompiler"));
       jars.add(ClassUtils.jarForClass("org.datanucleus.query.typesafe.TypesafeSubquery"));
       jars.add(ClassUtils.jarForClass("org.datanucleus.store.rdbms.sql.SQLStatement"));
+      jars.add(ClassUtils.jarForClass("parquet.hive.serde.ParquetHiveSerDe"));
     }
     return jars;
   }
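Downstream, the returned Set<String> is what the execution engine ships with the job. How Sqoop2 actually does that is outside this commit; purely for illustration, a consumer built on plain Hadoop MapReduce might look like the sketch below (JarShipper and its use of Job.addFileToClassPath are assumptions, not Sqoop2 code):

import java.io.IOException;
import java.util.Set;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;

/**
 * Hypothetical consumer of getJars(): ship the reported jars to the
 * cluster so the connector's dependencies are on the task classpath.
 */
public final class JarShipper {

  public static void shipJars(Job job, Set<String> jarPaths) throws IOException {
    for (String jar : jarPaths) {
      // Assumes jarForClass-style lookups yield null for classes they
      // cannot resolve, so guard before touching the path.
      if (jar != null) {
        job.addFileToClassPath(new Path(jar));
      }
    }
  }
}

Because getJars() returns a Set, overlapping dependencies across the FROM and TO sides (for example, the shared Parquet jars above) are deduplicated before shipping.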