Mirror of https://github.com/alibaba/DataX.git (synced 2025-05-01 18:40:44 +08:00)
commit 7bb69f6c85 (parent 3614c2633e)

    gen default parquet schema if config not set
@@ -630,7 +630,14 @@ public class HdfsHelper {
         MessageType messageType = null;
         ParquetFileProccessor proccessor = null;
         Path outputPath = new Path(fileName);
-        String schema = config.getString(Key.PARQUET_SCHEMA);
+        String schema = config.getString(Key.PARQUET_SCHEMA, null);
+        if (schema == null) {
+            List<Configuration> columns = config.getListConfiguration(Key.COLUMN);
+            if (columns == null || columns.isEmpty()) {
+                throw DataXException.asDataXException("parquetSchema or column can't be blank!");
+            }
+            schema = HdfsHelper.generateParquetSchemaFromColumnAndType(columns);
+        }
         try {
             messageType = MessageTypeParser.parseMessageType(schema);
         } catch (Exception e) {
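
The new branch above delegates to HdfsHelper.generateParquetSchemaFromColumnAndType(columns), whose body lies outside this hunk. Below is a minimal sketch of what such a helper could look like, assuming each column entry carries the usual DataX "name" and "type" fields and that every column maps to an optional Parquet primitive; the type mapping is illustrative only, not the project's actual implementation. The returned string is the kind of input MessageTypeParser.parseMessageType consumes in the try block.

    // Hypothetical sketch only -- not the actual DataX implementation.
    // Assumes each column Configuration has "name" and "type" entries.
    public static String generateParquetSchemaFromColumnAndType(List<Configuration> columns) {
        StringBuilder schema = new StringBuilder("message m {");
        for (Configuration column : columns) {
            String name = column.getString("name");
            String type = column.getString("type").toLowerCase();
            switch (type) {
                case "int":
                case "bigint":
                    schema.append(" optional int64 ").append(name).append(";");
                    break;
                case "float":
                case "double":
                    schema.append(" optional double ").append(name).append(";");
                    break;
                case "boolean":
                    schema.append(" optional boolean ").append(name).append(";");
                    break;
                default:
                    // string, varchar, date, etc. fall back to a UTF8-annotated binary
                    schema.append(" optional binary ").append(name).append(" (UTF8);");
                    break;
            }
        }
        schema.append(" }");
        return schema.toString();
    }
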
@@ -228,6 +228,12 @@ public class HdfsWriter extends Writer {
                 String endFullFileName = null;

                 fileSuffix = UUID.randomUUID().toString().replace('-', '_');
+                if (fileType.equalsIgnoreCase("PARQUET")) {
+                    if (StringUtils.isNotBlank(this.compress)) {
+                        fileSuffix += "." + this.compress.toLowerCase();
+                    }
+                    fileSuffix += ".parquet";
+                }

                 fullFileName = String.format("%s%s%s__%s", defaultFS, storePath, filePrefix, fileSuffix);
                 endFullFileName = String.format("%s%s%s__%s", defaultFS, endStorePath, filePrefix, fileSuffix);
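
The effect of the second hunk is easiest to see on a concrete file name. The snippet below replays the suffix logic with hard-coded inputs, assuming compress is set to "SNAPPY"; the UUID, path, and prefix literals are made up for the example and do not come from the commit.

    // Illustrative only: reproduces the suffix logic above with fixed inputs.
    String fileType = "PARQUET";
    String compress = "SNAPPY";
    String fileSuffix = "3f2b8a1c_9d4e_4f6a_b7c8_0d1e2f3a4b5c"; // UUID with '-' replaced by '_'
    if (fileType.equalsIgnoreCase("PARQUET")) {
        if (compress != null && !compress.trim().isEmpty()) { // stand-in for StringUtils.isNotBlank
            fileSuffix += "." + compress.toLowerCase();
        }
        fileSuffix += ".parquet";
    }
    // fileSuffix -> "3f2b8a1c_9d4e_4f6a_b7c8_0d1e2f3a4b5c.snappy.parquet"
    String fullFileName = String.format("%s%s%s__%s",
            "hdfs://ns1", "/user/hive/warehouse/demo/", "datax", fileSuffix);
    // fullFileName -> "hdfs://ns1/user/hive/warehouse/demo/datax__3f2b8a1c_9d4e_4f6a_b7c8_0d1e2f3a4b5c.snappy.parquet"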