i18n support for some import log messages

This commit is contained in:
dingbo 2021-11-19 14:20:37 +08:00
parent be78295e11
commit e5c3fed1a9
8 changed files with 82 additions and 13 deletions

View File

@ -17,6 +17,7 @@ import java.util.Properties;
*/
public class DefaultDataHandler implements DataHandler {
private static final Logger LOG = LoggerFactory.getLogger(DefaultDataHandler.class);
static {
try {
Class.forName("com.taosdata.jdbc.TSDBDriver");
@ -38,7 +39,8 @@ public class DefaultDataHandler implements DataHandler {
return 0;
}
if (schemaManager.shouldGuessSchema()) {
LOG.info("无法从配置文件获取表结构信息,尝试从数据库获取");
// 无法从配置文件获取表结构信息尝试从数据库获取
LOG.info(Msg.get("try_get_schema_fromdb"));
boolean success = schemaManager.getFromDB(conn);
if (!success) {
return 0;
@ -48,7 +50,8 @@ public class DefaultDataHandler implements DataHandler {
}
int batchSize = Integer.parseInt(properties.getProperty(Key.BATCH_SIZE, "1000"));
if (batchSize < 5) {
LOG.error("batchSize太小会增加自动类型推断错误的概率建议改大后重试");
// batchSize太小会增加自动类型推断错误的概率建议改大后重试
LOG.error(Msg.get("batch_size_too_small"));
return 0;
}
return write(lineReceiver, conn, batchSize, schemaManager, collector);

View File

@ -72,16 +72,19 @@ public class JDBCBatchWriter {
public void append(Record record) throws SQLException {
int columnNum = record.getColumnNumber();
if (columnNum < minColNum) {
collector.collectDirtyRecord(record, "实际列数小于期望列数");
// 实际列数小于期望列数
collector.collectDirtyRecord(record, Msg.get("column_number_error"));
return;
}
String[] tagValues = scm.getTagValuesFromRecord(record);
if (tagValues == null) {
collector.collectDirtyRecord(record, "标签列包含null");
// 标签列包含null
collector.collectDirtyRecord(record, Msg.get("tag_value_error"));
return;
}
if (!scm.hasTimestamp(record)) {
collector.collectDirtyRecord(record, "时间戳列为null或类型错误");
// 时间戳列为null或类型错误
collector.collectDirtyRecord(record, Msg.get("ts_value_error"));
return;
}
String tableName = scm.computeTableName(tagValues);
@ -140,7 +143,8 @@ public class JDBCBatchWriter {
}
}
if (!ok) {
throw DataXException.asDataXException(TDengineWriterErrorCode.TYPE_ERROR, String.format("根据采样的%d条数据无法推断第%d列的数据类型", records.size(), i + 1));
// 根据采样的%d条数据无法推断第%d列的数据类型
throw DataXException.asDataXException(TDengineWriterErrorCode.TYPE_ERROR, String.format(Msg.get("infer_column_type_error"), records.size(), i + 1));
}
}
LOG.info("Field Types: {}", fieldTypes);

View File

@ -0,0 +1,20 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;
import java.util.Locale;
import java.util.ResourceBundle;
/**
 * i18n message utility for the TDengine writer plugin.
 *
 * <p>Resolves localized log/error messages from the "tdenginewritermsg"
 * resource bundle family (e.g. tdenginewritermsg.properties,
 * tdenginewritermsg_zh_CN.properties) using the JVM default locale.
 */
public class Msg {
    // Loaded once at class-initialization time; ResourceBundle falls back to
    // the base tdenginewritermsg.properties when no locale-specific file exists.
    private static final ResourceBundle bundle =
            ResourceBundle.getBundle("tdenginewritermsg", Locale.getDefault());

    // Utility class: not meant to be instantiated.
    private Msg() {
    }

    /**
     * Returns the localized message for the given key.
     *
     * @param key message key, e.g. "batch_size_too_small"
     * @return the localized message text
     * @throws java.util.MissingResourceException if the key is absent from the bundle
     */
    public static String get(String key) {
        return bundle.getString(key);
    }
}

View File

@ -65,7 +65,7 @@ public class SchemaManager {
private void getFromConfig(Properties properties) {
stable = properties.getProperty(Key.STABLE);
if (stable == null) {
LOG.error("配置错误: no stable");
LOG.error("Config error: no stable");
return;
}
for (Object key : properties.keySet()) {
@ -98,7 +98,7 @@ public class SchemaManager {
List<String> sortedFieldName = fieldIndexMap.entrySet().stream().sorted((x, y) -> x.getValue().compareTo(y.getValue())).map(e -> e.getKey()).collect(Collectors.toList());
fieldList.addAll(sortedFieldName); // 排序的目的是保证自动建表时列的顺序和输入数据的列的顺序保持一致
canInferSchemaFromConfig = tsColIndex > -1 && !(fixedTagValue.isEmpty() && tagIndexMap.isEmpty()) && !fieldIndexMap.isEmpty();
LOG.info("配置文件解析结果fixedTags=[{}] ,tags=[{}], fields=[{}], tsColName={}, tsIndex={}", String.join(",", fixedTagValue.keySet()), String.join(",", tagIndexMap.keySet()), String.join(",", fieldList), tsColName, tsColIndex);
LOG.info("Config file parsed result: fixedTags=[{}], tags=[{}], fields=[{}], tsColName={}, tsIndex={}", String.join(",", fixedTagValue.keySet()), String.join(",", tagIndexMap.keySet()), String.join(",", fieldList), tsColName, tsColIndex);
}
public boolean shouldGuessSchema() {
@ -112,8 +112,7 @@ public class SchemaManager {
public boolean configValid() {
boolean valid = (tagList.size() > 0 && fieldList.size() > 0 && tsColIndex > -1) || (tagList.size() == 0 && fieldList.size() == 0 && tsColIndex == -1);
if (!valid) {
LOG.error("配置错误. tag_columnsfield_columnstimestamp_column必须同时存在或同时省略当前解析结果: tag_columns: {}, field_columns:{}, timestamp_column:{} tsColIndex:{}",
(fixedTagValue.size() + tagIndexMap.size()), fieldIndexMap.size(), tsColName, tsColIndex);
LOG.error("Config error: tagColumn, fieldColumn and timestampColumn must be present together or absent together.");
}
return valid;
}
@ -128,7 +127,7 @@ public class SchemaManager {
try {
List<String> stables = getSTables(conn);
if (!stables.contains(stable)) {
LOG.error("超级表{}不存在,无法从数据库获取表结构信息.", stable);
LOG.error("super table {} does not exist, fail to get schema from database.", stable);
return false;
}
} catch (SQLException e) {
@ -155,7 +154,7 @@ public class SchemaManager {
}
colIndex++;
}
LOG.info("从数据库获取的表结构概要tags=[{}], fields=[{}], tsColName={}, tsIndex={}", String.join(",", tagIndexMap.keySet()), String.join(",", fieldList), tsColName, tsColIndex);
LOG.info("table info: tags=[{}], fields=[{}], tsColName={}, tsIndex={}", String.join(",", tagIndexMap.keySet()), String.join(",", fieldList), tsColName, tsColIndex);
return true;
} catch (SQLException e) {
LOG.error(e.getMessage());
@ -195,7 +194,7 @@ public class SchemaManager {
sb.deleteCharAt(sb.length() - 1);
sb.append(")");
String q = sb.toString();
LOG.info("自动创建超级表" + q);
LOG.info("run sql: " + q);
try (Statement stmt = conn.createStatement()) {
stmt.execute(q);
}

View File

@ -0,0 +1,6 @@
try_get_schema_fromdb=fail to get structure info of target table from configure file and will try to get it from database
batch_size_too_small='batchSize' is too small, please increase it and try again
column_number_error=number of columns is less than expected
tag_value_error=tag columns include 'null' value
ts_value_error=timestamp column type error or null
infer_column_type_error=fail to infer column type: sample count %d, column index %d

View File

@ -0,0 +1,6 @@
try_get_schema_fromdb=fail to get structure info of target table from configure file and will try to get it from database
batch_size_too_small='batchSize' is too small, please increase it and try again
column_number_error=number of columns is less than expected
tag_value_error=tag columns include 'null' value
ts_value_error=timestamp column type error or null
infer_column_type_error=fail to infer column type: sample count %d, column index %d

View File

@ -0,0 +1,6 @@
try_get_schema_fromdb=\u65e0\u6cd5\u4ece\u914d\u7f6e\u6587\u4ef6\u83b7\u53d6\u8868\u7ed3\u6784\u4fe1\u606f\uff0c\u5c1d\u8bd5\u4ece\u6570\u636e\u5e93\u83b7\u53d6
batch_size_too_small=batchSize\u592a\u5c0f\uff0c\u4f1a\u589e\u52a0\u81ea\u52a8\u7c7b\u578b\u63a8\u65ad\u9519\u8bef\u7684\u6982\u7387\uff0c\u5efa\u8bae\u6539\u5927\u540e\u91cd\u8bd5
column_number_error=\u5b9e\u9645\u5217\u6570\u5c0f\u4e8e\u671f\u671b\u5217\u6570
tag_value_error=\u6807\u7b7e\u5217\u5305\u542bnull
ts_value_error=\u65f6\u95f4\u6233\u5217\u4e3anull\u6216\u7c7b\u578b\u9519\u8bef
infer_column_type_error=\u6839\u636e\u91c7\u6837\u7684%d\u6761\u6570\u636e\uff0c\u65e0\u6cd5\u63a8\u65ad\u7b2c%d\u5217\u7684\u6570\u636e\u7c7b\u578b

View File

@ -0,0 +1,25 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;
import org.junit.Test;
import java.util.Locale;
import java.util.ResourceBundle;
import org.junit.Assert;
// Tests for the "tdenginewritermsg" resource bundles backing the Msg i18n utility.
public class MessageTest {
@Test
public void testChineseMessage() {
// The zh_CN bundle must resolve the key to the expected Chinese text.
Locale local = new Locale("zh", "CN");
ResourceBundle bundle = ResourceBundle.getBundle("tdenginewritermsg", local);
String msg = bundle.getString("try_get_schema_fromdb");
Assert.assertEquals("无法从配置文件获取表结构信息,尝试从数据库获取", msg);
}
@Test
public void testDefaultMessage() {
// Smoke test: the default-locale bundle contains the key (getString throws
// MissingResourceException otherwise); value is printed for manual inspection.
ResourceBundle bundle = ResourceBundle.getBundle("tdenginewritermsg", Locale.getDefault());
String msg = bundle.getString("try_get_schema_fromdb");
System.out.println(msg);
}
}