Mirror of https://github.com/alibaba/DataX.git (synced 2025-05-02 19:31:33 +08:00)

resolve code review suggestions

This commit is contained in:
    parent 50c40095f3
    commit 1a7a00c7d1

@@ -22,7 +22,7 @@ TDengineReader 通过 TDengine 的 JDBC driver 查询获取数据。
         "reader": {
           "name": "tdenginereader",
           "parameter": {
-            "user": "root",
+            "username": "root",
             "password": "taosdata",
             "connection": [
               {
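
The documentation hunk above tracks a key rename from "user" to "username" in the reader's job JSON. A minimal sketch, assuming DataX's Configuration API, of why the documented key has to match what the code looks up; the USERNAME and PASSWORD constants below stand in for the plugin's Key class and are not copied from it:

```java
// Sketch: the reader resolves the credential by the "username" key, so a job
// file that still says "user" would come back as null. Constants are stand-ins
// for Key.USERNAME / Key.PASSWORD in the plugin.
import com.alibaba.datax.common.util.Configuration;

public class ReaderConfigSketch {
    static final String USERNAME = "username";
    static final String PASSWORD = "password";

    static void readCredentials(Configuration conf) {
        String user = conf.getString(USERNAME);   // matches "username" in the job JSON
        String password = conf.getString(PASSWORD);
        System.out.println("user=" + user + ", password set=" + (password != null));
    }
}
```
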
@@ -165,24 +165,8 @@ TDengineReader 通过 TDengine 的 JDBC driver 查询获取数据。
 
 #### 4.1.1 数据特征
 
-建表语句:
-
-单行记录类似于:
-
 #### 4.1.2 机器参数
 
-* 执行DataX的机器参数为:
-    1. cpu:
-    2. mem:
-    3. net: 千兆双网卡
-    4. disc: DataX 数据不落磁盘,不统计此项
-
-* TDengine数据库机器参数为:
-    1. cpu:
-    2. mem:
-    3. net: 千兆双网卡
-    4. disc:
-
 #### 4.1.3 DataX jvm 参数
 
 -Xms1024m -Xmx1024m -XX:+HeapDumpOnOutOfMemoryError
@@ -201,9 +185,6 @@ TDengineReader 通过 TDengine 的 JDBC driver 查询获取数据。
 
 说明:
 
-1. 这里的单表,主键类型为 bigint(20),自增。
-2. batchSize 和 通道个数,对性能影响较大。
-
 #### 4.2.4 性能测试小结
 
 1.
@@ -93,7 +93,7 @@ public class TDengineReader extends Reader {
             }
             if (start >= end)
                 throw DataXException.asDataXException(TDengineReaderErrorCode.ILLEGAL_VALUE,
-                        "The parameter [" + Key.BEGIN_DATETIME + "] should be less than the parameter [" + Key.END_DATETIME + "].");
+                        "The parameter " + Key.BEGIN_DATETIME + ": " + beginDatetime + " should be less than the parameter " + Key.END_DATETIME + ": " + endDatetime + ".");
 
         }
 
@@ -119,7 +119,6 @@ public class TDengineReader extends Reader {
                 }
             }
 
-            LOG.info("Configuration: {}", configurations);
             return configurations;
         }
     }
@@ -142,15 +141,14 @@ public class TDengineReader extends Reader {
             try {
                 Class.forName("com.taosdata.jdbc.TSDBDriver");
                 Class.forName("com.taosdata.jdbc.rs.RestfulDriver");
-            } catch (ClassNotFoundException e) {
-                e.printStackTrace();
+            } catch (ClassNotFoundException ignored) {
+                LOG.warn(ignored.getMessage(), ignored);
            }
         }
 
         @Override
         public void init() {
             this.readerSliceConfig = super.getPluginJobConf();
-            LOG.info("getPluginJobConf: {}", readerSliceConfig);
 
             String user = readerSliceConfig.getString(Key.USERNAME);
             String password = readerSliceConfig.getString(Key.PASSWORD);
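
The hunk above switches the driver-loading catch block from printStackTrace to a logged warning. A minimal sketch of the same registration pattern in isolation, assuming an slf4j Logger; the class name DriverLoadingSketch is made up:

```java
// Sketch of registering both TDengine JDBC drivers and logging a missing one
// instead of printing to stderr. The driver class names are the real TDengine
// drivers; the logger wiring is an assumption for this sketch.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DriverLoadingSketch {
    private static final Logger LOG = LoggerFactory.getLogger(DriverLoadingSketch.class);

    static {
        try {
            Class.forName("com.taosdata.jdbc.TSDBDriver");      // native JDBC driver
            Class.forName("com.taosdata.jdbc.rs.RestfulDriver"); // RESTful JDBC driver
        } catch (ClassNotFoundException ignored) {
            // One of the two drivers may be absent; log and keep going, since the
            // JDBC URL decides which driver is actually needed at runtime.
            LOG.warn(ignored.getMessage(), ignored);
        }
    }
}
```
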
@@ -174,7 +172,12 @@ public class TDengineReader extends Reader {
 
         @Override
         public void destroy() {
+            try {
+                if (conn != null)
+                    conn.close();
+            } catch (SQLException e) {
+                LOG.error(e.getMessage(), e);
+            }
         }
 
         @Override
@@ -199,22 +202,15 @@ public class TDengineReader extends Reader {
                 sqlList.addAll(querySql);
             }
 
-            try (Statement stmt = conn.createStatement()) {
-                for (String sql : sqlList) {
+            for (String sql : sqlList) {
+                try (Statement stmt = conn.createStatement()) {
                     ResultSet rs = stmt.executeQuery(sql);
                     while (rs.next()) {
                         Record record = buildRecord(recordSender, rs, mandatoryEncoding);
                         recordSender.sendToWriter(record);
                     }
-                }
-            } catch (SQLException e) {
-                throw DataXException.asDataXException(TDengineReaderErrorCode.RUNTIME_EXCEPTION, e.getMessage(), e);
-            } finally {
-                try {
-                    if (conn != null)
-                        conn.close();
                 } catch (SQLException e) {
-                    e.printStackTrace();
+                    LOG.error(e.getMessage(), e);
                 }
             }
         }
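
The startRead restructuring above opens one short-lived Statement per query inside the loop and drops the finally block that closed the Connection, since destroy() now closes it. A sketch of that shape under those assumptions; runQueries and handleRow are placeholder names, not DataX APIs:

```java
// Sketch of the reshaped read loop: one Statement per query via try-with-resources,
// with the connection's lifetime managed elsewhere (closed in destroy()).
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

public class ReadLoopSketch {
    static void runQueries(Connection conn, List<String> sqlList) {
        for (String sql : sqlList) {
            // try-with-resources closes the Statement (and its ResultSet) after each query
            try (Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery(sql)) {
                while (rs.next()) {
                    handleRow(rs);
                }
            } catch (SQLException e) {
                // a failing query is reported but does not leak the shared connection
                System.err.println("query failed: " + sql + " - " + e.getMessage());
            }
        }
    }

    static void handleRow(ResultSet rs) throws SQLException {
        // placeholder for buildRecord(...) + recordSender.sendToWriter(...)
    }
}
```
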
@@ -1,8 +1,6 @@
 package com.alibaba.datax.plugin.writer.tdenginewriter;
 
 public class Constants {
-    public static final String DEFAULT_USERNAME = "root";
-    public static final String DEFAULT_PASSWORD = "taosdata";
-    public static final int DEFAULT_BATCH_SIZE = 1;
+    public static final int DEFAULT_BATCH_SIZE = 1000;
     public static final boolean DEFAULT_IGNORE_TAGS_UNMATCHED = false;
 }
@@ -52,14 +52,13 @@ public class DefaultDataHandler implements DataHandler {
         try {
             Class.forName("com.taosdata.jdbc.TSDBDriver");
             Class.forName("com.taosdata.jdbc.rs.RestfulDriver");
-        } catch (ClassNotFoundException e) {
-            e.printStackTrace();
+        } catch (ClassNotFoundException ignored) {
         }
     }
 
     public DefaultDataHandler(Configuration configuration, TaskPluginCollector taskPluginCollector) {
-        this.username = configuration.getString(Key.USERNAME, Constants.DEFAULT_USERNAME);
-        this.password = configuration.getString(Key.PASSWORD, Constants.DEFAULT_PASSWORD);
+        this.username = configuration.getString(Key.USERNAME);
+        this.password = configuration.getString(Key.PASSWORD);
         this.jdbcUrl = configuration.getString(Key.JDBC_URL);
         this.batchSize = configuration.getInt(Key.BATCH_SIZE, Constants.DEFAULT_BATCH_SIZE);
         this.tables = configuration.getList(Key.TABLE, String.class);
@@ -73,14 +72,15 @@ public class DefaultDataHandler implements DataHandler {
         int count = 0;
         int affectedRows = 0;
 
-
         try (Connection conn = DriverManager.getConnection(jdbcUrl, username, password)) {
             LOG.info("connection[ jdbcUrl: " + jdbcUrl + ", username: " + username + "] established.");
-            // prepare table_name -> table_meta
-            this.schemaManager = new SchemaManager(conn);
-            this.tableMetas = schemaManager.loadTableMeta(tables);
-            // prepare table_name -> column_meta
-            this.columnMetas = schemaManager.loadColumnMetas(tables);
+            if (schemaManager == null) {
+                // prepare table_name -> table_meta
+                this.schemaManager = new SchemaManager(conn);
+                this.tableMetas = schemaManager.loadTableMeta(tables);
+                // prepare table_name -> column_meta
+                this.columnMetas = schemaManager.loadColumnMetas(tables);
+            }
 
             List<Record> recordBatch = new ArrayList<>();
             Record record;
@@ -89,10 +89,11 @@ public class DefaultDataHandler implements DataHandler {
                     recordBatch.add(record);
                 } else {
                     try {
-                        affectedRows = writeBatch(conn, recordBatch);
-                    } catch (SQLException e) {
+                        recordBatch.add(record);
+                        affectedRows += writeBatch(conn, recordBatch);
+                    } catch (Exception e) {
                         LOG.warn("use one row insert. because:" + e.getMessage());
-                        affectedRows = writeEachRow(conn, recordBatch);
+                        affectedRows += writeEachRow(conn, recordBatch);
                     }
                     recordBatch.clear();
                 }
@@ -101,10 +102,10 @@ public class DefaultDataHandler implements DataHandler {
 
             if (!recordBatch.isEmpty()) {
                 try {
-                    affectedRows = writeBatch(conn, recordBatch);
-                } catch (SQLException e) {
+                    affectedRows += writeBatch(conn, recordBatch);
+                } catch (Exception e) {
                     LOG.warn("use one row insert. because:" + e.getMessage());
-                    affectedRows = writeEachRow(conn, recordBatch);
+                    affectedRows += writeEachRow(conn, recordBatch);
                 }
                 recordBatch.clear();
             }
@@ -126,8 +127,8 @@ public class DefaultDataHandler implements DataHandler {
             recordList.add(record);
             try {
                 affectedRows += writeBatch(conn, recordList);
-            } catch (SQLException e) {
-                LOG.error(e.getMessage());
+            } catch (Exception e) {
+                LOG.error(e.getMessage(), e);
                 this.taskPluginCollector.collectDirtyRecord(record, e);
             }
         }
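
The writer hunks above accumulate affectedRows with += and fall back from a batch insert to row-by-row writes, handing failing rows to the dirty-record collector. A sketch of that batch-then-fallback pattern; RowWriter and its methods are placeholders, not the plugin's interfaces:

```java
// Sketch: try the whole batch first, and if it fails, retry row by row so a
// single bad record does not sink the batch.
import java.util.List;

public class BatchFallbackSketch {
    interface RowWriter {
        int writeBatch(List<String> rows) throws Exception;
        int writeRow(String row) throws Exception;
        void collectDirty(String row, Exception cause);
    }

    static int flush(RowWriter writer, List<String> batch) {
        try {
            return writer.writeBatch(batch);            // fast path
        } catch (Exception batchError) {
            int affected = 0;
            for (String row : batch) {                  // slow path: isolate bad rows
                try {
                    affected += writer.writeRow(row);
                } catch (Exception rowError) {
                    writer.collectDirty(row, rowError); // report the dirty record, keep going
                }
            }
            return affected;
        }
    }
}
```
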
@@ -145,7 +146,7 @@ public class DefaultDataHandler implements DataHandler {
      * 3. 对于tb,拼sql,例如:data: [ts, f1, f2, f3, t1, t2] tbColumn: [ts, f1, f2, t1] => insert into tb(ts, f1, f2) values(ts, f1, f2)
      * 4. 对于t,拼sql,例如:data: [ts, f1, f2, f3, t1, t2] tbColumn: [ts, f1, f2, f3, t1, t2] insert into t(ts, f1, f2, f3, t1, t2) values(ts, f1, f2, f3, t1, t2)
      */
-    public int writeBatch(Connection conn, List<Record> recordBatch) throws SQLException {
+    public int writeBatch(Connection conn, List<Record> recordBatch) throws Exception {
         int affectedRows = 0;
         for (String table : tables) {
             TableMeta tableMeta = tableMetas.get(table);
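
The comment block in the hunk above describes how the insert SQL is assembled for a sub-table (tb) versus a super table (t). An illustration of the two SQL shapes with hypothetical names (weather, tb1, ts/f1/f2/t1); these strings are examples, not output captured from the plugin:

```java
// Illustration only: the two TDengine insert shapes the writer targets.
public class InsertSqlShapes {
    // sub-table tb1: tag columns are dropped from the column list
    static final String SUB_TABLE_INSERT =
            "insert into tb1 (ts, f1, f2) values ('2022-02-01 00:00:00.000', 1.0, 2.0)";

    // writing through the super table: the sub-table is named and tags are supplied via USING ... TAGS
    static final String SUPER_TABLE_INSERT =
            "insert into tb1 using weather tags ('beijing') (ts, f1, f2) " +
            "values ('2022-02-01 00:00:00.000', 1.0, 2.0)";
}
```
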
@@ -173,31 +174,62 @@ public class DefaultDataHandler implements DataHandler {
     * record[idx(tbname)] using table tags(record[idx(t1)]) (ts, f1, f2, f3) values(record[idx(ts)], record[idx(f1)], )
     * record[idx(tbname)] using table tags(record[idx(t1)]) (ts, f1, f2, f3) values(record[idx(ts)], record[idx(f1)], )
     */
-    private int writeBatchToSupTableBySQL(Connection conn, String table, List<Record> recordBatch) throws SQLException {
+    private int writeBatchToSupTableBySQL(Connection conn, String table, List<Record> recordBatch) throws Exception {
         List<ColumnMeta> columnMetas = this.columnMetas.get(table);
 
         StringBuilder sb = new StringBuilder("insert into");
         for (Record record : recordBatch) {
             sb.append(" ").append(record.getColumn(indexOf("tbname")).asString())
                     .append(" using ").append(table)
-                    .append(" tags")
-                    .append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
-                        return colMeta.isTag;
-                    }).map(colMeta -> {
-                        return buildColumnValue(colMeta, record);
-                    }).collect(Collectors.joining(",", "(", ")")))
-                    .append(" ")
-                    .append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
+                    .append(" tags");
+            // sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
+            //     return colMeta.isTag;
+            // }).map(colMeta -> {
+            //     return buildColumnValue(colMeta, record);
+            // }).collect(Collectors.joining(",", "(", ")")));
+            sb.append("(");
+            for (int i = 0; i < columns.size(); i++) {
+                ColumnMeta colMeta = columnMetas.get(i);
+                if (!columns.contains(colMeta.field))
+                    continue;
+                if (!colMeta.isTag)
+                    continue;
+                String tagValue = buildColumnValue(colMeta, record);
+                if (i == 0) {
+                    sb.append(tagValue);
+                } else {
+                    sb.append(",").append(tagValue);
+                }
+            }
+            sb.append(")");
+
+            sb.append(" ").append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
                         return !colMeta.isTag;
                     }).map(colMeta -> {
                         return colMeta.field;
                     }).collect(Collectors.joining(",", "(", ")")))
-                    .append(" values")
-                    .append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
-                        return !colMeta.isTag;
-                    }).map(colMeta -> {
-                        return buildColumnValue(colMeta, record);
-                    }).collect(Collectors.joining(",", "(", ")")));
+                    .append(" values");
+            // sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
+            //     return !colMeta.isTag;
+            // }).map(colMeta -> {
+            //     return buildColumnValue(colMeta, record);
+            // }).collect(Collectors.joining(",", "(", ")")));
+            sb.append("(");
+            for (int i = 0; i < columnMetas.size(); i++) {
+                ColumnMeta colMeta = columnMetas.get(i);
+                if (!columns.contains(colMeta.field))
+                    continue;
+                if (colMeta.isTag)
+                    continue;
+                String colValue = buildColumnValue(colMeta, record);
+                if (i == 0) {
+                    sb.append(colValue);
+                } else {
+                    sb.append(",").append(colValue);
+                }
+            }
+            sb.append(")");
         }
         String sql = sb.toString();
 
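
The hunk above replaces the Collectors.joining builders with explicit index loops plus an i == 0 check. For comparison, a sketch of producing the same parenthesized, comma-separated list with StringJoiner, which keeps the separator bookkeeping out of the loop; ColumnLike and the sample data are invented for the illustration:

```java
// Sketch: building "(v1,v2,...)" with StringJoiner instead of tracking the
// first element by index.
import java.util.Arrays;
import java.util.List;
import java.util.StringJoiner;

public class ValueListSketch {
    static class ColumnLike {
        final String field;
        final boolean isTag;
        final String value;
        ColumnLike(String field, boolean isTag, String value) {
            this.field = field; this.isTag = isTag; this.value = value;
        }
    }

    static String joinValues(List<ColumnLike> metas, boolean wantTags) {
        StringJoiner joiner = new StringJoiner(",", "(", ")");
        for (ColumnLike meta : metas) {
            if (meta.isTag == wantTags) {
                joiner.add(meta.value);   // commas only appear between elements
            }
        }
        return joiner.toString();
    }

    public static void main(String[] args) {
        List<ColumnLike> metas = Arrays.asList(
                new ColumnLike("ts", false, "now"),
                new ColumnLike("f1", false, "1.0"),
                new ColumnLike("t1", true, "'beijing'"));
        System.out.println(joinValues(metas, true));   // ('beijing')
        System.out.println(joinValues(metas, false));  // (now,1.0)
    }
}
```
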
@@ -213,10 +245,11 @@ public class DefaultDataHandler implements DataHandler {
         return count;
     }
 
-    private String buildColumnValue(ColumnMeta colMeta, Record record) {
+    private String buildColumnValue(ColumnMeta colMeta, Record record) throws Exception {
         Column column = record.getColumn(indexOf(colMeta.field));
         TimestampPrecision timestampPrecision = schemaManager.loadDatabasePrecision();
-        switch (column.getType()) {
+        Column.Type type = column.getType();
+        switch (type) {
             case DATE: {
                 Date value = column.asDate();
                 switch (timestampPrecision) {
@@ -243,8 +276,9 @@ public class DefaultDataHandler implements DataHandler {
             case DOUBLE:
             case INT:
             case LONG:
+                column.asString();
             default:
-                return column.asString();
+                throw new Exception("invalid column type: " + type);
         }
     }
 
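
buildColumnValue above now switches on Column.Type and consults the database timestamp precision. A sketch of the DATE-to-epoch scaling that precision handling implies; Precision is a stand-in for the plugin's TimestampPrecision enum:

```java
// Sketch of converting a java.util.Date into the epoch value expected for each
// TDengine timestamp precision.
import java.util.Date;

public class TimestampSketch {
    enum Precision { MILLISEC, MICROSEC, NANOSEC }

    static long toEpoch(Date value, Precision precision) {
        long millis = value.getTime();          // java.util.Date carries millisecond precision
        switch (precision) {
            case MILLISEC:
                return millis;
            case MICROSEC:
                return millis * 1_000L;         // scale up; sub-millisecond digits stay zero
            case NANOSEC:
                return millis * 1_000_000L;
            default:
                throw new IllegalArgumentException("unknown precision: " + precision);
        }
    }
}
```
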
@@ -392,7 +426,7 @@ public class DefaultDataHandler implements DataHandler {
      * else
      * insert into tb1 (ts, f1, f2) values( record[idx(ts)], record[idx(f1)], record[idx(f2)])
      */
-    private int writeBatchToSubTable(Connection conn, String table, List<Record> recordBatch) throws SQLException {
+    private int writeBatchToSubTable(Connection conn, String table, List<Record> recordBatch) throws Exception {
         List<ColumnMeta> columnMetas = this.columnMetas.get(table);
 
         StringBuilder sb = new StringBuilder();
@@ -419,11 +453,25 @@ public class DefaultDataHandler implements DataHandler {
             if (ignoreTagsUnmatched && !tagsAllMatch)
                 continue;
 
-            sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
-                return !colMeta.isTag;
-            }).map(colMeta -> {
-                return buildColumnValue(colMeta, record);
-            }).collect(Collectors.joining(", ", "(", ") ")));
+            // sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
+            //     return !colMeta.isTag;
+            // }).map(colMeta -> {
+            //     return buildColumnValue(colMeta, record);
+            // }).collect(Collectors.joining(", ", "(", ") ")));
+            sb.append("(");
+            for (int i = 0; i < columnMetas.size(); i++) {
+                ColumnMeta colMeta = columnMetas.get(i);
+                if (colMeta.isTag)
+                    continue;
+                String colValue = buildColumnValue(colMeta, record);
+                if (i == 0) {
+                    sb.append(colValue);
+                } else {
+                    sb.append(",").append(colValue);
+                }
+            }
+            sb.append(")");
 
             validRecords++;
         }
 
@@ -462,7 +510,7 @@ public class DefaultDataHandler implements DataHandler {
      * table: ["weather"], column: ["ts, f1, f2, f3, t1, t2"]
      * sql: insert into weather (ts, f1, f2, f3, t1, t2) values( record[idx(ts), record[idx(f1)], ...)
      */
-    private int writeBatchToNormalTable(Connection conn, String table, List<Record> recordBatch) throws SQLException {
+    private int writeBatchToNormalTable(Connection conn, String table, List<Record> recordBatch) throws Exception {
         List<ColumnMeta> columnMetas = this.columnMetas.get(table);
 
         StringBuilder sb = new StringBuilder();
@@ -474,9 +522,22 @@ public class DefaultDataHandler implements DataHandler {
                 .append(" values ");
 
         for (Record record : recordBatch) {
-            sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).map(colMeta -> {
-                return buildColumnValue(colMeta, record);
-            }).collect(Collectors.joining(",", "(", ")")));
+            // sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).map(colMeta -> {
+            //     return buildColumnValue(colMeta, record);
+            // }).collect(Collectors.joining(",", "(", ")")));
+            sb.append("(");
+            for (int i = 0; i < columnMetas.size(); i++) {
+                ColumnMeta colMeta = columnMetas.get(i);
+                if (!columns.contains(colMeta.field))
+                    continue;
+                String colValue = buildColumnValue(colMeta, record);
+                if (i == 0) {
+                    sb.append(colValue);
+                } else {
+                    sb.append(",").append(colValue);
+                }
+            }
+            sb.append(")");
         }
 
         String sql = sb.toString();
@@ -10,6 +10,7 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.*;
+import java.util.stream.Collectors;
 
 public class SchemaManager {
     private static final Logger LOG = LoggerFactory.getLogger(SchemaManager.class);
@@ -123,12 +124,14 @@ public class SchemaManager {
                         }
                     }
                 } catch (SQLException e) {
-                    e.printStackTrace();
+                    LOG.error(e.getMessage(), e);
                 }
                 colMeta.value = value;
             });
 
-            LOG.debug("load column metadata of " + table + ": " + Arrays.toString(columnMetaList.toArray()));
+            LOG.debug("load column metadata of " + table + ": " +
+                    columnMetaList.stream().map(ColumnMeta::toString).collect(Collectors.joining(",", "[", "]"))
+            );
             ret.put(table, columnMetaList);
         }
         return ret;
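
The debug line above is rebuilt with Collectors.joining(",", "[", "]"), which is what the added java.util.stream.Collectors import in the earlier hunk supports. A tiny self-contained example of that call; the sample strings are arbitrary:

```java
// Example of Collectors.joining with a delimiter, prefix and suffix, as used
// for the column-metadata debug line.
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class JoiningSketch {
    public static void main(String[] args) {
        List<String> metas = Arrays.asList("ts:TIMESTAMP", "f1:FLOAT", "t1:BINARY(64)");
        String line = metas.stream().collect(Collectors.joining(",", "[", "]"));
        System.out.println(line); // [ts:TIMESTAMP,f1:FLOAT,t1:BINARY(64)]
    }
}
```
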
@@ -142,7 +145,9 @@ public class SchemaManager {
             tableMeta.tags = rs.getInt("tags");
             tableMeta.tables = rs.getInt("tables");
 
-            LOG.debug("load table metadata of " + tableMeta.tbname + ": " + tableMeta);
+            if (LOG.isDebugEnabled()){
+                LOG.debug("load table metadata of " + tableMeta.tbname + ": " + tableMeta);
+            }
             return tableMeta;
         }
 
@@ -4,10 +4,10 @@ import com.alibaba.datax.common.spi.ErrorCode;
 
 public enum TDengineWriterErrorCode implements ErrorCode {
 
-    REQUIRED_VALUE("TDengineWriter-00", "缺失必要的值"),
-    ILLEGAL_VALUE("TDengineWriter-01", "值非法"),
-    RUNTIME_EXCEPTION("TDengineWriter-02", "运行时异常"),
-    TYPE_ERROR("TDengineWriter-03", "Datax类型无法正确映射到TDengine类型");
+    REQUIRED_VALUE("TDengineWriter-00", "parameter value is missing"),
+    ILLEGAL_VALUE("TDengineWriter-01", "invalid parameter value"),
+    RUNTIME_EXCEPTION("TDengineWriter-02", "runtime exception"),
+    TYPE_ERROR("TDengineWriter-03", "data type mapping error");
 
     private final String code;
     private final String description;
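
The hunk above swaps the error descriptions to English. For context, a sketch of the usual shape of a DataX-style error-code enum; the accessor and toString forms are assumptions modeled on other plugins, not copied from this file:

```java
// Sketch of an error-code enum with a stable code plus an English description.
public enum SketchErrorCode {
    REQUIRED_VALUE("SketchWriter-00", "parameter value is missing"),
    ILLEGAL_VALUE("SketchWriter-01", "invalid parameter value");

    private final String code;
    private final String description;

    SketchErrorCode(String code, String description) {
        this.code = code;
        this.description = description;
    }

    public String getCode() {
        return code;
    }

    public String getDescription() {
        return description;
    }

    @Override
    public String toString() {
        return String.format("Code:[%s], Description:[%s].", code, description);
    }
}
```
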
@@ -28,7 +28,7 @@ public class DefaultDataHandlerTest {
     private final TaskPluginCollector taskPluginCollector = new TDengineWriter.Task().getTaskPluginCollector();
 
     @Test
-    public void writeSupTableBySQL() throws SQLException {
+    public void writeSupTableBySQL() throws Exception {
         // given
         createSupAndSubTable();
         Configuration configuration = Configuration.from("{" +
@@ -68,7 +68,7 @@ public class DefaultDataHandlerTest {
     }
 
     @Test
-    public void writeSupTableBySQL_2() throws SQLException {
+    public void writeSupTableBySQL_2() throws Exception {
         // given
         createSupAndSubTable();
         Configuration configuration = Configuration.from("{" +
@@ -106,7 +106,7 @@ public class DefaultDataHandlerTest {
     }
 
     @Test
-    public void writeSupTableBySchemaless() throws SQLException {
+    public void writeSupTableBySchemaless() throws Exception {
         // given
         createSupTable();
         Configuration configuration = Configuration.from("{" +
@@ -146,7 +146,7 @@ public class DefaultDataHandlerTest {
     }
 
     @Test
-    public void writeSubTableWithTableName() throws SQLException {
+    public void writeSubTableWithTableName() throws Exception {
         // given
         createSupAndSubTable();
         Configuration configuration = Configuration.from("{" +
@@ -185,7 +185,7 @@ public class DefaultDataHandlerTest {
     }
 
     @Test
-    public void writeSubTableWithoutTableName() throws SQLException {
+    public void writeSubTableWithoutTableName() throws Exception {
         // given
         createSupAndSubTable();
         Configuration configuration = Configuration.from("{" +
@@ -224,7 +224,7 @@ public class DefaultDataHandlerTest {
     }
 
     @Test
-    public void writeNormalTable() throws SQLException {
+    public void writeNormalTable() throws Exception {
         // given
         createSupAndSubTable();
         Configuration configuration = Configuration.from("{" +
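
With writeBatch and related methods now declared throws Exception, the tests above widen their own throws clauses. A minimal JUnit 4 sketch of that pattern; the class, method and fakeWriteBatch helper are placeholders, not the real test:

```java
// Minimal shape of the adjusted tests: declaring `throws Exception` lets checked
// exceptions from the handler under test propagate and fail the test.
import org.junit.Assert;
import org.junit.Test;

public class ThrowsClauseSketchTest {

    @Test
    public void writeBatchPropagatesCheckedExceptions() throws Exception {
        int affectedRows = fakeWriteBatch();   // stands in for handler.writeBatch(conn, batch)
        Assert.assertEquals(1, affectedRows);
    }

    private int fakeWriteBatch() throws Exception {
        return 1;  // the real method may throw SQLException or other checked exceptions
    }
}
```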