Mirror of https://github.com/alibaba/DataX.git (synced 2025-05-02 11:11:08 +08:00)

commit 57d82f0636
Merge pull request #20 from taosdata/hotfix/TD-12422

    tdenginewriter rebuilt
@@ -62,6 +62,7 @@ DataX目前已经有了比较全面的插件体系,主流的RDBMS数据库、N
 | | Elasticsearch | | √ |[写](https://github.com/alibaba/DataX/blob/master/elasticsearchwriter/doc/elasticsearchwriter.md)|
 | 时间序列数据库 | OpenTSDB | √ | |[读](https://github.com/alibaba/DataX/blob/master/opentsdbreader/doc/opentsdbreader.md)|
 | | TSDB | √ | √ |[读](https://github.com/alibaba/DataX/blob/master/tsdbreader/doc/tsdbreader.md) 、[写](https://github.com/alibaba/DataX/blob/master/tsdbwriter/doc/tsdbhttpwriter.md)|
+| | TDengine | √ | √ |[读](https://github.com/taosdata/DataX/blob/master/tdenginereader/doc/tdenginereader.md) 、[写](https://github.com/taosdata/DataX/blob/master/tdenginewriter/doc/tdenginewriter-CN.md)|

 # 阿里云DataWorks数据集成
@@ -32,7 +32,7 @@
         <dependency>
             <groupId>com.taosdata.jdbc</groupId>
             <artifactId>taos-jdbcdriver</artifactId>
-            <version>2.0.34</version>
+            <version>2.0.37</version>
         </dependency>

         <dependency>
@@ -7,7 +7,7 @@ public class Key {
     // public static final String PORT = "port";
     // public static final String DB = "db";
     public static final String TABLE = "table";
-    public static final String USER = "user";
+    public static final String USER = "username";
     public static final String PASSWORD = "password";
     public static final String CONNECTION = "connection";
     // public static final String SQL = "sql";
@@ -190,8 +190,9 @@ public class TDengineReader extends Reader {
         @Override
         public void startRead(RecordSender recordSender) {
             try (Statement stmt = conn.createStatement()) {
-                for (int i = 0; i < tables.size(); i++) {
-                    String sql = "select " + StringUtils.join(columns, ",") + " from " + tables.get(i) + " where _c0 >= " + startTime + " and _c0 < " + endTime;
+                for (String table : tables) {
+                    String sql = "select " + StringUtils.join(columns, ",") + " from " + table
+                            + " where _c0 >= " + startTime + " and _c0 < " + endTime;
                     ResultSet rs = stmt.executeQuery(sql);
                     ResultSetMetaData metaData = rs.getMetaData();
                     while (rs.next()) {
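For reference, with a hypothetical reader configuration of columns = [ts, temperature] and table weather, each iteration of the rewritten loop issues a query of this shape (the epoch bounds are illustrative; `_c0` refers to the first, timestamp, column of a TDengine table):

```sql
-- sketch of the SQL built by startRead(); table, columns and time bounds are example values
select ts,temperature from weather where _c0 >= 1609430400000 and _c0 < 1609434000000;
```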
@@ -4,21 +4,39 @@
 ## 1 快速介绍

-TDengineWriter插件实现了写入数据到TDengine数据库功能。可用于离线同步其它数据库的数据到TDengine。
+TDengineWriter插件实现了写入数据到TDengine数据库目标表的功能。底层实现上,TDengineWriter通过JDBC连接TDengine,按照TDengine的SQL语法,执行insert语句/schemaless语句,将数据写入TDengine。
+
+TDengineWriter可以作为数据迁移工具,供DBA将其它数据库的数据导入到TDengine。

 ## 2 实现原理

-TDengineWriter 通过 DataX 框架获取 Reader生成的协议数据,根据reader的类型解析数据。目前有两种写入方式:
-
-1. 对于OpenTSDBReader, TDengineWriter通过JNI方式调用TDengine客户端库文件(taos.lib或taos.dll)中的方法,使用[schemaless的方式](https://www.taosdata.com/cn/documentation/insert#schemaless)写入。
-
-2. 对于其它数据源,会根据配置生成SQL语句, 通过[taos-jdbcdriver](https://www.taosdata.com/cn/documentation/connector/java)批量写入。
-
-这样区分的原因是OpenTSDBReader将opentsdb的数据统一读取为json字符串,Writer端接收到的数据只有1列。而其它Reader插件一般会把数据放在不同列。
+TDengineWriter 通过 DataX 框架获取 Reader 生成的协议数据,通过JDBC Driver连接TDengine,执行insert语句/schemaless语句,将数据写入TDengine。
+
+在TDengine中,table可以分成超级表、子表、普通表三种类型:超级表和子表包括column和tag,子表的tag列的值为固定值,普通表与关系型数据库中表的概念一致。(详细请参考:[数据模型](https://www.taosdata.com/docs/cn/v2.0/architecture#model))
+
+TDengineWriter支持向超级表、子表、普通表中写入数据,按照table的类型和column参数中是否包含tbname,使用以下方法进行写入(生成语句的形式可参考列表后的示意):
+
+1. table为超级表,column中指定tbname:使用自动建表的insert语句,使用tbname作为子表的名称。
+2. table为超级表,column中未指定tbname:使用schemaless写入,TDengine会根据超级表名、tag值计算一个子表名称。
+3. table为子表:使用insert语句写入,ignoreTagsUnmatched参数为true时,忽略record中tag值与table的tag值不一致的数据。
+4. table为普通表:使用insert语句写入。
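上述四种写入方式生成的语句形式,可参考下面的示意(库名、表名、列名与取值均为假设的示例,格式与本次提交中 DefaultDataHandler 注释里的描述一致):

```sql
-- 1. 超级表 + tbname:自动建表的insert语句,tbname作为子表名
insert into tb1 using stb1 tags('t1') (ts, f1, f2) values (now, 1, 2.0);
-- 3. 子表:普通insert语句
insert into tb1 (ts, f1, f2) values (now, 1, 2.0);
-- 4. 普通表:普通insert语句
insert into t1 (ts, f1, f2) values (now, 1, 2.0);
```

对于第2种方式(超级表、无tbname),写入走schemaless行协议而非SQL,单行格式大致为:`stb1,t1=abc f1=1i32,f2=2.0 1626006833639000000`。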
 ## 3 功能说明
-### 3.1 从OpenTSDB到TDengine
-#### 3.1.1 配置样例
+### 3.1 配置样例
+
+配置一个写入TDengine的作业。
+
+先在TDengine上创建超级表:
+
+```sql
+create database if not exists test;
+create table test.weather (ts timestamp, temperature int, humidity double) tags(is_normal bool, device_id binary(100), address nchar(100));
+```
+
+使用下面的Job配置,将数据写入TDengine:

 ```json
 {
@@ -26,286 +44,65 @@ TDengineWriter 通过 DataX 框架获取 Reader生成的协议数据,根据rea
     "content": [
       {
         "reader": {
-          "name": "opentsdbreader",
+          "name": "streamreader",
           "parameter": {
-            "endpoint": "http://192.168.1.180:4242",
             "column": [
-              "weather_temperature"
+              {
+                "type": "string",
+                "value": "tb1"
+              },
+              {
+                "type": "date",
+                "value": "2022-02-20 12:00:01"
+              },
+              {
+                "type": "long",
+                "random": "0, 10"
+              },
+              {
+                "type": "double",
+                "random": "0, 10"
+              },
+              {
+                "type": "bool",
+                "random": "0, 50"
+              },
+              {
+                "type": "bytes",
+                "value": "abcABC123"
+              },
+              {
+                "type": "string",
+                "value": "北京朝阳望京"
+              }
             ],
-            "beginDateTime": "2021-01-01 00:00:00",
-            "endDateTime": "2021-01-01 01:00:00"
+            "sliceRecordCount": 1
           }
         },
         "writer": {
           "name": "tdenginewriter",
           "parameter": {
-            "host": "192.168.1.180",
-            "port": 6030,
-            "dbName": "test",
             "username": "root",
-            "password": "taosdata"
-          }
-        }
-      }
-    ],
-    "setting": {
-      "speed": {
-        "channel": 1
-      }
-    }
-  }
-}
-```
-
-#### 3.1.2 参数说明
-
-| 参数      | 描述                 | 是否必选 | 默认值   |
-| --------- | -------------------- | -------- | -------- |
-| host      | TDengine实例的host   | 是       | 无       |
-| port      | TDengine实例的port   | 是       | 无       |
-| username  | TDengine实例的用户名 | 否       | root     |
-| password  | TDengine实例的密码   | 否       | taosdata |
-| dbName    | 目的数据库的名称     | 是       | 无       |
-| batchSize | 每次批量插入多少记录 | 否       | 1        |
-
-#### 3.1.3 类型转换
-
-目前,由于OpenTSDBReader将opentsdb的数据统一读取为json字符串,TDengineWriter 在做Opentsdb到TDengine的迁移时,按照以下类型进行处理:
-
-| OpenTSDB数据类型 | DataX 内部类型 | TDengine 数据类型 |
-| ---------------- | -------------- | ----------------- |
-| timestamp        | Date           | timestamp         |
-| Integer(value)   | Double         | double            |
-| Float(value)     | Double         | double            |
-| String(value)    | String         | binary            |
-| Integer(tag)     | String         | binary            |
-| Float(tag)       | String         | binary            |
-| String(tag)      | String         | binary            |
-
-### 3.2 从MongoDB到TDengine
-
-#### 3.2.1 配置样例
-```json
-{
-  "job": {
-    "setting": {
-      "speed": {
-        "channel": 2
-      }
-    },
-    "content": [
-      {
-        "reader": {
-          "name": "mongodbreader",
-          "parameter": {
-            "address": [
-              "127.0.0.1:27017"
-            ],
-            "userName": "user",
-            "mechanism": "SCRAM-SHA-1",
-            "userPassword": "password",
-            "authDb": "admin",
-            "dbName": "test",
-            "collectionName": "stock",
-            "column": [
-              {
-                "name": "stockID",
-                "type": "string"
-              },
-              {
-                "name": "tradeTime",
-                "type": "date"
-              },
-              {
-                "name": "lastPrice",
-                "type": "double"
-              },
-              {
-                "name": "askPrice1",
-                "type": "double"
-              },
-              {
-                "name": "bidPrice1",
-                "type": "double"
-              },
-              {
-                "name": "volume",
-                "type": "int"
-              }
-            ]
-          }
-        },
-        "writer": {
-          "name": "tdenginewriter",
-          "parameter": {
-            "host": "localhost",
-            "port": 6030,
-            "dbName": "test",
-            "username": "root",
-            "password": "taosdata",
-            "stable": "stock",
-            "tagColumn": {
-              "industry": "energy",
-              "stockID": 0
-            },
-            "fieldColumn": {
-              "lastPrice": 2,
-              "askPrice1": 3,
-              "bidPrice1": 4,
-              "volume": 5
-            },
-            "timestampColumn": {
-              "tradeTime": 1
-            }
-          }
-        }
-      }
-    ]
-  }
-}
-```
-
-**注:本配置的writer部分同样适用于关系型数据库**
-
-#### 3.2.2 参数说明
-| 参数            | 描述                 | 是否必选         | 默认值   | 备注 |
-| --------------- | -------------------- | ---------------- | -------- | ---- |
-| host            | TDengine实例的host   | 是               | 无       |
-| port            | TDengine实例的port   | 是               | 无       |
-| username        | TDengine实例的用户名 | 否               | root     |
-| password        | TDengine实例的密码   | 否               | taosdata |
-| dbName          | 目的数据库的名称     | 是               | 无       |
-| batchSize       | 每次批量插入多少记录 | 否               | 1000     |
-| stable          | 目标超级表的名称     | 是(OpenTSDB除外) | 无       |
-| tagColumn       | 格式:{tagName1: tagInd1, tagName2: tagInd2}, 标签列在写插件收到的Record中的位置和列名 | 否 | 无 | 位置索引均从0开始, tagInd如果为字符串, 表示固定标签值,不需要从源数据中获取 |
-| fieldColumn     | 格式:{fdName1: fdInd1, fdName2: fdInd2}, 字段列在写插件收到的Record中的位置和列名 | 否 | 无 | |
-| timestampColumn | 格式:{tsColName: tsColIndex}, 时间戳列在写插件收到的Record中的位置和列名 | 否 | 无 | 时间戳列只能有一个 |
-
-示例配置中tagColumn有一个industry,它的值是一个固定的字符串"energy", 作用是给导入的所有数据加一个值为"energy"的固定标签industry。这个应用场景可以是:在源库中,有多个设备采集的数据分表存储,设备名就是表名,可以用这个机制把设备名称转化为标签。
-
-#### 3.2.3 自动建表规则
-##### 3.2.3.1 超级表创建规则
-
-如果配置了tagColumn、 fieldColumn和timestampColumn将会在插入第一条数据前,自动创建超级表。<br>
-数据列的类型从第1条记录自动推断, 标签列默认类型为`NCHAR(64)`, 比如示例配置,可能生成以下建表语句:
-
-```sql
-CREATE STABLE IF NOT EXISTS market_snapshot (
-  tadetime TIMESTAMP,
-  lastprice DOUBLE,
-  askprice1 DOUBLE,
-  bidprice1 DOUBLE,
-  volume INT
-)
-TAGS(
-  industry NCHAR(64),
-  stockID NCHAR(64)
-);
-```
-
-##### 3.2.3.2 子表创建规则
-
-子表结构与超级表相同,子表表名生成规则:
-1. 将标签的value 组合成为如下的字符串: `tag_value1!tag_value2!tag_value3`。
-2. 计算该字符串的 MD5 散列值 "md5_val"。
-3. "t_md5val"作为子表名。其中的 "t" 是固定的前缀。
-
-#### 3.2.4 用户提前建表
-
-如果你已经创建好目标超级表,那么tagColumn、 fieldColumn和timestampColumn三个字段均可省略, 插件将通过执行`describe stableName`获取表结构的信息。
-此时要求接收到的Record中Column的顺序和执行`describe stableName`返回的列顺序相同, 比如通过`describe stableName`返回以下内容:
-```
-Field     | Type      | Length | Note |
-=================================================================================
-ts        | TIMESTAMP |      8 |      |
-current   | DOUBLE    |      8 |      |
-location  | BINARY    |     10 | TAG  |
-```
-那么插件收到的数据第1列必须代表时间戳,第2列必须代表电流,第3列必须代表位置。
-
-#### 3.2.5 注意事项
-
-1. tagColumn、 fieldColumn和timestampColumn三个字段用于描述目标表的结构信息,这三个配置字段必须同时存在或同时省略。
-2. 如果存在以上三个配置,且目标表也已经存在,则两者必须一致。**一致性**由用户自己保证,插件不做检查。不一致可能会导致插入失败或插入数据错乱。
-
-#### 3.2.6 类型转换
-
-| DataX 内部类型 | TDengine 数据类型 |
-|--------------- | ----------------- |
-| Long           | BIGINT            |
-| Double         | DOUBLE            |
-| String         | NCHAR(64)         |
-| Date           | TIMESTAMP         |
-| Boolean        | BOOL              |
-| Bytes          | BINARY(64)        |
-
-### 3.3 从关系型数据库到TDengine
-writer部分的配置规则和上述MongoDB的示例是一样的,这里给出一个MySQL的示例。
-
-#### 3.3.1 MySQL中表结构
-```sql
-CREATE TABLE IF NOT EXISTS weather(
-  station varchar(100),
-  latitude DOUBLE,
-  longtitude DOUBLE,
-  `date` DATE,
-  TMAX int,
-  TMIN int
-)
-```
-
-#### 3.3.2 配置文件示例
-
-```json
-{
-  "job": {
-    "content": [
-      {
-        "reader": {
-          "name": "mysqlreader",
-          "parameter": {
-            "username": "root",
-            "password": "passw0rd",
             "column": [
-              "*"
+              "tbname",
+              "ts",
+              "temperature",
+              "humidity",
+              "is_normal",
+              "device_id",
+              "address"
             ],
-            "splitPk": "station",
             "connection": [
               {
                 "table": [
                   "weather"
                 ],
-                "jdbcUrl": [
-                  "jdbc:mysql://127.0.0.1:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf8"
-                ]
+                "jdbcUrl": "jdbc:TAOS-RS://192.168.56.105:6041/test"
               }
-            ]
-          }
-        },
-        "writer": {
-          "name": "tdenginewriter",
-          "parameter": {
-            "host": "127.0.0.1",
-            "port": 6030,
-            "dbName": "test",
-            "username": "root",
-            "password": "taosdata",
-            "batchSize": 1000,
-            "stable": "weather",
-            "tagColumn": {
-              "station": 0
-            },
-            "fieldColumn": {
-              "latitude": 1,
-              "longtitude": 2,
-              "tmax": 4,
-              "tmin": 5
-            },
-            "timestampColumn":{
-              "date": 3
-            }
-          }
+            ],
+            "batchSize": 100,
+            "ignoreTagsUnmatched": true
           }
         }
       }
@@ -319,6 +116,72 @@ CREATE TABLE IF NOT EXISTS weather(
 }
 ```

+### 3.2 参数说明
+
+* jdbcUrl
+  * 描述:数据源的JDBC连接信息,TDengine的JDBC信息请参考:[Java连接器的使用](https://www.taosdata.com/docs/cn/v2.0/connector/java#url)
+  * 必选:是
+  * 默认值:无
+* username
+  * 描述:用户名
+  * 必选:是
+  * 默认值:无
+* password
+  * 描述:用户名的密码
+  * 必选:是
+  * 默认值:无
+* table
+  * 描述:表名的集合,table应该包含column参数中的所有列(tbname除外)。注意,column中的tbname会被当作TDengine中子表名使用。
+  * 必选:是
+  * 默认值:无
+* column
+  * 描述:字段名的集合,字段的顺序应该与record中column的顺序保持一致。
+  * 必选:是
+  * 默认值:无
+* batchSize
+  * 描述:每batchSize条record为一个batch进行写入
+  * 必选:否
+  * 默认值:1
+* ignoreTagsUnmatched
+  * 描述:当table为TDengine中的一张子表时,table具有固定的tag值。如果数据的tag值与table的tag值不相等,数据不写入到table中。
+  * 必选:否
+  * 默认值:false
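综合以上参数,一个最小的writer配置示意如下(其中的jdbcUrl、表名、字段名均为示例值,完整作业配置见3.1节):

```json
{
  "name": "tdenginewriter",
  "parameter": {
    "username": "root",
    "password": "taosdata",
    "column": ["tbname", "ts", "temperature"],
    "connection": [
      {
        "table": ["weather"],
        "jdbcUrl": "jdbc:TAOS-RS://localhost:6041/test"
      }
    ],
    "batchSize": 100,
    "ignoreTagsUnmatched": true
  }
}
```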
+### 3.3 类型转换
+
+datax中的数据类型,可以映射到TDengine的数据类型:
+
+| DataX 内部类型 | TDengine 数据类型                         |
+| -------------- | ----------------------------------------- |
+| INT            | TINYINT, SMALLINT, INT                    |
+| LONG           | TIMESTAMP, TINYINT, SMALLINT, INT, BIGINT |
+| DOUBLE         | FLOAT, DOUBLE                             |
+| STRING         | TIMESTAMP, BINARY, NCHAR                  |
+| BOOL           | BOOL                                      |
+| DATE           | TIMESTAMP                                 |
+| BYTES          | BINARY                                    |
+### 3.4 各数据源到TDengine的参考示例
+
+下面是一些数据源到TDengine进行数据迁移的示例:
+
+| 数据迁移示例       | 配置的示例                                                      |
+| ------------------ | --------------------------------------------------------------- |
+| TDengine到TDengine | [超级表到超级表,指定tbname](../src/test/resources/t2t-1.json)   |
+| TDengine到TDengine | [超级表到超级表,不指定tbname](../src/test/resources/t2t-2.json) |
+| TDengine到TDengine | [超级表到子表](../src/test/resources/t2t-3.json)                |
+| TDengine到TDengine | [普通表到普通表](../src/test/resources/t2t-4.json)              |
+| RDBMS到TDengine    | [普通表到超级表,指定tbname](../src/test/resources/dm2t-1.json)  |
+| RDBMS到TDengine    | [普通表到超级表,不指定tbname](../src/test/resources/dm2t-2.json) |
+| RDBMS到TDengine    | [普通表到子表](../src/test/resources/dm2t-3.json)               |
+| RDBMS到TDengine    | [普通表到普通表](../src/test/resources/dm2t-4.json)             |
+| OpenTSDB到TDengine | [metric到普通表](../src/test/resources/o2t-1.json)              |

 ## 4 性能报告
@@ -362,44 +225,21 @@ CREATE TABLE IF NOT EXISTS weather(

 说明:

-1. 这里的单表,主键类型为 bigint(20),自增。
-2. batchSize 和 通道个数,对性能影响较大。
-3. 16通道,4096批量提交时,出现 full gc 2次。
+1.

 #### 4.2.4 性能测试小结

 ## 5 约束限制

-1. 本插件自动创建超级表时NCHAR类型的长度固定为64,对于包含长度大于64的字符串的数据源,将不支持。
-2. 标签列不能包含null值,如果包含会被过滤掉。
+1.

 ## FAQ

-### 如何选取要同步的数据的范围?
-
-数据范围的选取在Reader插件端配置,对于不同的Reader插件配置方法往往不同。比如对于mysqlreader, 可以用sql语句指定数据范围。对于opentsdbreader, 用beginDateTime和endDateTime两个配置项指定数据范围。
-
-### 如何一次导入多张源表?
-
-如果Reader插件支持一次读多张表,Writer插件就能一次导入多张表。如果Reader不支持多张表,可以建多个job,分别导入。Writer插件只负责写数据。
-
-### 一张源表导入之后对应TDengine中多少张表?
-
-这是由tagColumn决定的,如果所有tag列的值都相同,那么目标表只有一个。源表有多少不同的tag组合,目标超级表就有多少子表。
-
 ### 源表和目标表的字段顺序一致吗?

-TDengine要求每个表第一列是时间戳列,后边是普通字段,最后是标签列。如果源表不是这个顺序,插件在自动建表时会自动调整。
+是的,TDengineWriter按照column中字段的顺序解析来自DataX的数据。
-
-### 插件如何确定各列的数据类型?
-
-根据收到的第一批数据自动推断各列的类型。
-
-### 为什么插入10年前的数据会抛异常`TDengine ERROR (2350): failed to execute batch bind` ?
-
-因为创建数据库的时候,默认保留10年的数据。可以手动指定要保留多长时间的数据,比如:`CREATE DATABASE power KEEP 36500;`。
-
-### 如果编译的时候某些插件的依赖找不到怎么办?
-
-如果这个插件不是必须的,可以注释掉根目录下的pom.xml中的对应插件。
@@ -4,18 +4,41 @@
 ## 1 Quick Introduction

-TDengineWriter Plugin writes data to [TDengine](https://www.taosdata.com/en/). It can be used to offline synchronize data from other databases to TDengine.
+The TDengineWriter plugin writes data to the target tables of a TDengine database. Under the hood, TDengineWriter connects to TDengine through JDBC and, following TDengine's SQL syntax, executes insert statements or schemaless statements to write the data into TDengine.
+
+TDengineWriter can be used as a data migration tool for DBAs to import data from other databases into TDengine.

 ## 2 Implementation

-TDengineWriter get records from DataX Framework that are generated from reader side. It has two whiting strategies:
-
-1. For data from OpenTSDBReader which is in json format, to leverage the new feature of TDengine Server that support writing json data directly called [schemaless writing](https://www.taosdata.com/cn/documentation/insert#schemaless), we use JNI to call functions in `taos.lib` or `taos.dll`.(Since the feature was not included in taos-jdbcdrive until version 2.0.36).
-
-2. For other data sources, we use [taos-jdbcdriver](https://www.taosdata.com/cn/documentation/connector/java) to write data. If the target table is not exists beforehand, then it will be created automatically according to your configuration.
+TDengineWriter obtains the protocol data generated by a Reader through the DataX framework, connects to TDengine through the JDBC driver, executes insert statements or schemaless statements, and writes the data into TDengine.
+
+In TDengine, tables are divided into super tables, sub-tables and ordinary tables. Super tables and sub-tables have columns and tags; the tag columns of a sub-table hold fixed values. (For details, please refer to: [data model](https://www.taosdata.com/docs/cn/v2.0/architecture#model))
+
+TDengineWriter can write data to super tables, sub-tables and ordinary tables, choosing among the following methods based on the type of the table and on whether the column parameter contains tbname (see the sketch after this list):
+
+1. The table is a super table and column specifies tbname: use an auto-create insert statement, with tbname as the sub-table name.
+2. The table is a super table and column does not contain tbname: use schemaless writing; TDengine computes a sub-table name from the super table name and the tag values.
+3. The table is a sub-table: write with insert statements; when the ignoreTagsUnmatched parameter is true, records whose tag values do not match the table's tags are ignored.
+4. The table is an ordinary table: write with insert statements.
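As a rough illustration (database, table and column names and values below are hypothetical, following the format sketched in the DefaultDataHandler comments in this commit), the write paths generate statements of the following shapes:

```sql
-- 1. super table + tbname: auto-create insert statement, tbname as the sub-table name
insert into tb1 using stb1 tags('t1') (ts, f1, f2) values (now, 1, 2.0);
-- 3. sub-table: plain insert statement
insert into tb1 (ts, f1, f2) values (now, 1, 2.0);
-- 4. ordinary table: plain insert statement
insert into t1 (ts, f1, f2) values (now, 1, 2.0);
```

For path 2 (super table without tbname), the writer uses the schemaless line protocol instead of SQL; a single line looks roughly like `stb1,t1=abc f1=1i32,f2=2.0 1626006833639000000`.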
 ## 3 Features Introduction
-### 3.1 From OpenTSDB to TDengine
-#### 3.1.1 Sample Setting
+### 3.1 Sample
+
+Configure a job to write to TDengine.
+
+Create a super table on TDengine first:
+
+```sql
+create database if not exists test;
+create table test.weather (ts timestamp, temperature int, humidity double) tags(is_normal bool, device_id binary(100), address nchar(100));
+```
+
+Write data to TDengine using the following job configuration:

 ```json
 {
@@ -23,286 +46,65 @@ TDengineWriter get records from DataX Framework that are generated from reader s
     "content": [
       {
         "reader": {
-          "name": "opentsdbreader",
+          "name": "streamreader",
           "parameter": {
-            "endpoint": "http://192.168.1.180:4242",
             "column": [
-              "weather_temperature"
+              {
+                "type": "string",
+                "value": "tb1"
+              },
+              {
+                "type": "date",
+                "value": "2022-02-20 12:00:01"
+              },
+              {
+                "type": "long",
+                "random": "0, 10"
+              },
+              {
+                "type": "double",
+                "random": "0, 10"
+              },
+              {
+                "type": "bool",
+                "random": "0, 50"
+              },
+              {
+                "type": "bytes",
+                "value": "abcABC123"
+              },
+              {
+                "type": "string",
+                "value": "北京朝阳望京"
+              }
             ],
-            "beginDateTime": "2021-01-01 00:00:00",
-            "endDateTime": "2021-01-01 01:00:00"
+            "sliceRecordCount": 1
           }
         },
         "writer": {
           "name": "tdenginewriter",
           "parameter": {
-            "host": "192.168.1.180",
-            "port": 6030,
-            "dbName": "test",
             "username": "root",
-            "password": "taosdata"
-          }
-        }
-      }
-    ],
-    "setting": {
-      "speed": {
-        "channel": 1
-      }
-    }
-  }
-}
-```
-
-#### 3.1.2 Configuration
-
-| Parameter | Description                    | Required | Default  |
-| --------- | ------------------------------ | -------- | -------- |
-| host      | host of TDengine               | Yes      |          |
-| port      | port of TDengine               | Yes      |          |
-| username  | use name of TDengine           | No       | root     |
-| password  | password of TDengine           | No       | taosdata |
-| dbName    | name of target database        | No       |          |
-| batchSize | batch size of insert operation | No       | 1        |
-
-#### 3.1.3 Type Convert
-
-| OpenTSDB Type  | DataX Type | TDengine Type |
-| -------------- | ---------- | ------------- |
-| timestamp      | Date       | timestamp     |
-| Integer(value) | Double     | double        |
-| Float(value)   | Double     | double        |
-| String(value)  | String     | binary        |
-| Integer(tag)   | String     | binary        |
-| Float(tag)     | String     | binary        |
-| String(tag)    | String     | binary        |
-
-### 3.2 From MongoDB to TDengine
-
-#### 3.2.1 Sample Setting
-```json
-{
-  "job": {
-    "setting": {
-      "speed": {
-        "channel": 2
-      }
-    },
-    "content": [
-      {
-        "reader": {
-          "name": "mongodbreader",
-          "parameter": {
-            "address": [
-              "127.0.0.1:27017"
-            ],
-            "userName": "user",
-            "mechanism": "SCRAM-SHA-1",
-            "userPassword": "password",
-            "authDb": "admin",
-            "dbName": "test",
-            "collectionName": "stock",
-            "column": [
-              {
-                "name": "stockID",
-                "type": "string"
-              },
-              {
-                "name": "tradeTime",
-                "type": "date"
-              },
-              {
-                "name": "lastPrice",
-                "type": "double"
-              },
-              {
-                "name": "askPrice1",
-                "type": "double"
-              },
-              {
-                "name": "bidPrice1",
-                "type": "double"
-              },
-              {
-                "name": "volume",
-                "type": "int"
-              }
-            ]
-          }
-        },
-        "writer": {
-          "name": "tdenginewriter",
-          "parameter": {
-            "host": "localhost",
-            "port": 6030,
-            "dbName": "test",
-            "username": "root",
-            "password": "taosdata",
-            "stable": "stock",
-            "tagColumn": {
-              "industry": "energy",
-              "stockID": 0
-            },
-            "fieldColumn": {
-              "lastPrice": 2,
-              "askPrice1": 3,
-              "bidPrice1": 4,
-              "volume": 5
-            },
-            "timestampColumn": {
-              "tradeTime": 1
-            }
-          }
-        }
-      }
-    ]
-  }
-}
-```
-
-**Note: the writer part of this setting can also apply to other data sources except for OpenTSDB**
-
-#### 3.2.2 Configuration
-
-| Parameter       | Description | Required | Default | Remark |
-| --------------- | ----------- | -------- | ------- | ------ |
-| host            | host of TDengine | Yes | | |
-| port            | port of TDengine | Yes | | |
-| username        | username of TDengine | No | root | |
-| password        | password of TDengine | No | taosdata | |
-| dbName          | name of target database | Yes | | |
-| batchSize       | batch size of insert operation | No | 1000 | |
-| stable          | name of target super table | Yes(except for OpenTSDB) | | |
-| tagColumn       | name and position of tag columns in the record from reader, format: {tagName1: tagInd1, tagName2: tagInd2} | No | | index starts with 0 |
-| fieldColumn     | name and position of data columns in the record from reader, format: {fdName1: fdInd1, fdName2: fdInd2} | No | | |
-| timestampColumn | name and position of timestamp column in the record from reader | No | | |
-
-**Note**: You see that the value of tagColumn "industry" is a fixed string, this ia a good feature of this plugin. Think about this scenario: you have many tables with the structure and one table corresponds to one device. You want to use the device number as a tag in the target super table, then this feature is designed for you.
-
-#### 3.2.3 Auto table creating
-##### 3.2.3.1 Rules
-
-If all of `tagColumn`, `fieldColumn` and `timestampColumn` are offered in writer configuration, then target super table will be created automatically.
-The type of tag columns will always be `NCHAR(64)`. The sample setting above will produce following sql:
-
-```sql
-CREATE STABLE IF NOT EXISTS market_snapshot (
-  tadetime TIMESTAMP,
-  lastprice DOUBLE,
-  askprice1 DOUBLE,
-  bidprice1 DOUBLE,
-  volume INT
-)
-TAGS(
-  industry NCHAR(64),
-  stockID NCHAR(64)
-);
-```
-
-##### 3.2.3.2 Sub-table Creating Rules
-
-The structure of sub-tables are the same with structure of super table. The names of sub-tables are generated by rules below:
-1. combine value of tags like this: `tag_value1!tag_value2!tag_value3`.
-2. compute md5 hash hex of above string, named `md5val`
-3. use "t_md5val" as sub-table name, in which "t" is fixed prefix.
-
-#### 3.2.4 Use Pre-created Table
-
-If you have created super table firstly, then all of tagColumn, fieldColumn and timestampColumn can be omitted. The writer plugin will get table schema by executing `describe stableName`.
-The order of columns of records received by this plugin must be the same as the order of columns returned by `describe stableName`. For example, if you have super table as below:
-```
-Field     | Type      | Length | Note |
-=================================================================================
-ts        | TIMESTAMP |      8 |      |
-current   | DOUBLE    |      8 |      |
-location  | BINARY    |     10 | TAG  |
-```
-Then the first columns received by this writer plugin must represent timestamp, the second column must represent current with type double, the third column must represent location with internal type string.
-
-#### 3.2.5 Remarks
-
-1. Config keys --tagColumn, fieldColumn and timestampColumn, must be presented or omitted at the same time.
-2. If above three config keys exist and the target table also exists, then the order of columns defined by the config file and the existed table must be the same.
-
-#### 3.2.6 Type Convert
-
-| DataX Type | TDengine Type |
-| ---------- | ------------- |
-| Long       | BIGINT        |
-| Double     | DOUBLE        |
-| String     | NCHAR(64)     |
-| Date       | TIMESTAMP     |
-| Boolean    | BOOL          |
-| Bytes      | BINARY(64)    |
-
-### 3.3 From Relational Database to TDengine
-
-Take MySQl as example.
-
-#### 3.3.1 Table Structure in MySQL
-```sql
-CREATE TABLE IF NOT EXISTS weather(
-  station varchar(100),
-  latitude DOUBLE,
-  longtitude DOUBLE,
-  `date` DATE,
-  TMAX int,
-  TMIN int
-)
-```
-
-#### 3.3.2 Sample Setting
-
-```json
-{
-  "job": {
-    "content": [
-      {
-        "reader": {
-          "name": "mysqlreader",
-          "parameter": {
-            "username": "root",
-            "password": "passw0rd",
             "column": [
-              "*"
+              "tbname",
+              "ts",
+              "temperature",
+              "humidity",
+              "is_normal",
+              "device_id",
+              "address"
             ],
-            "splitPk": "station",
             "connection": [
               {
                 "table": [
                   "weather"
                 ],
-                "jdbcUrl": [
-                  "jdbc:mysql://127.0.0.1:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf8"
-                ]
+                "jdbcUrl": "jdbc:TAOS-RS://192.168.56.105:6041/test"
              }
-            ]
-          }
-        },
-        "writer": {
-          "name": "tdenginewriter",
-          "parameter": {
-            "host": "127.0.0.1",
-            "port": 6030,
-            "dbName": "test",
-            "username": "root",
-            "password": "taosdata",
-            "batchSize": 1000,
-            "stable": "weather",
-            "tagColumn": {
-              "station": 0
-            },
-            "fieldColumn": {
-              "latitude": 1,
-              "longtitude": 2,
-              "tmax": 4,
-              "tmin": 5
-            },
-            "timestampColumn":{
-              "date": 3
-            }
-          }
+            ],
+            "batchSize": 100,
+            "ignoreTagsUnmatched": true
          }
        }
      }
@@ -316,41 +118,79 @@ CREATE TABLE IF NOT EXISTS weather(
 }
 ```

-## 4 Performance Test
+### 3.2 Configuration
+
+* jdbcUrl
+  * Description: JDBC connection information for the data source; for TDengine JDBC information please refer to: [Java connector](https://www.taosdata.com/docs/cn/v2.0/connector/java#url)
+  * Required: yes
+  * Default: none
+* username
+  * Description: username
+  * Required: yes
+  * Default: none
+* password
+  * Description: password of the username
+  * Required: yes
+  * Default: none
+* table
+  * Description: a list of table names; each table should contain all of the columns in the column parameter (except tbname). Note that tbname in column is used as the TDengine sub-table name.
+  * Required: yes
+  * Default: none
+* column
+  * Description: a list of field names; the order of the fields must match the order of the columns in the record.
+  * Required: yes
+  * Default: none
+* batchSize
+  * Description: records are written in batches of batchSize.
+  * Required: no
+  * Default: 1
+* ignoreTagsUnmatched
+  * Description: when the table is a sub-table in TDengine, it has fixed tag values. If this option is true and the tag values of a record are not equal to the tag values of the table, the record is not written to the table.
+  * Required: no
+  * Default: false
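Putting these parameters together, a minimal writer configuration would look roughly like this (jdbcUrl, table and field names are example values; see section 3.1 for a complete job):

```json
{
  "name": "tdenginewriter",
  "parameter": {
    "username": "root",
    "password": "taosdata",
    "column": ["tbname", "ts", "temperature"],
    "connection": [
      {
        "table": ["weather"],
        "jdbcUrl": "jdbc:TAOS-RS://localhost:6041/test"
      }
    ],
    "batchSize": 100,
    "ignoreTagsUnmatched": true
  }
}
```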
+### 3.3 Type Convert
+
+Data types in DataX can be mapped to data types in TDengine as follows:
+
+| DataX Type | TDengine Type                             |
+| ---------- | ----------------------------------------- |
+| INT        | TINYINT, SMALLINT, INT                    |
+| LONG       | TIMESTAMP, TINYINT, SMALLINT, INT, BIGINT |
+| DOUBLE     | FLOAT, DOUBLE                             |
+| STRING     | TIMESTAMP, BINARY, NCHAR                  |
+| BOOL       | BOOL                                      |
+| DATE       | TIMESTAMP                                 |
+| BYTES      | BINARY                                    |
+
+### 3.4 Reference samples from various data sources to TDengine
+
+Here are some examples of data sources migrating to TDengine:
+
+| Sample               | Configuration                                                                  |
+| -------------------- | ------------------------------------------------------------------------------ |
+| TDengine to TDengine | [super table to super table with tbname](../src/test/resources/t2t-1.json)     |
+| TDengine to TDengine | [super table to super table without tbname](../src/test/resources/t2t-2.json)  |
+| TDengine to TDengine | [super table to sub-table](../src/test/resources/t2t-3.json)                   |
+| TDengine to TDengine | [table to table](../src/test/resources/t2t-4.json)                             |
+| RDBMS to TDengine    | [table to super table with tbname](../src/test/resources/dm2t-1.json)          |
+| RDBMS to TDengine    | [table to super table without tbname](../src/test/resources/dm2t-2.json)       |
+| RDBMS to TDengine    | [table to sub-table](../src/test/resources/dm2t-3.json)                        |
+| RDBMS to TDengine    | [table to table](../src/test/resources/dm2t-4.json)                            |
+| OpenTSDB to TDengine | [metric to table](../src/test/resources/o2t-1.json)                            |
+
+## 4 Restriction

-## 5 Restriction
-
-1. NCHAR type has fixed length 64 when auto creating stable.
-2. Rows have null tag values will be dropped.

 ## FAQ

-### How to filter on source table?
-
-It depends on reader plugin. For different reader plugins, the way may be different.
-
-### How to import multiple source tables at once?
-
-It depends on reader plugin. If the reader plugin supports reading multiple tables at once, then there is no problem.
-
-### How many sub-tables will be produced?
-
-The number of sub-tables is determined by tagColumns, equals to the number of different combinations of tag values.
-
 ### Do columns in source table and columns in target table must be in the same order?

-No. TDengine require the first column has timestamp type,which is followed by data columns, followed by tag columns. The writer plugin will create super table in this column order, regardless of origin column orders.
+Yes. TDengineWriter parses the data from DataX in the order of the fields in column.
-
-### How dose the plugin infer the data type of incoming data?
-
-By the first batch of records it received.
-
-### Why can't I insert data of 10 years ago? Do this will get error: `TDengine ERROR (2350): failed to execute batch bind`.
-
-Because the database you created only keep 10 years data by default, you can create table like this: `CREATE DATABASE power KEEP 36500;`, in order to enlarge the time period to 100 years.
-
-### What should I do if some dependencies of a plugin can't be found?
-
-I this plugin is not necessary for you, just remove it from pom.xml under project's root directory.
@@ -19,11 +19,31 @@
     </properties>

     <dependencies>

+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+            <version>1.2.78</version>
+        </dependency>
+
         <dependency>
             <groupId>com.taosdata.jdbc</groupId>
             <artifactId>taos-jdbcdriver</artifactId>
-            <version>2.0.34</version>
+            <version>2.0.37</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.alibaba</groupId>
+                    <artifactId>fastjson</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>

+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>${commons-lang3-version}</version>
+        </dependency>
+
         <dependency>
             <groupId>com.alibaba.datax</groupId>
             <artifactId>datax-common</artifactId>
@@ -36,12 +56,6 @@
             </exclusions>
         </dependency>

-        <dependency>
-            <groupId>com.taosdata.jdbc</groupId>
-            <artifactId>taos-jdbcdriver</artifactId>
-            <version>2.0.34</version>
-        </dependency>
-
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
@@ -49,10 +63,27 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <version>${commons-lang3-version}</version>
+            <groupId>com.alibaba.datax</groupId>
+            <artifactId>datax-core</artifactId>
+            <version>0.0.1-SNAPSHOT</version>
+            <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>5.1.49</version>
+            <scope>test</scope>
+        </dependency>
+        <!-- 添加 dm8 jdbc jar 包依赖-->
+        <dependency>
+            <groupId>com.dameng</groupId>
+            <artifactId>dm-jdbc</artifactId>
+            <version>1.8</version>
+            <scope>system</scope>
+            <systemPath>${project.basedir}/src/test/resources/DmJdbcDriver18.jar
+            </systemPath>
+        </dependency>

     </dependencies>

     <build>
@@ -0,0 +1,24 @@
+package com.alibaba.datax.plugin.writer.tdenginewriter;
+
+public class ColumnMeta {
+    String field;
+    String type;
+    int length;
+    String note;
+    boolean isTag;
+    boolean isPrimaryKey;
+    Object value;
+
+    @Override
+    public String toString() {
+        return "ColumnMeta{" +
+                "field='" + field + '\'' +
+                ", type='" + type + '\'' +
+                ", length=" + length +
+                ", note='" + note + '\'' +
+                ", isTag=" + isTag +
+                ", isPrimaryKey=" + isPrimaryKey +
+                ", value=" + value +
+                '}';
+    }
+}
@@ -0,0 +1,8 @@
+package com.alibaba.datax.plugin.writer.tdenginewriter;
+
+public class Constants {
+    public static final String DEFAULT_USERNAME = "root";
+    public static final String DEFAULT_PASSWORD = "taosdata";
+    public static final int DEFAULT_BATCH_SIZE = 1;
+    public static final boolean DEFAULT_IGNORE_TAGS_UNMATCHED = false;
+}
@@ -1,12 +1,8 @@
 package com.alibaba.datax.plugin.writer.tdenginewriter;

 import com.alibaba.datax.common.plugin.RecordReceiver;
 import com.alibaba.datax.common.plugin.TaskPluginCollector;

-import java.util.Properties;
-
 public interface DataHandler {

-    long handle(RecordReceiver lineReceiver, Properties properties, TaskPluginCollector collector);
+    int handle(RecordReceiver lineReceiver, TaskPluginCollector collector);
 }
|
|||||||
package com.alibaba.datax.plugin.writer.tdenginewriter;
|
|
||||||
|
|
||||||
public class DataHandlerFactory {
|
|
||||||
|
|
||||||
public static DataHandler build(String peerPluginName) {
|
|
||||||
if (peerPluginName.equals("opentsdbreader"))
|
|
||||||
return new OpentsdbDataHandler();
|
|
||||||
return new DefaultDataHandler();
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,108 +1,463 @@
|
|||||||
package com.alibaba.datax.plugin.writer.tdenginewriter;
|
package com.alibaba.datax.plugin.writer.tdenginewriter;
|
||||||
|
|
||||||
|
import com.alibaba.datax.common.element.Column;
|
||||||
import com.alibaba.datax.common.element.Record;
|
import com.alibaba.datax.common.element.Record;
|
||||||
|
import com.alibaba.datax.common.exception.DataXException;
|
||||||
import com.alibaba.datax.common.plugin.RecordReceiver;
|
import com.alibaba.datax.common.plugin.RecordReceiver;
|
||||||
import com.alibaba.datax.common.plugin.TaskPluginCollector;
|
import com.alibaba.datax.common.plugin.TaskPluginCollector;
|
||||||
import com.taosdata.jdbc.TSDBDriver;
|
import com.alibaba.datax.common.util.Configuration;
|
||||||
import com.taosdata.jdbc.TSDBPreparedStatement;
|
import com.taosdata.jdbc.SchemalessWriter;
|
||||||
|
import com.taosdata.jdbc.enums.SchemalessProtocolType;
|
||||||
|
import com.taosdata.jdbc.enums.SchemalessTimestampType;
|
||||||
|
import com.taosdata.jdbc.utils.Utils;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import java.sql.Connection;
|
import java.sql.*;
|
||||||
import java.sql.DriverManager;
|
import java.util.*;
|
||||||
import java.sql.SQLException;
|
import java.util.Date;
|
||||||
import java.util.Properties;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
/**
|
|
||||||
* 默认DataHandler
|
|
||||||
*/
|
|
||||||
public class DefaultDataHandler implements DataHandler {
|
public class DefaultDataHandler implements DataHandler {
|
||||||
private static final Logger LOG = LoggerFactory.getLogger(DefaultDataHandler.class);
|
private static final Logger LOG = LoggerFactory.getLogger(DefaultDataHandler.class);
|
||||||
|
|
||||||
static {
|
private String username;
|
||||||
try {
|
private String password;
|
||||||
Class.forName("com.taosdata.jdbc.TSDBDriver");
|
private String jdbcUrl;
|
||||||
} catch (ClassNotFoundException e) {
|
private int batchSize;
|
||||||
e.printStackTrace();
|
private boolean ignoreTagsUnmatched;
|
||||||
}
|
|
||||||
|
private List<String> tables;
|
||||||
|
private List<String> columns;
|
||||||
|
|
||||||
|
private Map<String, TableMeta> tableMetas;
|
||||||
|
private SchemaManager schemaManager;
|
||||||
|
|
||||||
|
public void setTableMetas(Map<String, TableMeta> tableMetas) {
|
||||||
|
this.tableMetas = tableMetas;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setColumnMetas(Map<String, List<ColumnMeta>> columnMetas) {
|
||||||
|
this.columnMetas = columnMetas;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setSchemaManager(SchemaManager schemaManager) {
|
||||||
|
this.schemaManager = schemaManager;
|
||||||
|
}
|
||||||
|
|
||||||
|
private Map<String, List<ColumnMeta>> columnMetas;
|
||||||
|
|
||||||
|
public DefaultDataHandler(Configuration configuration) {
|
||||||
|
this.username = configuration.getString(Key.USERNAME, Constants.DEFAULT_USERNAME);
|
||||||
|
this.password = configuration.getString(Key.PASSWORD, Constants.DEFAULT_PASSWORD);
|
||||||
|
this.jdbcUrl = configuration.getString(Key.JDBC_URL);
|
||||||
|
this.batchSize = configuration.getInt(Key.BATCH_SIZE, Constants.DEFAULT_BATCH_SIZE);
|
||||||
|
this.tables = configuration.getList(Key.TABLE, String.class);
|
||||||
|
this.columns = configuration.getList(Key.COLUMN, String.class);
|
||||||
|
this.ignoreTagsUnmatched = configuration.getBool(Key.IGNORE_TAGS_UNMATCHED, Constants.DEFAULT_IGNORE_TAGS_UNMATCHED);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public long handle(RecordReceiver lineReceiver, Properties properties, TaskPluginCollector collector) {
|
public int handle(RecordReceiver lineReceiver, TaskPluginCollector collector) {
|
||||||
SchemaManager schemaManager = new SchemaManager(properties);
|
int count = 0;
|
||||||
if (!schemaManager.configValid()) {
|
int affectedRows = 0;
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
try (Connection conn = DriverManager.getConnection(jdbcUrl, username, password)) {
|
||||||
Connection conn = getTaosConnection(properties);
|
LOG.info("connection[ jdbcUrl: " + jdbcUrl + ", username: " + username + "] established.");
|
||||||
if (conn == null) {
|
// prepare table_name -> table_meta
|
||||||
return 0;
|
this.schemaManager = new SchemaManager(conn);
|
||||||
}
|
this.tableMetas = schemaManager.loadTableMeta(tables);
|
||||||
if (schemaManager.shouldGuessSchema()) {
|
// prepare table_name -> column_meta
|
||||||
// 无法从配置文件获取表结构信息,尝试从数据库获取
|
this.columnMetas = schemaManager.loadColumnMetas(tables);
|
||||||
LOG.info(Msg.get("try_get_schema_from_db"));
|
|
||||||
boolean success = schemaManager.getFromDB(conn);
|
List<Record> recordBatch = new ArrayList<>();
|
||||||
if (!success) {
|
Record record;
|
||||||
return 0;
|
for (int i = 1; (record = lineReceiver.getFromReader()) != null; i++) {
|
||||||
|
if (i % batchSize != 0) {
|
||||||
|
recordBatch.add(record);
|
||||||
|
} else {
|
||||||
|
affectedRows = writeBatch(conn, recordBatch);
|
||||||
|
recordBatch.clear();
|
||||||
}
|
}
|
||||||
} else {
|
count++;
|
||||||
|
|
||||||
}
|
}
|
||||||
int batchSize = Integer.parseInt(properties.getProperty(Key.BATCH_SIZE, "1000"));
|
|
||||||
if (batchSize < 5) {
|
if (!recordBatch.isEmpty()) {
|
||||||
// batchSize太小,会增加自动类型推断错误的概率,建议改大后重试
|
affectedRows = writeBatch(conn, recordBatch);
|
||||||
LOG.error(Msg.get("batch_size_too_small"));
|
recordBatch.clear();
|
||||||
return 0;
|
|
||||||
}
|
}
|
||||||
return write(lineReceiver, conn, batchSize, schemaManager, collector);
|
} catch (SQLException e) {
|
||||||
} catch (Exception e) {
|
throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e.getMessage());
|
||||||
LOG.error("write failed " + e.getMessage());
|
|
||||||
e.printStackTrace();
|
|
||||||
}
|
}
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
if (affectedRows != count) {
|
||||||
private Connection getTaosConnection(Properties properties) throws SQLException {
|
LOG.error("write record missing or incorrect happened, affectedRows: " + affectedRows + ", total: " + count);
|
||||||
// 检查必要参数
|
|
||||||
String host = properties.getProperty(Key.HOST);
|
|
||||||
String port = properties.getProperty(Key.PORT);
|
|
||||||
String dbname = properties.getProperty(Key.DBNAME);
|
|
||||||
String user = properties.getProperty(Key.USER);
|
|
||||||
String password = properties.getProperty(Key.PASSWORD);
|
|
||||||
if (host == null || port == null || dbname == null || user == null || password == null) {
|
|
||||||
String keys = String.join(" ", Key.HOST, Key.PORT, Key.DBNAME, Key.USER, Key.PASSWORD);
|
|
||||||
LOG.error("Required options missing, please check: " + keys);
|
|
||||||
return null;
|
|
||||||
}
|
}
|
||||||
String jdbcUrl = String.format("jdbc:TAOS://%s:%s/%s?user=%s&password=%s", host, port, dbname, user, password);
|
|
||||||
Properties connProps = new Properties();
|
return affectedRows;
|
||||||
connProps.setProperty(TSDBDriver.PROPERTY_KEY_CHARSET, "UTF-8");
|
|
||||||
LOG.info("TDengine connection established, host:{} port:{} dbname:{} user:{}", host, port, dbname, user);
|
|
||||||
return DriverManager.getConnection(jdbcUrl, connProps);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* 使用SQL批量写入<br/>
|
* table: [ "stb1", "stb2", "tb1", "tb2", "t1" ]
|
||||||
*
|
* stb1[ts,f1,f2] tags:[t1]
|
||||||
* @return 成功写入记录数
|
* stb2[ts,f1,f2,f3] tags:[t1,t2]
|
||||||
* @throws SQLException
|
* 1. tables 表的的类型分成:stb(super table)/tb(sub table)/t(original table)
|
||||||
|
* 2. 对于stb,自动建表/schemaless
|
||||||
|
* 2.1: data中有tbname字段, 例如:data: [ts, f1, f2, f3, t1, t2, tbname] tbColumn: [ts, f1, f2, t1] => insert into tbname using stb1 tags(t1) values(ts, f1, f2)
|
||||||
|
* 2.2: data中没有tbname字段,例如:data: [ts, f1, f2, f3, t1, t2] tbColumn: [ts, f1, f2, t1] => schemaless: stb1,t1=t1 f1=f1,f2=f2 ts, 没有批量写
|
||||||
|
* 3. 对于tb,拼sql,例如:data: [ts, f1, f2, f3, t1, t2] tbColumn: [ts, f1, f2, t1] => insert into tb(ts, f1, f2) values(ts, f1, f2)
|
||||||
|
* 4. 对于t,拼sql,例如:data: [ts, f1, f2, f3, t1, t2] tbColumn: [ts, f1, f2, f3, t1, t2] insert into t(ts, f1, f2, f3, t1, t2) values(ts, f1, f2, f3, t1, t2)
|
||||||
*/
|
*/
|
||||||
private long write(RecordReceiver lineReceiver, Connection conn, int batchSize, SchemaManager scm, TaskPluginCollector collector) throws SQLException {
|
public int writeBatch(Connection conn, List<Record> recordBatch) {
|
||||||
Record record = lineReceiver.getFromReader();
|
int affectedRows = 0;
|
||||||
if (record == null) {
|
for (String table : tables) {
|
||||||
return 0;
|
TableMeta tableMeta = tableMetas.get(table);
|
||||||
|
switch (tableMeta.tableType) {
|
||||||
|
case SUP_TABLE: {
|
||||||
|
if (columns.contains("tbname"))
|
||||||
|
affectedRows += writeBatchToSupTableBySQL(conn, table, recordBatch);
|
||||||
|
else
|
||||||
|
affectedRows += writeBatchToSupTableBySchemaless(conn, table, recordBatch);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
case SUB_TABLE:
|
||||||
|
affectedRows += writeBatchToSubTable(conn, table, recordBatch);
|
||||||
|
break;
|
||||||
|
case NML_TABLE:
|
||||||
|
default:
|
||||||
|
affectedRows += writeBatchToNormalTable(conn, table, recordBatch);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
String pq = String.format("INSERT INTO ? USING %s TAGS(%s) (%s) values (%s)", scm.getStable(), scm.getTagValuesPlaceHolder(), scm.getJoinedFieldNames(), scm.getFieldValuesPlaceHolder());
|
return affectedRows;
|
||||||
LOG.info("Prepared SQL: {}", pq);
|
}
|
||||||
try (TSDBPreparedStatement stmt = (TSDBPreparedStatement) conn.prepareStatement(pq)) {
|
|
||||||
JDBCBatchWriter batchWriter = new JDBCBatchWriter(conn, stmt, scm, batchSize, collector);
|
/**
|
||||||
do {
|
* insert into record[idx(tbname)] using table tags(record[idx(t1)]) (ts, f1, f2, f3) values(record[idx(ts)], record[idx(f1)], )
|
||||||
batchWriter.append(record);
|
* record[idx(tbname)] using table tags(record[idx(t1)]) (ts, f1, f2, f3) values(record[idx(ts)], record[idx(f1)], )
|
||||||
} while ((record = lineReceiver.getFromReader()) != null);
|
* record[idx(tbname)] using table tags(record[idx(t1)]) (ts, f1, f2, f3) values(record[idx(ts)], record[idx(f1)], )
|
||||||
batchWriter.flush();
|
*/
|
||||||
return batchWriter.getCount();
|
private int writeBatchToSupTableBySQL(Connection conn, String table, List<Record> recordBatch) {
|
||||||
|
List<ColumnMeta> columnMetas = this.columnMetas.get(table);
|
||||||
|
|
||||||
|
StringBuilder sb = new StringBuilder("insert into");
|
||||||
|
for (Record record : recordBatch) {
|
||||||
|
sb.append(" ").append(record.getColumn(indexOf("tbname")).asString())
|
||||||
|
.append(" using ").append(table)
|
||||||
|
.append(" tags")
|
||||||
|
.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
|
||||||
|
return colMeta.isTag;
|
||||||
|
}).map(colMeta -> {
|
||||||
|
return buildColumnValue(colMeta, record);
|
||||||
|
}).collect(Collectors.joining(",", "(", ")")))
|
||||||
|
.append(" ")
|
||||||
|
.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
|
||||||
|
return !colMeta.isTag;
|
||||||
|
}).map(colMeta -> {
|
||||||
|
return colMeta.field;
|
||||||
|
}).collect(Collectors.joining(",", "(", ")")))
|
||||||
|
.append(" values")
|
||||||
|
.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
|
||||||
|
return !colMeta.isTag;
|
||||||
|
}).map(colMeta -> {
|
||||||
|
return buildColumnValue(colMeta, record);
|
||||||
|
}).collect(Collectors.joining(",", "(", ")")));
|
||||||
|
}
|
||||||
|
String sql = sb.toString();
|
||||||
|
|
||||||
|
return executeUpdate(conn, sql);
|
||||||
|
}
    private int executeUpdate(Connection conn, String sql) throws DataXException {
        int count;
        try (Statement stmt = conn.createStatement()) {
            LOG.debug(">>> " + sql);
            count = stmt.executeUpdate(sql);
        } catch (SQLException e) {
            throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e.getMessage());
        }
        return count;
    }

    private String buildColumnValue(ColumnMeta colMeta, Record record) {
        Column column = record.getColumn(indexOf(colMeta.field));
        TimestampPrecision timestampPrecision = schemaManager.loadDatabasePrecision();
        switch (column.getType()) {
            case DATE: {
                Date value = column.asDate();
                switch (timestampPrecision) {
                    case MILLISEC:
                        return "" + (value.getTime());
                    case MICROSEC:
                        return "" + (value.getTime() * 1000);
                    case NANOSEC:
                        return "" + (value.getTime() * 1000_000);
                    default:
                        return "'" + column.asString() + "'";
                }
            }
            case BYTES:
            case STRING:
                if (colMeta.type.equals("TIMESTAMP"))
                    return "\"" + column.asString() + "\"";
                String value = column.asString();
                return "'" + Utils.escapeSingleQuota(value) + "'";
            case NULL:
            case BAD:
                return "NULL";
            case BOOL:
            case DOUBLE:
            case INT:
            case LONG:
            default:
                return column.asString();
        }
    }
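
A quick illustration of the rendering rules above (sample values hypothetical): a DATE column on a millisecond-precision database becomes the bare epoch value 1648432611249, scaled by 1000 or 1000000 for us/ns databases; a STRING column bound to a TIMESTAMP target is double-quoted; any other string is single-quoted with embedded single quotes escaped via Utils.escapeSingleQuota; NULL and BAD columns become the literal NULL; numeric and boolean columns pass through unquoted.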
    /**
     * table: ["stb1"], column: ["ts", "f1", "f2", "t1"]
     * data: [ts, f1, f2, f3, t1, t2] tbColumn: [ts, f1, f2, t1] => schemaless: stb1,t1=t1 f1=f1,f2=f2 ts
     */
    private int writeBatchToSupTableBySchemaless(Connection conn, String table, List<Record> recordBatch) {
        int count = 0;
        TimestampPrecision timestampPrecision = schemaManager.loadDatabasePrecision();

        List<ColumnMeta> columnMetaList = this.columnMetas.get(table);
        ColumnMeta ts = columnMetaList.stream().filter(colMeta -> colMeta.isPrimaryKey).findFirst().get();

        List<String> lines = new ArrayList<>();
        for (Record record : recordBatch) {
            StringBuilder sb = new StringBuilder();
            sb.append(table).append(",")
                    .append(columnMetaList.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
                        return colMeta.isTag;
                    }).map(colMeta -> {
                        String value = record.getColumn(indexOf(colMeta.field)).asString();
                        if (value.contains(" "))
                            value = value.replace(" ", "\\ ");
                        return colMeta.field + "=" + value;
                    }).collect(Collectors.joining(",")))
                    .append(" ")
                    .append(columnMetaList.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
                        return !colMeta.isTag && !colMeta.isPrimaryKey;
                    }).map(colMeta -> {
                        return colMeta.field + "=" + buildSchemalessColumnValue(colMeta, record);
                        // return colMeta.field + "=" + record.getColumn(indexOf(colMeta.field)).asString();
                    }).collect(Collectors.joining(",")))
                    .append(" ");
            // timestamp
            Column column = record.getColumn(indexOf(ts.field));
            Object tsValue = column.getRawData();
            if (column.getType() == Column.Type.DATE && tsValue instanceof Date) {
                long time = column.asDate().getTime();
                switch (timestampPrecision) {
                    case NANOSEC:
                        sb.append(time * 1000000);
                        break;
                    case MICROSEC:
                        sb.append(time * 1000);
                        break;
                    case MILLISEC:
                    default:
                        sb.append(time);
                }
            } else if (column.getType() == Column.Type.STRING) {
                sb.append(Utils.parseTimestamp(column.asString()));
            } else {
                sb.append(column.asLong());
            }
            String line = sb.toString();
            LOG.debug(">>> " + line);
            lines.add(line);
            count++;
        }

        SchemalessWriter writer = new SchemalessWriter(conn);
        SchemalessTimestampType timestampType;
        switch (timestampPrecision) {
            case NANOSEC:
                timestampType = SchemalessTimestampType.NANO_SECONDS;
                break;
            case MICROSEC:
                timestampType = SchemalessTimestampType.MICRO_SECONDS;
                break;
            case MILLISEC:
                timestampType = SchemalessTimestampType.MILLI_SECONDS;
                break;
            default:
                timestampType = SchemalessTimestampType.NOT_CONFIGURED;
        }
        try {
            writer.write(lines, SchemalessProtocolType.LINE, timestampType);
        } catch (SQLException e) {
            throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e.getMessage());
        }

        LOG.warn("schemalessWriter does not return affected rows!");
        return count;
    }
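
Concretely, for a supertable stb1 with tag t1, a DOUBLE field f1 and an INT field f2 on a millisecond-precision database, one buffered line would look like (values hypothetical)

    stb1,t1=beijing f1=12.3f64,f2=45i32 1648432611249

i.e. TDengine's InfluxDB-style line protocol: measurement plus tags, a space, the field set, a space, the timestamp.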
    private long dateAsLong(Column column) {
        TimestampPrecision timestampPrecision = schemaManager.loadDatabasePrecision();
        long time = column.asDate().getTime();
        switch (timestampPrecision) {
            case NANOSEC:
                return time * 1000000;
            case MICROSEC:
                return time * 1000;
            case MILLISEC:
            default:
                return time;
        }
    }

    private String buildSchemalessColumnValue(ColumnMeta colMeta, Record record) {
        Column column = record.getColumn(indexOf(colMeta.field));
        switch (column.getType()) {
            case DATE:
                if (colMeta.type.equals("TIMESTAMP"))
                    return dateAsLong(column) + "i64";
                return "L'" + column.asString() + "'";
            case NULL:
            case BAD:
                return "NULL";
            case DOUBLE: {
                if (colMeta.type.equals("FLOAT"))
                    return column.asString() + "f32";
                if (colMeta.type.equals("DOUBLE"))
                    return column.asString() + "f64";
            }
            case INT:
            case LONG: {
                if (colMeta.type.equals("TINYINT"))
                    return column.asString() + "i8";
                if (colMeta.type.equals("SMALLINT"))
                    return column.asString() + "i16";
                if (colMeta.type.equals("INT"))
                    return column.asString() + "i32";
                if (colMeta.type.equals("BIGINT"))
                    return column.asString() + "i64";
            }
            case BYTES:
            case STRING:
                if (colMeta.type.equals("TIMESTAMP"))
                    return column.asString() + "i64";
                String value = column.asString();
                value = value.replace("\"", "\\\"");
                if (colMeta.type.startsWith("BINARY"))
                    return "\"" + value + "\"";
                if (colMeta.type.startsWith("NCHAR"))
                    return "L\"" + value + "\"";
            case BOOL:
            default:
                return column.asString();
        }
    }
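
The suffixes follow TDengine's schemaless typing convention, keyed off the target column type rather than the DataX column type; for example (illustrative values):

    TINYINT -> 127i8, SMALLINT -> 100i16, INT -> 32767i32, BIGINT -> 22i64,
    FLOAT -> 1.6f32, DOUBLE -> 3.14f64, BINARY -> "passit", NCHAR -> L"标签"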
    /**
     * table: ["tb1"], column: [tbname, ts, f1, f2, t1]
     * if contains("tbname") and tbname != tb1 continue;
     * else if t1 != record[idx(t1)] or t2 != record[idx(t2)]... continue;
     * else
     * insert into tb1 (ts, f1, f2) values( record[idx(ts)], record[idx(f1)], record[idx(f2)])
     */
    private int writeBatchToSubTable(Connection conn, String table, List<Record> recordBatch) {
        List<ColumnMeta> columnMetas = this.columnMetas.get(table);

        StringBuilder sb = new StringBuilder();
        sb.append("insert into ").append(table).append(" ")
                .append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
                    return !colMeta.isTag;
                }).map(colMeta -> {
                    return colMeta.field;
                }).collect(Collectors.joining(",", "(", ")")))
                .append(" values");
        int validRecords = 0;
        for (Record record : recordBatch) {
            if (columns.contains("tbname") && !table.equals(record.getColumn(indexOf("tbname")).asString()))
                continue;

            boolean tagsAllMatch = columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
                return colMeta.isTag;
            }).allMatch(colMeta -> {
                Column column = record.getColumn(indexOf(colMeta.field));
                boolean equals = equals(column, colMeta);
                return equals;
            });

            if (ignoreTagsUnmatched && !tagsAllMatch)
                continue;

            sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).filter(colMeta -> {
                return !colMeta.isTag;
            }).map(colMeta -> {
                return buildColumnValue(colMeta, record);
            }).collect(Collectors.joining(", ", "(", ") ")));
            validRecords++;
        }

        if (validRecords == 0) {
            LOG.warn("no valid records in this batch");
            return 0;
        }

        String sql = sb.toString();
        return executeUpdate(conn, sql);
    }
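
For a sub-table tb1 with fields ts, f1, f2 and two records that pass the tbname and tag checks, the statement comes out as (values hypothetical)

    insert into tb1 (ts,f1,f2) values(1648432611249, 12.3, 45) (1648432611250, 13.1, 46)

Records whose tags disagree with the sub-table's stored tag values are skipped when ignoreTagsUnmatched is true, and written as-is otherwise.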
    private boolean equals(Column column, ColumnMeta colMeta) {
        switch (column.getType()) {
            case BOOL:
                return column.asBoolean().equals(Boolean.valueOf(colMeta.value.toString()));
            case INT:
            case LONG:
                return column.asLong().equals(Long.valueOf(colMeta.value.toString()));
            case DOUBLE:
                return column.asDouble().equals(Double.valueOf(colMeta.value.toString()));
            case NULL:
                return colMeta.value == null;
            case DATE:
                return column.asDate().getTime() == ((Timestamp) colMeta.value).getTime();
            case BAD:
            case BYTES:
                return Arrays.equals(column.asBytes(), (byte[]) colMeta.value);
            case STRING:
            default:
                return column.asString().equals(colMeta.value.toString());
        }
    }
    /**
     * table: ["weather"], column: ["ts, f1, f2, f3, t1, t2"]
     * sql: insert into weather (ts, f1, f2, f3, t1, t2) values( record[idx(ts), record[idx(f1)], ...)
     */
    private int writeBatchToNormalTable(Connection conn, String table, List<Record> recordBatch) {
        List<ColumnMeta> columnMetas = this.columnMetas.get(table);

        StringBuilder sb = new StringBuilder();
        sb.append("insert into ").append(table)
                .append(" ")
                .append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).map(colMeta -> {
                    return colMeta.field;
                }).collect(Collectors.joining(",", "(", ")")))
                .append(" values ");

        for (Record record : recordBatch) {
            sb.append(columnMetas.stream().filter(colMeta -> columns.contains(colMeta.field)).map(colMeta -> {
                return buildColumnValue(colMeta, record);
            }).collect(Collectors.joining(",", "(", ")")));
        }

        String sql = sb.toString();
        return executeUpdate(conn, sql);
    }
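
For a normal table this degenerates to a plain multi-row insert, e.g. (hypothetical values)

    insert into weather (ts,f1,t1) values(1648432611249,12.3,'bj')(1648432611250,13.1,'sh')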
    private int indexOf(String colName) throws DataXException {
        for (int i = 0; i < columns.size(); i++) {
            if (columns.get(i).equals(colName))
                return i;
        }
        throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION,
                "cannot find col: " + colName + " in columns: " + columns);
    }
}
@ -1,244 +0,0 @@ (JDBCBatchWriter.java, removed)
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.datax.common.element.Column;
import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.exception.DataXException;
import com.alibaba.datax.common.plugin.TaskPluginCollector;
import com.taosdata.jdbc.TSDBPreparedStatement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Batch writer built on the JDBC native write (parameter binding) API.
 * Two constraints make the batching logic complex enough to need a dedicated class:
 * 1. the records of one batch must be collected into an ArrayList up front, and
 * 2. every row written in one batch must target the same table.
 * The implementation therefore:
 * 1. buffers incoming Records that belong to the same sub-table,
 * 2. automatically executes a batch write once a buffer reaches the batchSize threshold, and
 * 3. requires an explicit flush() from the caller to write the final partial batches.
 */
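In the old DefaultDataHandler (see the removed call site further up) the class was driven roughly like this:

    JDBCBatchWriter batchWriter = new JDBCBatchWriter(conn, stmt, scm, batchSize, collector);
    Record record;
    while ((record = lineReceiver.getFromReader()) != null) {
        batchWriter.append(record);   // buffers per computed sub-table; full batches flush automatically
    }
    batchWriter.flush();              // push the remaining partial batches
    long written = batchWriter.getCount();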
public class JDBCBatchWriter {
    public static final Logger LOG = LoggerFactory.getLogger(JDBCBatchWriter.class);
    private TSDBPreparedStatement stmt;
    private SchemaManager scm;
    private Connection conn;
    private int batchSize;
    private TaskPluginCollector collector;

    // buffered Records, keyed by sub-table name
    Map<String, List<Record>> buf = new HashMap<>();
    // cached tag values of each table, keyed by sub-table name
    Map<String, String[]> tableTagValues = new HashMap<>();
    private long sucCount = 0;
    private final int tsColIndex;
    private List<String> fieldList;
    // the minimum number of columns each record must contain, used for validation
    private int minColNum = 0;
    private Map<String, Integer> fieldIndexMap;
    private List<Column.Type> fieldTypes = null;

    public JDBCBatchWriter(Connection conn, TSDBPreparedStatement stmt, SchemaManager scm, int batchSize, TaskPluginCollector collector) {
        this.conn = conn;
        this.stmt = stmt;
        this.scm = scm;
        this.batchSize = batchSize;
        this.collector = collector;
        this.tsColIndex = scm.getTsColIndex();
        this.fieldList = scm.getFieldList();
        this.fieldIndexMap = scm.getFieldIndexMap();
        this.minColNum = 1 + fieldList.size() + scm.getDynamicTagCount();
    }

    public void initFiledTypesAndTargetTable(List<Record> records) throws SQLException {
        if (fieldTypes != null) {
            return;
        }
        guessFieldTypes(records);
        if (scm.shouldCreateTable()) {
            scm.createSTable(conn, fieldTypes);
        }
    }

    public void append(Record record) throws SQLException {
        int columnNum = record.getColumnNumber();
        if (columnNum < minColNum) {
            // the actual column count is smaller than expected
            collector.collectDirtyRecord(record, Msg.get("column_number_error"));
            return;
        }
        String[] tagValues = scm.getTagValuesFromRecord(record);
        if (tagValues == null) {
            // a tag column contains null
            collector.collectDirtyRecord(record, Msg.get("tag_value_error"));
            return;
        }
        if (!scm.hasTimestamp(record)) {
            // the timestamp column is null or has the wrong type
            collector.collectDirtyRecord(record, Msg.get("ts_value_error"));
            return;
        }
        String tableName = scm.computeTableName(tagValues);
        if (buf.containsKey(tableName)) {
            List<Record> lis = buf.get(tableName);
            lis.add(record);
            if (lis.size() == batchSize) {
                if (fieldTypes == null) {
                    initFiledTypesAndTargetTable(lis);
                }
                executeBatch(tableName);
                lis.clear();
            }
        } else {
            List<Record> lis = new ArrayList<>(batchSize);
            lis.add(record);
            buf.put(tableName, lis);
            tableTagValues.put(tableName, tagValues);
        }
    }

    /**
     * Only String needs special handling: testing shows that a null column is converted to the String type,
     * so a Column of type String does not necessarily mean the underlying column really is a String.
     *
     * @param records
     */
    private void guessFieldTypes(List<Record> records) {
        fieldTypes = new ArrayList<>(fieldList.size());
        for (int i = 0; i < fieldList.size(); ++i) {
            int colIndex = fieldIndexMap.get(fieldList.get(i));
            boolean ok = false;
            for (int j = 0; j < records.size() && !ok; ++j) {
                Column column = records.get(j).getColumn(colIndex);
                Column.Type type = column.getType();
                switch (type) {
                    case LONG:
                    case DOUBLE:
                    case DATE:
                    case BOOL:
                    case BYTES:
                        if (column.getRawData() != null) {
                            fieldTypes.add(type);
                            ok = true;
                        }
                        break;
                    case STRING:
                        // only a non-null, non-empty String column is really treated as String
                        String value = column.asString();
                        if (value != null && !"".equals(value)) {
                            fieldTypes.add(type);
                            ok = true;
                        }
                        break;
                    default:
                        throw DataXException.asDataXException(TDengineWriterErrorCode.TYPE_ERROR, fieldTypes.get(i).toString());
                }
            }
            if (!ok) {
                // the type of column %d cannot be inferred from the %d sampled records
                throw DataXException.asDataXException(TDengineWriterErrorCode.TYPE_ERROR, String.format(Msg.get("infer_column_type_error"), records.size(), i + 1));
            }
        }
        LOG.info("Field Types: {}", fieldTypes);
    }

    /**
     * Execute a batched write for a single table.
     *
     * @param tableName
     * @throws SQLException
     */
    private void executeBatch(String tableName) throws SQLException {
        // table name
        stmt.setTableName(tableName);
        List<Record> records = buf.get(tableName);
        // tags
        String[] tagValues = tableTagValues.get(tableName);
        LOG.debug("executeBatch {}", String.join(",", tagValues));
        for (int i = 0; i < tagValues.length; ++i) {
            stmt.setTagNString(i, tagValues[i]);
        }
        // timestamps
        ArrayList<Long> tsList = records.stream().map(r -> r.getColumn(tsColIndex).asDate().getTime()).collect(Collectors.toCollection(ArrayList::new));
        stmt.setTimestamp(0, tsList);
        // fields
        for (int i = 0; i < fieldList.size(); ) {
            String fieldName = fieldList.get(i);
            int index = fieldIndexMap.get(fieldName);
            switch (fieldTypes.get(i)) {
                case LONG:
                    ArrayList<Long> lisLong = records.stream().map(r -> r.getColumn(index).asBigInteger().longValue()).collect(Collectors.toCollection(ArrayList::new));
                    stmt.setLong(++i, lisLong);
                    break;
                case DOUBLE:
                    ArrayList<Double> lisDouble = records.stream().map(r -> r.getColumn(index).asDouble()).collect(Collectors.toCollection(ArrayList::new));
                    stmt.setDouble(++i, lisDouble);
                    break;
                case STRING:
                    ArrayList<String> lisString = records.stream().map(r -> r.getColumn(index).asString()).collect(Collectors.toCollection(ArrayList::new));
                    stmt.setNString(++i, lisString, 64);
                    break;
                case DATE:
                    ArrayList<Long> lisTs = records.stream().map(r -> r.getColumn(index).asBigInteger().longValue()).collect(Collectors.toCollection(ArrayList::new));
                    stmt.setTimestamp(++i, lisTs);
                    break;
                case BOOL:
                    ArrayList<Boolean> lisBool = records.stream().map(r -> r.getColumn(index).asBoolean()).collect(Collectors.toCollection(ArrayList::new));
                    stmt.setBoolean(++i, lisBool);
                    break;
                case BYTES:
                    ArrayList<String> lisBytes = records.stream().map(r -> r.getColumn(index).asString()).collect(Collectors.toCollection(ArrayList::new));
                    stmt.setString(++i, lisBytes, 64);
                    break;
                default:
                    throw DataXException.asDataXException(TDengineWriterErrorCode.TYPE_ERROR, fieldTypes.get(i).toString());
            }
        }
        // execute
        stmt.columnDataAddBatch();
        stmt.columnDataExecuteBatch();
        // update the success counter
        sucCount += records.size();
    }

    /**
     * Write out every buffered Record.
     */
    public void flush() throws SQLException {
        if (fieldTypes == null) {
            List<Record> records = new ArrayList<>();
            for (List<Record> lis : buf.values()) {
                records.addAll(lis);
                if (records.size() > 100) {
                    break;
                }
            }
            if (records.size() > 0) {
                initFiledTypesAndTargetTable(records);
            } else {
                return;
            }
        }
        for (String tabName : buf.keySet()) {
            if (buf.get(tabName).size() > 0) {
                executeBatch(tabName);
            }
        }
        stmt.columnDataCloseBatch();
    }

    /**
     * @return the number of records written successfully
     */
    public long getCount() {
        return sucCount;
    }
}
@ -1,89 +0,0 @@ (JniConnection.java, removed)
package com.alibaba.datax.plugin.writer.tdenginewriter;

import java.util.Properties;

public class JniConnection {

    private static final long JNI_NULL_POINTER = 0L;
    private static final int JNI_SUCCESSFUL = 0;
    public static final String PROPERTY_KEY_CONFIG_DIR = "cfgdir";
    public static final String PROPERTY_KEY_LOCALE = "locale";
    public static final String PROPERTY_KEY_CHARSET = "charset";
    public static final String PROPERTY_KEY_TIME_ZONE = "timezone";

    private long conn;

    static {
        System.loadLibrary("taos");
    }

    public JniConnection(Properties props) throws Exception {
        initImp(props.getProperty(PROPERTY_KEY_CONFIG_DIR, null));

        String locale = props.getProperty(PROPERTY_KEY_LOCALE);
        if (setOptions(0, locale) < 0) {
            throw new Exception("Failed to set locale: " + locale + ". System default will be used.");
        }
        String charset = props.getProperty(PROPERTY_KEY_CHARSET);
        if (setOptions(1, charset) < 0) {
            throw new Exception("Failed to set charset: " + charset + ". System default will be used.");
        }
        String timezone = props.getProperty(PROPERTY_KEY_TIME_ZONE);
        if (setOptions(2, timezone) < 0) {
            throw new Exception("Failed to set timezone: " + timezone + ". System default will be used.");
        }
    }

    public void open(String host, int port, String dbname, String user, String password) throws Exception {
        if (this.conn != JNI_NULL_POINTER) {
            close();
            this.conn = JNI_NULL_POINTER;
        }

        this.conn = connectImp(host, port, dbname, user, password);
        if (this.conn == JNI_NULL_POINTER) {
            String errMsg = getErrMsgImp(0);
            throw new Exception(errMsg);
        }
    }

    public void insertOpentsdbJson(String json) throws Exception {
        if (this.conn == JNI_NULL_POINTER) {
            throw new Exception("JNI connection is NULL");
        }

        long result = insertOpentsdbJson(json, this.conn);
        int errCode = getErrCodeImp(this.conn, result);
        if (errCode != JNI_SUCCESSFUL) {
            String errMsg = getErrMsgImp(result);
            freeResultSetImp(this.conn, result);
            throw new Exception(errMsg);
        }
        freeResultSetImp(this.conn, result);
    }

    public void close() throws Exception {
        int code = this.closeConnectionImp(this.conn);
        if (code != 0) {
            throw new Exception("JNI closeConnection failed");
        }
        this.conn = JNI_NULL_POINTER;
    }

    private static native void initImp(String configDir);

    private static native int setOptions(int optionIndex, String optionValue);

    private native long connectImp(String host, int port, String dbName, String user, String password);

    private native int getErrCodeImp(long connection, long pSql);

    private native String getErrMsgImp(long pSql);

    private native void freeResultSetImp(long connection, long pSql);

    private native int closeConnectionImp(long connection);

    private native long insertOpentsdbJson(String json, long pSql);

}
@ -1,14 +1,12 @@ (Key.java)
 package com.alibaba.datax.plugin.writer.tdenginewriter;

 public class Key {
-    public static final String HOST = "host";
-    public static final String PORT = "port";
-    public static final String DBNAME = "dbName";
-    public static final String USER = "username";
+    public static final String USERNAME = "username";
     public static final String PASSWORD = "password";
+    public static final String CONNECTION = "connection";
     public static final String BATCH_SIZE = "batchSize";
-    public static final String STABLE = "stable";
-    public static final String TAG_COLUMN = "tagColumn";
-    public static final String FIELD_COLUMN = "fieldColumn";
-    public static final String TIMESTAMP_COLUMN = "timestampColumn";
+    public static final String TABLE = "table";
+    public static final String JDBC_URL = "jdbcUrl";
+    public static final String COLUMN = "column";
+    public static final String IGNORE_TAGS_UNMATCHED = "ignoreTagsUnmatched";
 }
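
Read together with the checks in TDengineWriter.Job.init() further down, the new keys imply a writer job configuration of roughly this shape (host, database, table and column names are made-up placeholders):

    "writer": {
        "name": "tdenginewriter",
        "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": ["tbname", "ts", "f1", "t1"],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true,
            "connection": [{
                "table": ["stb1"],
                "jdbcUrl": "jdbc:TAOS://127.0.0.1:6030/test"
            }]
        }
    }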
@ -1,20 +0,0 @@ (Msg.java, removed)
package com.alibaba.datax.plugin.writer.tdenginewriter;

import java.util.Locale;
import java.util.ResourceBundle;

/**
 * i18n message util
 */
public class Msg {
    private static ResourceBundle bundle;

    static {
        bundle = ResourceBundle.getBundle("tdenginewritermsg", Locale.getDefault());
    }

    public static String get(String key) {
        return bundle.getString(key);
    }

}
@ -5,50 +5,50 @@ import com.alibaba.datax.common.element.Record; (OpentsdbDataHandler.java)
 import com.alibaba.datax.common.exception.DataXException;
 import com.alibaba.datax.common.plugin.RecordReceiver;
 import com.alibaba.datax.common.plugin.TaskPluginCollector;
+import com.alibaba.datax.common.util.Configuration;
+import com.taosdata.jdbc.SchemalessWriter;
+import com.taosdata.jdbc.enums.SchemalessProtocolType;
+import com.taosdata.jdbc.enums.SchemalessTimestampType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import java.util.Properties;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;

 public class OpentsdbDataHandler implements DataHandler {
     private static final Logger LOG = LoggerFactory.getLogger(OpentsdbDataHandler.class);
-    private static final String DEFAULT_BATCH_SIZE = "1";
+    private SchemalessWriter writer;
+
+    private String jdbcUrl;
+    private String user;
+    private String password;
+    int batchSize;
+
+    public OpentsdbDataHandler(Configuration config) {
+        // opentsdb json protocol use JNI and schemaless API to write
+        this.jdbcUrl = config.getString(Key.JDBC_URL);
+        this.user = config.getString(Key.USERNAME, "root");
+        this.password = config.getString(Key.PASSWORD, "taosdata");
+        this.batchSize = config.getInt(Key.BATCH_SIZE, Constants.DEFAULT_BATCH_SIZE);
+    }

     @Override
-    public long handle(RecordReceiver lineReceiver, Properties properties, TaskPluginCollector collector) {
-        // opentsdb json protocol use JNI and schemaless API to write
-        String host = properties.getProperty(Key.HOST);
-        int port = Integer.parseInt(properties.getProperty(Key.PORT));
-        String dbname = properties.getProperty(Key.DBNAME);
-        String user = properties.getProperty(Key.USER);
-        String password = properties.getProperty(Key.PASSWORD);
-
-        JniConnection conn = null;
-        long count = 0;
-        try {
-            conn = new JniConnection(properties);
-            conn.open(host, port, dbname, user, password);
-            LOG.info("TDengine connection established, host: " + host + ", port: " + port + ", dbname: " + dbname + ", user: " + user);
-            int batchSize = Integer.parseInt(properties.getProperty(Key.BATCH_SIZE, DEFAULT_BATCH_SIZE));
-            count = writeOpentsdb(lineReceiver, conn, batchSize);
+    public int handle(RecordReceiver lineReceiver, TaskPluginCollector collector) {
+        int count = 0;
+        try (Connection conn = DriverManager.getConnection(jdbcUrl, user, password);) {
+            LOG.info("connection[ jdbcUrl: " + jdbcUrl + ", username: " + user + "] established.");
+            writer = new SchemalessWriter(conn);
+            count = write(lineReceiver, batchSize);
         } catch (Exception e) {
-            LOG.error(e.getMessage());
-            e.printStackTrace();
-        } finally {
-            try {
-                if (conn != null)
-                    conn.close();
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-            LOG.info("TDengine connection closed");
+            throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e);
         }

         return count;
     }

-    private long writeOpentsdb(RecordReceiver lineReceiver, JniConnection conn, int batchSize) {
-        long recordIndex = 1;
+    private int write(RecordReceiver lineReceiver, int batchSize) throws DataXException {
+        int recordIndex = 1;
         try {
             Record record;
             StringBuilder sb = new StringBuilder();
@ -56,14 +56,14 @@ public class OpentsdbDataHandler implements DataHandler {
             if (batchSize == 1) {
                 String jsonData = recordToString(record);
                 LOG.debug(">>> " + jsonData);
-                conn.insertOpentsdbJson(jsonData);
+                writer.write(jsonData, SchemalessProtocolType.JSON, SchemalessTimestampType.NOT_CONFIGURED);
             } else if (recordIndex % batchSize == 1) {
                 sb.append("[").append(recordToString(record)).append(",");
             } else if (recordIndex % batchSize == 0) {
                 sb.append(recordToString(record)).append("]");
                 String jsonData = sb.toString();
                 LOG.debug(">>> " + jsonData);
-                conn.insertOpentsdbJson(jsonData);
+                writer.write(jsonData, SchemalessProtocolType.JSON, SchemalessTimestampType.NOT_CONFIGURED);
                 sb.delete(0, sb.length());
             } else {
                 sb.append(recordToString(record)).append(",");
@ -72,11 +72,11 @@ public class OpentsdbDataHandler implements DataHandler {
             }
             if (sb.length() != 0 && sb.charAt(0) == '[') {
                 String jsonData = sb.deleteCharAt(sb.length() - 1).append("]").toString();
+                System.err.println(jsonData);
                 LOG.debug(">>> " + jsonData);
-                conn.insertOpentsdbJson(jsonData);
+                writer.write(jsonData, SchemalessProtocolType.JSON, SchemalessTimestampType.NOT_CONFIGURED);
             }
         } catch (Exception e) {
-            LOG.error("TDengineWriter ERROR: " + e.getMessage());
             throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e);
         }
         return recordIndex - 1;
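With batchSize = 3, for instance, the writer accumulates something like (hypothetical OpenTSDB-style JSON records)

    [{"metric":"cpu","timestamp":1648432611249,"value":12.3,"tags":{"host":"web01"}},{...},{...}]

before handing the whole array to a single SchemalessWriter.write(...) call with SchemalessProtocolType.JSON.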
@ -1,9 +1,7 @@ (SchemaManager.java)
 package com.alibaba.datax.plugin.writer.tdenginewriter;

-import com.alibaba.datax.common.element.Column;
-import com.alibaba.datax.common.element.Record;
 import com.alibaba.datax.common.exception.DataXException;
-import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@ -12,260 +10,163 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.*;
-import java.util.stream.Collectors;

 public class SchemaManager {
     private static final Logger LOG = LoggerFactory.getLogger(SchemaManager.class);

-    private String stable;                                        // target supertable name
-    private Map<String, String> fixedTagValue = new HashMap<>();  // fixed tag values: tag name -> tag value
-    private Map<String, Integer> tagIndexMap = new HashMap<>();   // dynamic tag values: tag name -> column index
-    private Map<String, Integer> fieldIndexMap = new HashMap<>(); // field name -> field index
-    private String tsColName;                                     // timestamp column name
-    private int tsColIndex = -1;                                  // timestamp column index
-    private List<String> fieldList = new ArrayList<>();
-    private List<String> tagList = new ArrayList<>();
-    private boolean canInferSchemaFromConfig = false;
+    private final Connection conn;
+    private TimestampPrecision precision;

-    public SchemaManager() {
-    }
-
-    public SchemaManager(Properties properties) {
-        getFromConfig(properties);
-    }
+    SchemaManager(Connection conn) {
+        this.conn = conn;
+    }

-    private String mapDataxType(Column.Type type) {
-        switch (type) {
-            case LONG:
-                return "BIGINT";
-            case DOUBLE:
-                return "DOUBLE";
-            case STRING:
-                return "NCHAR(64)";
-            case DATE:
-                return "TIMESTAMP";
-            case BOOL:
-                return "BOOL";
-            case BYTES:
-                return "BINARY(64)";
-            default:
-                throw DataXException.asDataXException(TDengineWriterErrorCode.TYPE_ERROR, type.toString());
-        }
-    }
-
-    public void setStable(String stable) {
-        stable = stable;
-    }
-
-    public String getStable() {
-        return stable;
-    }
-
-    private void getFromConfig(Properties properties) {
-        stable = properties.getProperty(Key.STABLE);
-        if (stable == null) {
-            LOG.error("Config error: no stable");
-            return;
-        }
-        for (Object key : properties.keySet()) {
-            String k = (String) key;
-            String v = properties.getProperty(k);
-
-            String[] ps = k.split("\\.");
-            if (ps.length == 1) {
-                continue;
-            }
-            if (k.startsWith(Key.TAG_COLUMN)) {
-                String tagName = ps[1];
-                try {
-                    Integer tagIndex = Integer.parseInt(v);
-                    this.tagIndexMap.put(tagName, tagIndex);
-                    tagList.add(tagName);
-                } catch (NumberFormatException e) {
-                    fixedTagValue.put(tagName, v);
-                    tagList.add(tagName);
-                }
-            } else if (k.startsWith(Key.FIELD_COLUMN)) {
-                String fieldName = ps[1];
-                Integer fileIndex = Integer.parseInt(v);
-                fieldIndexMap.put(fieldName, fileIndex);
-            } else if (k.startsWith(Key.TIMESTAMP_COLUMN)) {
-                tsColName = ps[1];
-                tsColIndex = Integer.parseInt(v);
-            }
-        }
-        List<String> sortedFieldName = fieldIndexMap.entrySet().stream().sorted((x, y) -> x.getValue().compareTo(y.getValue())).map(e -> e.getKey()).collect(Collectors.toList());
-        fieldList.addAll(sortedFieldName); // sorted so that the column order of an auto-created table matches the input data
-        canInferSchemaFromConfig = tsColIndex > -1 && !(fixedTagValue.isEmpty() && tagIndexMap.isEmpty()) && !fieldIndexMap.isEmpty();
-        LOG.info("Config file parsed result:fixedTags=[{}] ,tags=[{}], fields=[{}], tsColName={}, tsIndex={}", String.join(",", fixedTagValue.keySet()), String.join(",", tagIndexMap.keySet()), String.join(",", fieldList), tsColName, tsColIndex);
-    }
-
-    public boolean shouldGuessSchema() {
-        return !canInferSchemaFromConfig;
-    }
-
-    public boolean shouldCreateTable() {
-        return canInferSchemaFromConfig;
-    }
-
-    public boolean configValid() {
-        boolean valid = (tagList.size() > 0 && fieldList.size() > 0 && tsColIndex > -1) || (tagList.size() == 0 && fieldList.size() == 0 && tsColIndex == -1);
-        if (!valid) {
-            LOG.error("Config error: tagColumn, fieldColumn and timestampColumn must be present together or absent together.");
-        }
-        return valid;
-    }
-
-    /**
-     * Fetch the table schema by executing `describe dbname.stable`.
-     * The describe command returns four columns: Field, Type, Length, Note.
-     *
-     * @return true on success; false if the supertable does not exist or another error occurs
-     */
-    public boolean getFromDB(Connection conn) {
-        try {
-            List<String> stables = getSTables(conn);
-            if (!stables.contains(stable)) {
-                LOG.error("super table {} not exist, fail to get schema from database.", stable);
-                return false;
-            }
-        } catch (SQLException e) {
-            LOG.error(e.getMessage());
-            e.printStackTrace();
-            return false;
-        }
+    public TimestampPrecision loadDatabasePrecision() throws DataXException {
+        if (this.precision != null)
+            return this.precision;
+
         try (Statement stmt = conn.createStatement()) {
-            ResultSet rs = stmt.executeQuery("describe " + stable);
-            int colIndex = 0;
+            ResultSet rs = stmt.executeQuery("select database()");
+            String dbname = null;
             while (rs.next()) {
-                String name = rs.getString(1);
-                String type = rs.getString(2);
-                String note = rs.getString(4);
-                if ("TIMESTAMP".equals(type)) {
-                    tsColName = name;
-                    tsColIndex = colIndex;
-                } else if ("TAG".equals(note)) {
-                    tagIndexMap.put(name, colIndex);
-                    tagList.add(name);
-                } else {
-                    fieldIndexMap.put(name, colIndex);
-                    fieldList.add(name);
-                }
-                colIndex++;
+                dbname = rs.getString("database()");
             }
-            LOG.info("table info:tags=[{}], fields=[{}], tsColName={}, tsIndex={}", String.join(",", tagIndexMap.keySet()), String.join(",", fieldList), tsColName, tsColIndex);
-            return true;
+            if (dbname == null)
+                throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION,
+                        "Database not specified or available");
+
+            rs = stmt.executeQuery("show databases");
+            while (rs.next()) {
+                String name = rs.getString("name");
+                if (!name.equalsIgnoreCase(dbname))
+                    continue;
+                String precision = rs.getString("precision");
+                switch (precision) {
+                    case "ns":
+                        this.precision = TimestampPrecision.NANOSEC;
+                        break;
+                    case "us":
+                        this.precision = TimestampPrecision.MICROSEC;
+                        break;
+                    case "ms":
+                    default:
+                        this.precision = TimestampPrecision.MILLISEC;
+                }
+            }
         } catch (SQLException e) {
-            LOG.error(e.getMessage());
-            e.printStackTrace();
-            return false;
+            throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e.getMessage());
         }
+        return this.precision;
     }
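The mapping mirrors the precision column of show databases: "ms" -> MILLISEC, "us" -> MICROSEC, "ns" -> NANOSEC. Downstream this decides the scaling of every DATE value: a millisecond epoch value t is rendered as t, t * 1000 or t * 1000000 respectively.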
-    public static List<String> getSTables(Connection conn) throws SQLException {
-        List<String> stables = new ArrayList<>();
+    public Map<String, TableMeta> loadTableMeta(List<String> tables) throws DataXException {
+        Map<String, TableMeta> tableMetas = new HashMap();
+
         try (Statement stmt = conn.createStatement()) {
             ResultSet rs = stmt.executeQuery("show stables");
             while (rs.next()) {
-                String name = rs.getString(1);
-                stables.add(name);
+                TableMeta tableMeta = buildSupTableMeta(rs);
+                if (!tables.contains(tableMeta.tbname))
+                    continue;
+                tableMetas.put(tableMeta.tbname, tableMeta);
             }
-        }
-        return stables;
-    }

-    public void createSTable(Connection conn, List<Column.Type> fieldTypes) throws SQLException {
-        StringBuilder sb = new StringBuilder();
-        sb.append("CREATE STABLE IF NOT EXISTS ").append(stable).append("(");
-        sb.append(tsColName).append(" ").append("TIMESTAMP,");
-        for (int i = 0; i < fieldList.size(); ++i) {
-            String fieldName = fieldList.get(i);
-            Column.Type dxType = fieldTypes.get(i);
-            sb.append(fieldName).append(' ');
-            String tdType = mapDataxType(dxType);
-            sb.append(tdType).append(',');
-        }
-        sb.deleteCharAt(sb.length() - 1);
-        sb.append(") TAGS(");
-        for (String tagName : tagList) {
-            sb.append(tagName).append(" NCHAR(64),");
-        }
-        sb.deleteCharAt(sb.length() - 1);
-        sb.append(")");
-        String q = sb.toString();
-        LOG.info("run sql:" + q);
-        try (Statement stmt = conn.createStatement()) {
-            stmt.execute(q);
-        }
-    }
+            rs = stmt.executeQuery("show tables");
+            while (rs.next()) {
+                TableMeta tableMeta = buildSubTableMeta(rs);
+                if (!tables.contains(tableMeta.tbname))
+                    continue;
+                tableMetas.put(tableMeta.tbname, tableMeta);
+            }

-    public String[] getTagValuesFromRecord(Record record) {
-        String[] tagValues = new String[tagList.size()];
-        for (int i = 0; i < tagList.size(); ++i) {
-            if (fixedTagValue.containsKey(tagList.get(i))) {
-                tagValues[i] = fixedTagValue.get(tagList.get(i));
-            } else {
-                int tagIndex = tagIndexMap.get(tagList.get(i));
-                tagValues[i] = record.getColumn(tagIndex).asString();
-            }
-            if (tagValues[i] == null) {
-                return null;
-            }
-        }
-        return tagValues;
-    }
+            for (String tbname : tables) {
+                if (!tableMetas.containsKey(tbname)) {
+                    throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, "table metadata of " + tbname + " is empty!");
+                }
+            }
+        } catch (SQLException e) {
+            throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e.getMessage());
+        }
+        return tableMetas;
+    }

-    public boolean hasTimestamp(Record record) {
-        Column column = record.getColumn(tsColIndex);
-        if (column.getType() == Column.Type.DATE && column.asDate() != null) {
-            return true;
-        } else {
-            return false;
-        }
-    }
+    public Map<String, List<ColumnMeta>> loadColumnMetas(List<String> tables) throws DataXException {
+        Map<String, List<ColumnMeta>> ret = new HashMap<>();
+
+        for (String table : tables) {
+            List<ColumnMeta> columnMetaList = new ArrayList<>();
+            try (Statement stmt = conn.createStatement()) {
+                ResultSet rs = stmt.executeQuery("describe " + table);
+                for (int i = 0; rs.next(); i++) {
+                    ColumnMeta columnMeta = buildColumnMeta(rs, i == 0);
+                    columnMetaList.add(columnMeta);
+                }
+            } catch (SQLException e) {
+                throw DataXException.asDataXException(TDengineWriterErrorCode.RUNTIME_EXCEPTION, e.getMessage());
+            }
+
+            if (columnMetaList.isEmpty()) {
+                LOG.error("column metadata of " + table + " is empty!");
+                continue;
+            }
+
+            columnMetaList.stream().filter(colMeta -> colMeta.isTag).forEach(colMeta -> {
+                String sql = "select " + colMeta.field + " from " + table;
+                Object value = null;
+                try (Statement stmt = conn.createStatement()) {
+                    ResultSet rs = stmt.executeQuery(sql);
+                    for (int i = 0; rs.next(); i++) {
+                        value = rs.getObject(colMeta.field);
+                        if (i > 0) {
+                            value = null;
+                            break;
+                        }
+                    }
+                } catch (SQLException e) {
+                    e.printStackTrace();
+                }
+                colMeta.value = value;
+            });
+
+            LOG.debug("load column metadata of " + table + ": " + Arrays.toString(columnMetaList.toArray()));
+            ret.put(table, columnMetaList);
+        }
+        return ret;
+    }
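
For orientation, describe stb1 on a hypothetical supertable returns rows like

    Field | Type      | Length | Note
    ts    | TIMESTAMP | 8      |
    f1    | DOUBLE    | 8      |
    t1    | NCHAR     | 64     | TAG

buildColumnMeta marks the first row as the primary key (the timestamp) and every row whose Note reads TAG as a tag column; each tag's value is then sampled with select <tag> from <table> and cached in colMeta.value only when a single row comes back.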
-    public Map<String, Integer> getFieldIndexMap() {
-        return fieldIndexMap;
-    }
+    private TableMeta buildSupTableMeta(ResultSet rs) throws SQLException {
+        TableMeta tableMeta = new TableMeta();
+        tableMeta.tableType = TableType.SUP_TABLE;
+        tableMeta.tbname = rs.getString("name");
+        tableMeta.columns = rs.getInt("columns");
+        tableMeta.tags = rs.getInt("tags");
+        tableMeta.tables = rs.getInt("tables");
+
+        LOG.debug("load table metadata of " + tableMeta.tbname + ": " + tableMeta);
+        return tableMeta;
+    }

-    public List<String> getFieldList() {
-        return fieldList;
-    }
+    private TableMeta buildSubTableMeta(ResultSet rs) throws SQLException {
+        TableMeta tableMeta = new TableMeta();
+        String stable_name = rs.getString("stable_name");
+        tableMeta.tableType = StringUtils.isBlank(stable_name) ? TableType.NML_TABLE : TableType.SUB_TABLE;
+        tableMeta.tbname = rs.getString("table_name");
+        tableMeta.columns = rs.getInt("columns");
+        tableMeta.stable_name = StringUtils.isBlank(stable_name) ? null : stable_name;
+
+        LOG.debug("load table metadata of " + tableMeta.tbname + ": " + tableMeta);
+        return tableMeta;
+    }

-    public String getJoinedFieldNames() {
-        return tsColName + ", " + String.join(", ", fieldList);
-    }
+    private ColumnMeta buildColumnMeta(ResultSet rs, boolean isPrimaryKey) throws SQLException {
+        ColumnMeta columnMeta = new ColumnMeta();
+        columnMeta.field = rs.getString("Field");
+        columnMeta.type = rs.getString("Type");
+        columnMeta.length = rs.getInt("Length");
+        columnMeta.note = rs.getString("Note");
+        columnMeta.isTag = columnMeta.note != null && columnMeta.note.equals("TAG");
+        columnMeta.isPrimaryKey = isPrimaryKey;
+        return columnMeta;
+    }

-    public int getTsColIndex() {
-        return tsColIndex;
-    }
-
-    public String getTagValuesPlaceHolder() {
-        return tagList.stream().map(x -> "?").collect(Collectors.joining(","));
-    }
-
-    public String getFieldValuesPlaceHolder() {
-        return "?, " + fieldList.stream().map(x -> "?").collect(Collectors.joining(", "));
-    }
-
-    /**
-     * Compute the sub-table name:
-     * <ol>
-     * <li>join the tag values into a string of the form tag_value1!tag_value2!tag_value3,</li>
-     * <li>take the MD5 hex digest "md5_val" of that string,</li>
-     * <li>use "t_md5val" as the sub-table name, where "t" is a fixed prefix.</li>
-     * </ol>
-     *
-     * @param tagValues
-     * @return
-     */
-    public String computeTableName(String[] tagValues) {
-        String s = String.join("!", tagValues);
-        return "t_" + DigestUtils.md5Hex(s);
-    }
-
-    public int getDynamicTagCount() {
-        return tagIndexMap.size();
-    }
 }
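A worked example of the old naming scheme (hypothetical tag values): tagValues = ["beijing", "2"] are joined to "beijing!2", and the sub-table name becomes "t_" + DigestUtils.md5Hex("beijing!2"); identical tag tuples therefore always map to the same sub-table.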
@ -1,37 +1,56 @@
|
|||||||
package com.alibaba.datax.plugin.writer.tdenginewriter;
|
package com.alibaba.datax.plugin.writer.tdenginewriter;
|
||||||
|
|
||||||
|
import com.alibaba.datax.common.exception.DataXException;
|
||||||
import com.alibaba.datax.common.plugin.RecordReceiver;
|
import com.alibaba.datax.common.plugin.RecordReceiver;
|
||||||
import com.alibaba.datax.common.spi.Writer;
|
import com.alibaba.datax.common.spi.Writer;
|
||||||
import com.alibaba.datax.common.util.Configuration;
|
import com.alibaba.datax.common.util.Configuration;
|
||||||
|
import org.apache.commons.lang3.StringUtils;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Properties;
|
|
||||||
import java.util.Set;
|
|
||||||
|
|
||||||
public class TDengineWriter extends Writer {
|
public class TDengineWriter extends Writer {
|
||||||
|
|
||||||
private static final String PEER_PLUGIN_NAME = "peerPluginName";
|
private static final String PEER_PLUGIN_NAME = "peerPluginName";
|
||||||
|
|
||||||
static {
|
|
||||||
try {
|
|
||||||
Class.forName("com.taosdata.jdbc.TSDBDriver");
|
|
||||||
} catch (ClassNotFoundException e) {
|
|
||||||
e.printStackTrace();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class Job extends Writer.Job {
|
public static class Job extends Writer.Job {
|
||||||
|
|
||||||
private Configuration originalConfig;
|
private Configuration originalConfig;
|
||||||
|
private static final Logger LOG = LoggerFactory.getLogger(Job.class);
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void init() {
|
public void init() {
|
||||||
this.originalConfig = super.getPluginJobConf();
|
this.originalConfig = super.getPluginJobConf();
|
||||||
this.originalConfig.set(PEER_PLUGIN_NAME, getPeerPluginName());
|
this.originalConfig.set(PEER_PLUGIN_NAME, getPeerPluginName());
|
||||||
|
|
||||||
|
// check user
|
||||||
|
String user = this.originalConfig.getString(Key.USERNAME);
|
||||||
|
if (StringUtils.isBlank(user))
|
||||||
|
throw DataXException.asDataXException(TDengineWriterErrorCode.REQUIRED_VALUE, "The parameter ["
|
||||||
|
+ Key.USERNAME + "] is not set.");
|
||||||
|
|
||||||
|
// check password
|
||||||
|
String password = this.originalConfig.getString(Key.PASSWORD);
|
||||||
|
if (StringUtils.isBlank(password))
|
||||||
|
throw DataXException.asDataXException(TDengineWriterErrorCode.REQUIRED_VALUE, "The parameter ["
|
||||||
|
+ Key.PASSWORD + "] is not set.");
|
||||||
|
|
||||||
|
// check connection
|
||||||
|
List<Object> connection = this.originalConfig.getList(Key.CONNECTION);
|
||||||
|
if (connection == null || connection.isEmpty())
|
||||||
|
throw DataXException.asDataXException(TDengineWriterErrorCode.REQUIRED_VALUE, "The parameter ["
|
||||||
|
+ Key.CONNECTION + "] is not set.");
|
||||||
|
if (connection.size() > 1)
|
||||||
|
LOG.warn("connection.size is " + connection.size() + " and only connection[0] will be used.");
|
||||||
|
Configuration conn = Configuration.from(connection.get(0).toString());
|
||||||
|
String jdbcUrl = conn.getString(Key.JDBC_URL);
|
||||||
|
if (StringUtils.isBlank(jdbcUrl))
|
||||||
|
throw DataXException.asDataXException(TDengineWriterErrorCode.REQUIRED_VALUE, "The parameter ["
|
||||||
|
+ Key.JDBC_URL + "] of connection is not set.");
|
||||||
|
|
||||||
|
// check column
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -42,22 +61,30 @@ public class TDengineWriter extends Writer {
|
|||||||
@Override
|
@Override
|
||||||
public List<Configuration> split(int mandatoryNumber) {
|
public List<Configuration> split(int mandatoryNumber) {
|
||||||
List<Configuration> writerSplitConfigs = new ArrayList<>();
|
List<Configuration> writerSplitConfigs = new ArrayList<>();
|
||||||
|
|
||||||
|
List<Object> conns = this.originalConfig.getList(Key.CONNECTION);
|
||||||
for (int i = 0; i < mandatoryNumber; i++) {
|
for (int i = 0; i < mandatoryNumber; i++) {
|
||||||
writerSplitConfigs.add(this.originalConfig);
|
Configuration clone = this.originalConfig.clone();
|
||||||
|
Configuration conf = Configuration.from(conns.get(0).toString());
|
||||||
|
String jdbcUrl = conf.getString(Key.JDBC_URL);
|
||||||
|
clone.set(Key.JDBC_URL, jdbcUrl);
|
||||||
|
clone.set(Key.TABLE, conf.getList(Key.TABLE));
|
||||||
|
clone.remove(Key.CONNECTION);
|
||||||
|
writerSplitConfigs.add(clone);
|
||||||
}
|
}
|
||||||
|
|
||||||
return writerSplitConfigs;
|
return writerSplitConfigs;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class Task extends Writer.Task {
|
public static class Task extends Writer.Task {
|
||||||
private static final Logger LOG = LoggerFactory.getLogger(Job.class);
|
private static final Logger LOG = LoggerFactory.getLogger(Task.class);
|
||||||
|
|
||||||
private Configuration writerSliceConfig;
|
private Configuration writerSliceConfig;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void init() {
|
public void init() {
|
||||||
this.writerSliceConfig = getPluginJobConf();
|
this.writerSliceConfig = getPluginJobConf();
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -67,23 +94,16 @@ public class TDengineWriter extends Writer {

         @Override
         public void startWrite(RecordReceiver lineReceiver) {
-            Set<String> keys = this.writerSliceConfig.getKeys();
-            Properties properties = new Properties();
-            for (String key : keys) {
-                String value = this.writerSliceConfig.getString(key);
-                properties.setProperty(key, value);
-            }
-            if (!keys.contains(Key.USER)) {
-                properties.setProperty(Key.USER, "root");
-            }
-            if (!keys.contains(Key.PASSWORD)) {
-                properties.setProperty(Key.PASSWORD, "taosdata");
-            }
-            LOG.debug("========================properties==========================\n" + properties);
             String peerPluginName = this.writerSliceConfig.getString(PEER_PLUGIN_NAME);
             LOG.debug("start to handle record from: " + peerPluginName);
-            DataHandler handler = DataHandlerFactory.build(peerPluginName);
-            long records = handler.handle(lineReceiver, properties, getTaskPluginCollector());
+            DataHandler handler;
+            if (peerPluginName.equals("opentsdbreader"))
+                handler = new OpentsdbDataHandler(this.writerSliceConfig);
+            else
+                handler = new DefaultDataHandler(this.writerSliceConfig);
+
+            long records = handler.handle(lineReceiver, getTaskPluginCollector());
             LOG.debug("handle data finished, records: " + records);
         }

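With this rewrite, Job.split() no longer hands every task the same shared Configuration object: each slice receives an independent clone in which connection[0] is flattened into top-level jdbcUrl and table keys and the connection array is removed, and Task.startWrite() now chooses OpentsdbDataHandler or DefaultDataHandler directly from the peer plugin name instead of going through the removed factory and Properties plumbing. A minimal sketch of the same flattening against datax-common alone (the JSON values here are placeholders, not part of the commit):

    import com.alibaba.datax.common.util.Configuration;

    public class SplitFlattenSketch {
        public static void main(String[] args) {
            // Hypothetical job parameter block, shaped like the writer's configuration.
            Configuration job = Configuration.from("{\"username\":\"root\",\"connection\":[" +
                    "{\"table\":[\"stb1\"],\"jdbcUrl\":\"jdbc:TAOS-RS://localhost:6041/test\"}]}");
            // Same steps as Job.split(): clone, flatten connection[0], drop the array.
            Configuration conn = Configuration.from(job.getList("connection").get(0).toString());
            Configuration slice = job.clone();
            slice.set("jdbcUrl", conn.getString("jdbcUrl"));
            slice.set("table", conn.getList("table"));
            slice.remove("connection");
            System.out.println(slice.toJSON()); // what each of the N task slices receives
        }
    }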
@ -3,13 +3,16 @@ package com.alibaba.datax.plugin.writer.tdenginewriter;
 import com.alibaba.datax.common.spi.ErrorCode;

 public enum TDengineWriterErrorCode implements ErrorCode {
-    RUNTIME_EXCEPTION("TDengineWriter-00", "运行时异常"),
-    TYPE_ERROR("TDengineWriter-00", "Datax类型无法正确映射到TDengine类型");
+    REQUIRED_VALUE("TDengineWriter-00", "缺失必要的值"),
+    ILLEGAL_VALUE("TDengineWriter-01", "值非法"),
+    RUNTIME_EXCEPTION("TDengineWriter-02", "运行时异常"),
+    TYPE_ERROR("TDengineWriter-03", "Datax类型无法正确映射到TDengine类型");

     private final String code;
     private final String description;

-    private TDengineWriterErrorCode(String code, String description) {
+    TDengineWriterErrorCode(String code, String description) {
         this.code = code;
         this.description = description;
     }
@ -26,7 +29,6 @@ public enum TDengineWriterErrorCode implements ErrorCode {

     @Override
     public String toString() {
-        return String.format("Code:[%s], Description:[%s]. ", this.code,
-                this.description);
+        return String.format("Code:[%s], Description:[%s]. ", this.code, this.description);
     }
 }
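Each failure mode now carries its own code instead of reusing TDengineWriter-00 twice. A hedged sketch of how such a code is typically surfaced from a DataX plugin, assuming the stock DataXException helper from datax-common and the same package as the enum (the validation helper itself is hypothetical, not part of the commit):

    import com.alibaba.datax.common.exception.DataXException;
    import com.alibaba.datax.common.util.Configuration;

    public class RequiredValueSketch {
        // Hypothetical fail-fast check for a mandatory configuration key.
        static String requireString(Configuration conf, String key) {
            String value = conf.getString(key);
            if (value == null || value.isEmpty()) {
                throw DataXException.asDataXException(TDengineWriterErrorCode.REQUIRED_VALUE,
                        "missing required parameter: " + key);
            }
            return value;
        }
    }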
@ -0,0 +1,22 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

public class TableMeta {
    TableType tableType;
    String tbname;
    int columns;
    int tags;
    int tables;
    String stable_name;

    @Override
    public String toString() {
        return "TableMeta{" +
                "tableType=" + tableType +
                ", tbname='" + tbname + '\'' +
                ", columns=" + columns +
                ", tags=" + tags +
                ", tables=" + tables +
                ", stable_name='" + stable_name + '\'' +
                '}';
    }
}
@ -0,0 +1,5 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

public enum TableType {
    SUP_TABLE, SUB_TABLE, NML_TABLE
}
@ -0,0 +1,5 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

public enum TimestampPrecision {
    MILLISEC, MICROSEC, NANOSEC
}
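TableType distinguishes the three targets the writer can meet (super table, sub table, ordinary table), and TimestampPrecision mirrors the three precisions a TDengine database can be created with ('ms', 'us', 'ns'). A small illustrative helper, not part of the commit, showing how a millisecond epoch would be scaled to the database precision before insertion:

    public class PrecisionSketch {
        // Hypothetical conversion: TDengine expects epoch values in the
        // precision the database was created with ('ms', 'us' or 'ns').
        static long scaleEpochMillis(long epochMillis, TimestampPrecision precision) {
            switch (precision) {
                case MILLISEC:
                    return epochMillis;
                case MICROSEC:
                    return epochMillis * 1_000L;
                case NANOSEC:
                    return epochMillis * 1_000_000L;
                default:
                    throw new IllegalArgumentException("unknown precision: " + precision);
            }
        }
    }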
@ -1,105 +0,0 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class com_alibaba_datax_plugin_writer_JniConnection */

#ifndef _Included_com_alibaba_datax_plugin_writer_JniConnection
#define _Included_com_alibaba_datax_plugin_writer_JniConnection
#ifdef __cplusplus
extern "C" {
#endif
#undef com_alibaba_datax_plugin_writer_JniConnection_JNI_NULL_POINTER
#define com_alibaba_datax_plugin_writer_JniConnection_JNI_NULL_POINTER 0LL
#undef com_alibaba_datax_plugin_writer_JniConnection_JNI_SUCCESSFUL
#define com_alibaba_datax_plugin_writer_JniConnection_JNI_SUCCESSFUL 0L
/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    initImp
 * Signature: (Ljava/lang/String;)V
 */
JNIEXPORT void JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_initImp
  (JNIEnv *, jclass, jstring);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    setOptions
 * Signature: (ILjava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_setOptions
  (JNIEnv *, jclass, jint, jstring);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    getTsCharset
 * Signature: ()Ljava/lang/String;
 */
JNIEXPORT jstring JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_getTsCharset
  (JNIEnv *, jclass);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    connectImp
 * Signature: (Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)J
 */
JNIEXPORT jlong JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_connectImp
  (JNIEnv *, jobject, jstring, jint, jstring, jstring, jstring);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    executeQueryImp
 * Signature: ([BJ)J
 */
JNIEXPORT jlong JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_executeQueryImp
  (JNIEnv *, jobject, jbyteArray, jlong);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    getErrCodeImp
 * Signature: (JJ)I
 */
JNIEXPORT jint JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_getErrCodeImp
  (JNIEnv *, jobject, jlong, jlong);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    getErrMsgImp
 * Signature: (J)Ljava/lang/String;
 */
JNIEXPORT jstring JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_getErrMsgImp
  (JNIEnv *, jobject, jlong);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    getErrMsgByCode
 * Signature: (J)Ljava/lang/String;
 */
JNIEXPORT jstring JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_getErrMsgByCode
  (JNIEnv *, jobject, jlong);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    getAffectedRowsImp
 * Signature: (JJ)I
 */
JNIEXPORT jint JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_getAffectedRowsImp
  (JNIEnv *, jobject, jlong, jlong);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    closeConnectionImp
 * Signature: (J)I
 */
JNIEXPORT jint JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_closeConnectionImp
  (JNIEnv *, jobject, jlong);

/*
 * Class:     com_alibaba_datax_plugin_writer_JniConnection
 * Method:    insertOpentsdbJson
 * Signature: (Ljava/lang/String;J)J
 */
JNIEXPORT jlong JNICALL Java_com_alibaba_datax_plugin_writer_JniConnection_insertOpentsdbJson
  (JNIEnv *, jobject, jstring, jlong);

#ifdef __cplusplus
}
#endif
#endif
@ -3,7 +3,7 @@
     "class": "com.alibaba.datax.plugin.writer.tdenginewriter.TDengineWriter",
     "description": {
         "useScene": "data migration to tdengine",
-        "mechanism": "use JNI or taos-jdbc to write data to tdengine."
+        "mechanism": "use taos-jdbcdriver to write data."
     },
-    "developer": "zyyang-taosdata"
+    "developer": "support@taosdata.com"
 }
@ -1,24 +1,20 @@
 {
     "name": "tdenginewriter",
     "parameter": {
-        "host": "127.0.0.1",
-        "port": 6030,
-        "dbname": "test",
-        "user": "root",
-        "password": "taosdata",
-        "batchSize": 1000,
-        "stable": "weather",
-        "tagColumn": {
-            "station": 0
-        },
-        "fieldColumn": {
-            "latitude": 1,
-            "longtitude": 2,
-            "tmax": 4,
-            "tmin": 5
-        },
-        "timestampColumn": {
-            "date": 3
-        }
+        "username": "root",
+        "password": "taosdata",
+        "column": [
+            ""
+        ],
+        "connection": [
+            {
+                "table": [
+                    ""
+                ],
+                "jdbcUrl": ""
+            }
+        ],
+        "batchSize": 1000,
+        "ignoreTagsUnmatched": true
     }
 }
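The reworked template drops the host/port/field-index layout in favor of the connection-array layout used by the other DataX RDBMS plugins, which is exactly the shape Job.split() flattens. A quick hedged check that a filled-in template parses into the paths the code and the tests below read (host and table names are placeholders):

    import com.alibaba.datax.common.util.Configuration;

    public class TemplateSketch {
        public static void main(String[] args) {
            Configuration conf = Configuration.from("{\"name\":\"tdenginewriter\",\"parameter\":{" +
                    "\"username\":\"root\",\"password\":\"taosdata\",\"column\":[\"ts\",\"f1\"]," +
                    "\"connection\":[{\"table\":[\"weather\"],\"jdbcUrl\":\"jdbc:TAOS-RS://localhost:6041/test\"}]," +
                    "\"batchSize\":1000,\"ignoreTagsUnmatched\":true}}");
            // The same path syntax the unit tests use, e.g. connection[0].jdbcUrl.
            System.out.println(conf.getString("parameter.connection[0].jdbcUrl"));
            System.out.println(conf.getList("parameter.connection[0].table"));
            System.out.println(conf.getInt("parameter.batchSize"));
        }
    }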
@ -1,6 +0,0 @@
|
|||||||
try_get_schema_from_db=fail to get structure info of target table from configure file and will try to get it from database
|
|
||||||
batch_size_too_small='batchSize' is too small, please increase it and try again
|
|
||||||
column_number_error=number of columns is less than expected
|
|
||||||
tag_value_error=tag columns include 'null' value
|
|
||||||
ts_value_error=timestamp column type error or null
|
|
||||||
infer_column_type_error=fail to infer column type: sample count %d, column index %d
|
|
@ -1,6 +0,0 @@
|
|||||||
try_get_schema_from_db=fail to get structure info of target table from configure file and will try to get it from database
|
|
||||||
batch_size_too_small='batchSize' is too small, please increase it and try again
|
|
||||||
column_number_error=number of columns is less than expected
|
|
||||||
tag_value_error=tag columns include 'null' value
|
|
||||||
ts_value_error=timestamp column type error or null
|
|
||||||
infer_column_type_error=fail to infer column type: sample count %d, column index %d
|
|
@ -1,6 +0,0 @@
|
|||||||
try_get_schema_from_db=\u65e0\u6cd5\u4ece\u914d\u7f6e\u6587\u4ef6\u83b7\u53d6\u8868\u7ed3\u6784\u4fe1\u606f\uff0c\u5c1d\u8bd5\u4ece\u6570\u636e\u5e93\u83b7\u53d6
|
|
||||||
batch_size_too_small=batchSize\u592a\u5c0f\uff0c\u4f1a\u589e\u52a0\u81ea\u52a8\u7c7b\u578b\u63a8\u65ad\u9519\u8bef\u7684\u6982\u7387\uff0c\u5efa\u8bae\u6539\u5927\u540e\u91cd\u8bd5
|
|
||||||
column_number_error=\u5b9e\u9645\u5217\u6570\u5c0f\u4e8e\u671f\u671b\u5217\u6570
|
|
||||||
tag_value_error=\u6807\u7b7e\u5217\u5305\u542bnull
|
|
||||||
ts_value_error=\u65f6\u95f4\u6233\u5217\u4e3anull\u6216\u7c7b\u578b\u9519\u8bef
|
|
||||||
infer_column_type_error=\u6839\u636e\u91c7\u6837\u7684%d\u6761\u6570\u636e\uff0c\u65e0\u6cd5\u63a8\u65ad\u7b2c%d\u5217\u7684\u6570\u636e\u7c7b\u578b
|
|
@ -0,0 +1,122 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.datax.core.Engine;
import org.junit.Before;
import org.junit.Test;

import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Random;

public class DM2TDengineTest {

    private String host1 = "192.168.0.72";
    private String host2 = "192.168.1.93";
    private final Random random = new Random(System.currentTimeMillis());

    @Test
    public void dm2t_case01() throws Throwable {
        // given
        createSupTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/dm2t-1.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void dm2t_case02() throws Throwable {
        // given
        createSupAndSubTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/dm2t-2.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void dm2t_case03() throws Throwable {
        // given
        createTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/dm2t-3.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void dm2t_case04() throws Throwable {
        // given
        createSupTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/dm2t-4.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    private void createSupTable() throws SQLException {
        final String url2 = "jdbc:TAOS-RS://" + host2 + ":6041";
        try (Connection conn = DriverManager.getConnection(url2, "root", "taosdata")) {
            Statement stmt = conn.createStatement();
            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2");
            stmt.execute("create table db2.stb2(ts timestamp, f2 smallint, f4 bigint,f5 float, " +
                    "f6 double, f7 double, f8 bool, f9 nchar(100), f10 nchar(200)) tags(f1 tinyint,f3 int)");
            stmt.close();
        }
    }

    private void createSupAndSubTable() throws SQLException {
        final String url2 = "jdbc:TAOS-RS://" + host2 + ":6041";
        try (Connection conn = DriverManager.getConnection(url2, "root", "taosdata")) {
            Statement stmt = conn.createStatement();
            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2");
            stmt.execute("create table db2.stb2(ts timestamp, f2 smallint, f4 bigint,f5 float, " +
                    "f6 double, f7 double, f8 bool, f9 nchar(100), f10 nchar(200)) tags(f1 tinyint,f3 int)");
            for (int i = 0; i < 10; i++) {
                stmt.execute("create table db2.t" + (i + 1) + "_" + i + " using db2.stb2 tags(" + (i + 1) + "," + i + ")");
            }
            stmt.close();
        }
    }

    private void createTable() throws SQLException {
        final String url2 = "jdbc:TAOS-RS://" + host2 + ":6041";
        try (Connection conn = DriverManager.getConnection(url2, "root", "taosdata")) {
            Statement stmt = conn.createStatement();
            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2");
            stmt.execute("create table db2.stb2(ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint,f5 float, " +
                    "f6 double, f7 double, f8 bool, f9 nchar(100), f10 nchar(200))");
            stmt.close();
        }
    }

    @Before
    public void before() throws SQLException, ClassNotFoundException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        long ts = System.currentTimeMillis();

        final String url = "jdbc:dm://" + host1 + ":5236";
        try (Connection conn = DriverManager.getConnection(url, "TESTUSER", "test123456")) {
            conn.setAutoCommit(true);
            Statement stmt = conn.createStatement();
            stmt.execute("drop table if exists stb1");
            stmt.execute("create table stb1(ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint, f5 float, " +
                    "f6 double, f7 NUMERIC(10,2), f8 BIT, f9 VARCHAR(100), f10 VARCHAR2(200))");
            for (int i = 0; i < 10; i++) {
                String sql = "insert into stb1 values('" + sdf.format(new Date(ts + i * 1000)) + "'," + (i + 1) + "," +
                        random.nextInt(100) + "," + i + ",4,5.55,6.666,7.77," + (random.nextBoolean() ? 1 : 0) +
                        ",'abcABC123','北京朝阳望京DM')";
                stmt.execute(sql);
            }
        }
    }

}
@ -0,0 +1,297 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.datax.common.element.DateColumn;
import com.alibaba.datax.common.element.LongColumn;
import com.alibaba.datax.common.element.Record;
import com.alibaba.datax.common.element.StringColumn;
import com.alibaba.datax.common.util.Configuration;
import com.alibaba.datax.core.transport.record.DefaultRecord;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class DefaultDataHandlerTest {

    private static final String host = "192.168.1.93";
    private static Connection conn;

    @Test
    public void writeSupTableBySQL() throws SQLException {
        // given
        createSupAndSubTable();
        Configuration configuration = Configuration.from("{" +
                "\"username\": \"root\"," +
                "\"password\": \"taosdata\"," +
                "\"column\": [\"tbname\", \"ts\", \"f1\", \"f2\", \"t1\"]," +
                "\"table\":[\"stb1\"]," +
                "\"jdbcUrl\":\"jdbc:TAOS-RS://" + host + ":6041/test\"," +
                "\"batchSize\": \"1000\"" +
                "}");
        long current = System.currentTimeMillis();
        List<Record> recordList = IntStream.range(1, 11).mapToObj(i -> {
            Record record = new DefaultRecord();
            record.addColumn(new StringColumn("tb" + (i + 10)));
            record.addColumn(new DateColumn(current + 1000 * i));
            record.addColumn(new LongColumn(1));
            record.addColumn(new LongColumn(2));
            record.addColumn(new LongColumn(i));
            return record;
        }).collect(Collectors.toList());

        // when
        DefaultDataHandler handler = new DefaultDataHandler(configuration);
        List<String> tables = configuration.getList("table", String.class);
        SchemaManager schemaManager = new SchemaManager(conn);
        Map<String, TableMeta> tableMetas = schemaManager.loadTableMeta(tables);
        Map<String, List<ColumnMeta>> columnMetas = schemaManager.loadColumnMetas(tables);
        handler.setTableMetas(tableMetas);
        handler.setColumnMetas(columnMetas);
        handler.setSchemaManager(schemaManager);

        int count = handler.writeBatch(conn, recordList);

        // then
        Assert.assertEquals(10, count);
    }

    @Test
    public void writeSupTableBySQL_2() throws SQLException {
        // given
        createSupAndSubTable();
        Configuration configuration = Configuration.from("{" +
                "\"username\": \"root\"," +
                "\"password\": \"taosdata\"," +
                "\"column\": [\"tbname\", \"ts\", \"f1\", \"t1\"]," +
                "\"table\":[\"stb1\"]," +
                "\"jdbcUrl\":\"jdbc:TAOS-RS://" + host + ":6041/test\"," +
                "\"batchSize\": \"1000\"" +
                "}");
        long current = System.currentTimeMillis();
        List<Record> recordList = IntStream.range(1, 11).mapToObj(i -> {
            Record record = new DefaultRecord();
            record.addColumn(new StringColumn("tb" + (i + 10)));
            record.addColumn(new DateColumn(current + 1000 * i));
            record.addColumn(new LongColumn(1));
            record.addColumn(new LongColumn(i));
            return record;
        }).collect(Collectors.toList());

        // when
        DefaultDataHandler handler = new DefaultDataHandler(configuration);
        List<String> tables = configuration.getList("table", String.class);
        SchemaManager schemaManager = new SchemaManager(conn);
        Map<String, TableMeta> tableMetas = schemaManager.loadTableMeta(tables);
        Map<String, List<ColumnMeta>> columnMetas = schemaManager.loadColumnMetas(tables);
        handler.setTableMetas(tableMetas);
        handler.setColumnMetas(columnMetas);
        handler.setSchemaManager(schemaManager);

        int count = handler.writeBatch(conn, recordList);

        // then
        Assert.assertEquals(10, count);
    }

    @Test
    public void writeSupTableBySchemaless() throws SQLException {
        // given
        createSupTable();
        Configuration configuration = Configuration.from("{" +
                "\"username\": \"root\"," +
                "\"password\": \"taosdata\"," +
                "\"column\": [\"ts\", \"f1\", \"f2\", \"t1\"]," +
                "\"table\":[\"stb1\"]," +
                "\"jdbcUrl\":\"jdbc:TAOS://" + host + ":6030/scm_test\"," +
                "\"batchSize\": \"1000\"" +
                "}");
        String jdbcUrl = configuration.getString("jdbcUrl");
        Connection connection = DriverManager.getConnection(jdbcUrl, "root", "taosdata");
        long current = System.currentTimeMillis();
        List<Record> recordList = IntStream.range(1, 11).mapToObj(i -> {
            Record record = new DefaultRecord();
            record.addColumn(new DateColumn(current + 1000 * i));
            record.addColumn(new LongColumn(1));
            record.addColumn(new LongColumn(2));
            record.addColumn(new StringColumn("t" + i + " 22"));
            return record;
        }).collect(Collectors.toList());

        // when
        DefaultDataHandler handler = new DefaultDataHandler(configuration);
        List<String> tables = configuration.getList("table", String.class);
        SchemaManager schemaManager = new SchemaManager(connection);
        Map<String, TableMeta> tableMetas = schemaManager.loadTableMeta(tables);
        Map<String, List<ColumnMeta>> columnMetas = schemaManager.loadColumnMetas(tables);
        handler.setTableMetas(tableMetas);
        handler.setColumnMetas(columnMetas);
        handler.setSchemaManager(schemaManager);

        int count = handler.writeBatch(connection, recordList);

        // then
        Assert.assertEquals(10, count);
    }

    @Test
    public void writeSubTableWithTableName() throws SQLException {
        // given
        createSupAndSubTable();
        Configuration configuration = Configuration.from("{" +
                "\"username\": \"root\"," +
                "\"password\": \"taosdata\"," +
                "\"column\": [\"tbname\", \"ts\", \"f1\", \"f2\", \"t1\"]," +
                "\"table\":[\"tb1\"]," +
                "\"jdbcUrl\":\"jdbc:TAOS-RS://" + host + ":6041/test\"," +
                "\"batchSize\": \"1000\"" +
                "}");
        long current = System.currentTimeMillis();
        List<Record> recordList = IntStream.range(1, 11).mapToObj(i -> {
            Record record = new DefaultRecord();
            record.addColumn(new StringColumn("tb" + i));
            record.addColumn(new DateColumn(current + 1000 * i));
            record.addColumn(new LongColumn(1));
            record.addColumn(new LongColumn(2));
            record.addColumn(new LongColumn(i));
            return record;
        }).collect(Collectors.toList());

        // when
        DefaultDataHandler handler = new DefaultDataHandler(configuration);
        List<String> tables = configuration.getList("table", String.class);
        SchemaManager schemaManager = new SchemaManager(conn);
        Map<String, TableMeta> tableMetas = schemaManager.loadTableMeta(tables);
        Map<String, List<ColumnMeta>> columnMetas = schemaManager.loadColumnMetas(tables);
        handler.setTableMetas(tableMetas);
        handler.setColumnMetas(columnMetas);
        handler.setSchemaManager(schemaManager);

        int count = handler.writeBatch(conn, recordList);

        // then
        Assert.assertEquals(1, count);
    }

    @Test
    public void writeSubTableWithoutTableName() throws SQLException {
        // given
        createSupAndSubTable();
        Configuration configuration = Configuration.from("{" +
                "\"username\": \"root\"," +
                "\"password\": \"taosdata\"," +
                "\"column\": [\"ts\", \"f1\", \"f2\", \"t1\"]," +
                "\"table\":[\"tb1\"]," +
                "\"jdbcUrl\":\"jdbc:TAOS-RS://" + host + ":6041/test\"," +
                "\"batchSize\": \"1000\"," +
                "\"ignoreTagsUnmatched\": \"true\"" +
                "}");
        long current = System.currentTimeMillis();
        List<Record> recordList = IntStream.range(1, 11).mapToObj(i -> {
            Record record = new DefaultRecord();
            record.addColumn(new DateColumn(current + 1000 * i));
            record.addColumn(new LongColumn(1));
            record.addColumn(new LongColumn(2));
            record.addColumn(new LongColumn(i));
            return record;
        }).collect(Collectors.toList());

        // when
        DefaultDataHandler handler = new DefaultDataHandler(configuration);
        List<String> tables = configuration.getList("table", String.class);
        SchemaManager schemaManager = new SchemaManager(conn);
        Map<String, TableMeta> tableMetas = schemaManager.loadTableMeta(tables);
        Map<String, List<ColumnMeta>> columnMetas = schemaManager.loadColumnMetas(tables);
        handler.setTableMetas(tableMetas);
        handler.setColumnMetas(columnMetas);
        handler.setSchemaManager(schemaManager);

        int count = handler.writeBatch(conn, recordList);

        // then
        Assert.assertEquals(1, count);
    }

    @Test
    public void writeNormalTable() throws SQLException {
        // given
        createSupAndSubTable();
        Configuration configuration = Configuration.from("{" +
                "\"username\": \"root\"," +
                "\"password\": \"taosdata\"," +
                "\"column\": [\"ts\", \"f1\", \"f2\", \"t1\"]," +
                "\"table\":[\"weather\"]," +
                "\"jdbcUrl\":\"jdbc:TAOS-RS://" + host + ":6041/test\"," +
                "\"batchSize\": \"1000\"," +
                "\"ignoreTagsUnmatched\": \"true\"" +
                "}");
        long current = System.currentTimeMillis();
        List<Record> recordList = IntStream.range(1, 11).mapToObj(i -> {
            Record record = new DefaultRecord();
            record.addColumn(new DateColumn(current + 1000 * i));
            record.addColumn(new LongColumn(1));
            record.addColumn(new LongColumn(2));
            record.addColumn(new LongColumn(i));
            return record;
        }).collect(Collectors.toList());

        // when
        DefaultDataHandler handler = new DefaultDataHandler(configuration);
        List<String> tables = configuration.getList("table", String.class);
        SchemaManager schemaManager = new SchemaManager(conn);
        Map<String, TableMeta> tableMetas = schemaManager.loadTableMeta(tables);
        Map<String, List<ColumnMeta>> columnMetas = schemaManager.loadColumnMetas(tables);
        handler.setTableMetas(tableMetas);
        handler.setColumnMetas(columnMetas);
        handler.setSchemaManager(schemaManager);

        int count = handler.writeBatch(conn, recordList);

        // then
        Assert.assertEquals(10, count);
    }

    private void createSupAndSubTable() throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            stmt.execute("drop database if exists scm_test");
            stmt.execute("create database if not exists scm_test");
            stmt.execute("use scm_test");
            stmt.execute("create table stb1(ts timestamp, f1 int, f2 int) tags(t1 nchar(32))");
            stmt.execute("create table stb2(ts timestamp, f1 int, f2 int, f3 int) tags(t1 int, t2 int)");
            stmt.execute("create table tb1 using stb1 tags(1)");
            stmt.execute("create table tb2 using stb1 tags(2)");
            stmt.execute("create table tb3 using stb2 tags(1,1)");
            stmt.execute("create table tb4 using stb2 tags(2,2)");
            stmt.execute("create table weather(ts timestamp, f1 int, f2 int, f3 int, t1 int, t2 int)");
        }
    }

    private void createSupTable() throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            stmt.execute("drop database if exists scm_test");
            stmt.execute("create database if not exists scm_test");
            stmt.execute("use scm_test");
            stmt.execute("create table stb1(ts timestamp, f1 int, f2 int) tags(t1 nchar(32))");
        }
    }

    @BeforeClass
    public static void beforeClass() throws SQLException {
        conn = DriverManager.getConnection("jdbc:TAOS-RS://" + host + ":6041", "root", "taosdata");
    }

    @AfterClass
    public static void afterClass() throws SQLException {
        if (conn != null) {
            conn.close();
        }
    }
}
@ -1,21 +0,0 @@
|
|||||||
package com.alibaba.datax.plugin.writer.tdenginewriter;
|
|
||||||
|
|
||||||
import org.junit.Test;
|
|
||||||
|
|
||||||
import java.util.Properties;
|
|
||||||
|
|
||||||
public class JniConnectionTest {
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void test() throws Exception {
|
|
||||||
JniConnection connection = new JniConnection(new Properties());
|
|
||||||
|
|
||||||
connection.open("192.168.56.105", 6030, "test", "root", "taosdata");
|
|
||||||
|
|
||||||
String json = "{\"metric\":\"weather_temperature\",\"timestamp\":1609430400000,\"value\":123,\"tags\":{\"location\":\"beijing\",\"id\":\"t123\"}}";
|
|
||||||
connection.insertOpentsdbJson(json);
|
|
||||||
|
|
||||||
connection.close();
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
@ -1,25 +0,0 @@
|
|||||||
package com.alibaba.datax.plugin.writer.tdenginewriter;
|
|
||||||
|
|
||||||
import org.junit.Test;
|
|
||||||
|
|
||||||
import java.util.Locale;
|
|
||||||
import java.util.ResourceBundle;
|
|
||||||
|
|
||||||
import org.junit.Assert;
|
|
||||||
|
|
||||||
public class MessageTest {
|
|
||||||
@Test
|
|
||||||
public void testChineseMessage() {
|
|
||||||
Locale local = new Locale("zh", "CN");
|
|
||||||
ResourceBundle bundle = ResourceBundle.getBundle("tdenginewritermsg", local);
|
|
||||||
String msg = bundle.getString("try_get_schema_fromdb");
|
|
||||||
Assert.assertEquals("无法从配置文件获取表结构信息,尝试从数据库获取", msg);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testDefaultMessage() {
|
|
||||||
ResourceBundle bundle = ResourceBundle.getBundle("tdenginewritermsg", Locale.getDefault());
|
|
||||||
String msg = bundle.getString("try_get_schema_fromdb");
|
|
||||||
System.out.println(msg);
|
|
||||||
}
|
|
||||||
}
|
|
@ -0,0 +1,70 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.datax.core.Engine;
import org.junit.Before;
import org.junit.Test;

import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.Random;

public class Mysql2TDengineTest {

    private static final String host1 = "192.168.56.105";
    private static final String host2 = "192.168.1.93";
    private static final Random random = new Random(System.currentTimeMillis());

    @Test
    public void mysql2tdengine() throws Throwable {
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/m2t-1.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Before
    public void before() throws SQLException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        String ts = sdf.format(new Date(System.currentTimeMillis()));

        final String url = "jdbc:mysql://" + host1 + ":3306/?useSSL=false&useUnicode=true&charset=UTF-8&generateSimpleParameterMetadata=true";
        try (Connection conn = DriverManager.getConnection(url, "root", "123456")) {
            Statement stmt = conn.createStatement();

            stmt.execute("drop database if exists db1");
            stmt.execute("create database if not exists db1");
            stmt.execute("use db1");
            stmt.execute("create table stb1(id int primary key AUTO_INCREMENT, " +
                    "f1 tinyint, f2 smallint, f3 int, f4 bigint, " +
                    "f5 float, f6 double, " +
                    "ts timestamp, dt datetime," +
                    "f7 nchar(100), f8 varchar(100))");
            for (int i = 1; i <= 10; i++) {
                String sql = "insert into stb1(f1, f2, f3, f4, f5, f6, ts, dt, f7, f8) values(" +
                        i + "," + random.nextInt(100) + "," + random.nextInt(100) + "," + random.nextInt(100) + "," +
                        random.nextFloat() + "," + random.nextDouble() + ", " +
                        "'" + ts + "', '" + ts + "', " +
                        "'中国北京朝阳望京abc', '中国北京朝阳望京adc')";
                stmt.execute(sql);
            }

            stmt.close();
        }

        final String url2 = "jdbc:TAOS-RS://" + host2 + ":6041/";
        try (Connection conn = DriverManager.getConnection(url2, "root", "taosdata")) {
            Statement stmt = conn.createStatement();

            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2");
            stmt.execute("create table db2.stb2(" +
                    "ts timestamp, dt timestamp, " +
                    "f1 tinyint, f2 smallint, f3 int, f4 bigint, " +
                    "f5 float, f6 double, " +
                    "f7 nchar(100), f8 nchar(100))");

            stmt.close();
        }

    }

}
@ -0,0 +1,36 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.datax.core.Engine;
import org.junit.Assert;
import org.junit.Test;

import java.sql.*;

public class Opentsdb2TDengineTest {

    @Test
    public void opentsdb2tdengine() throws SQLException {
        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/o2t-1.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        try {
            Engine.entry(params);
        } catch (Throwable e) {
            e.printStackTrace();
        }

        // assert
        String jdbcUrl = "jdbc:TAOS://192.168.56.105:6030/test?timestampFormat=TIMESTAMP";
        try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) {
            Statement stmt = conn.createStatement();
            ResultSet rs = stmt.executeQuery("select count(*) from weather_temperature");
            int rows = 0;
            while (rs.next()) {
                rows = rs.getInt("count(*)");
            }
            Assert.assertEquals(5, rows);
            stmt.close();
        }
    }

}
@ -0,0 +1,88 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.fastjson.util.TypeUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class SchemaManagerTest {

    private static Connection conn;

    @Test
    public void loadTableMeta() throws SQLException {
        // given
        SchemaManager schemaManager = new SchemaManager(conn);
        List<String> tables = Arrays.asList("stb1", "stb2", "tb1", "tb3", "weather");

        // when
        Map<String, TableMeta> tableMetaMap = schemaManager.loadTableMeta(tables);

        // then
        TableMeta stb1 = tableMetaMap.get("stb1");
        Assert.assertEquals(TableType.SUP_TABLE, stb1.tableType);
        Assert.assertEquals("stb1", stb1.tbname);
        Assert.assertEquals(3, stb1.columns);
        Assert.assertEquals(1, stb1.tags);
        Assert.assertEquals(2, stb1.tables);

        TableMeta tb3 = tableMetaMap.get("tb3");
        Assert.assertEquals(TableType.SUB_TABLE, tb3.tableType);
        Assert.assertEquals("tb3", tb3.tbname);
        Assert.assertEquals(4, tb3.columns);
        Assert.assertEquals("stb2", tb3.stable_name);

        TableMeta weather = tableMetaMap.get("weather");
        Assert.assertEquals(TableType.NML_TABLE, weather.tableType);
        Assert.assertEquals("weather", weather.tbname);
        Assert.assertEquals(6, weather.columns);
        Assert.assertNull(weather.stable_name);
    }

    @Test
    public void loadColumnMetas() {
        // given
        SchemaManager schemaManager = new SchemaManager(conn);
        List<String> tables = Arrays.asList("stb1", "stb2", "tb1", "tb3", "weather");

        // when
        Map<String, List<ColumnMeta>> columnMetaMap = schemaManager.loadColumnMetas(tables);

        // then
        List<ColumnMeta> stb1 = columnMetaMap.get("stb1");
        Assert.assertEquals(4, stb1.size());
    }

    @BeforeClass
    public static void beforeClass() throws SQLException {
        conn = DriverManager.getConnection("jdbc:TAOS-RS://192.168.56.105:6041", "root", "taosdata");
        try (Statement stmt = conn.createStatement()) {
            stmt.execute("drop database if exists scm_test");
            stmt.execute("create database if not exists scm_test");
            stmt.execute("use scm_test");
            stmt.execute("create table stb1(ts timestamp, f1 int, f2 int) tags(t1 int)");
            stmt.execute("create table stb2(ts timestamp, f1 int, f2 int, f3 int) tags(t1 int, t2 int)");
            stmt.execute("insert into tb1 using stb1 tags(1) values(now, 1, 2)");
            stmt.execute("insert into tb2 using stb1 tags(2) values(now, 1, 2)");
            stmt.execute("insert into tb3 using stb2 tags(1,1) values(now, 1, 2, 3)");
            stmt.execute("insert into tb4 using stb2 tags(2,2) values(now, 1, 2, 3)");
            stmt.execute("create table weather(ts timestamp, f1 int, f2 int, f3 int, t1 int, t2 int)");
        }
    }

    @AfterClass
    public static void afterClass() throws SQLException {
        if (conn != null) {
            conn.close();
        }
    }
}
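Read together, the assertions above pin down the TableMeta fields: for super table stb1, columns is 3 (ts, f1, f2), tags is 1 and tables counts its 2 sub tables; for sub table tb3, columns covers its own fields and stable_name records the parent; for the ordinary table weather, stable_name stays null. A hypothetical pretty-printer for a loaded map, not part of the commit, just to make the field semantics visible:

    import java.util.Map;

    public class MetaDumpSketch {
        // Hypothetical helper: dump what SchemaManager.loadTableMeta() returned.
        static void dump(Map<String, TableMeta> metas) {
            for (Map.Entry<String, TableMeta> e : metas.entrySet()) {
                TableMeta m = e.getValue();
                System.out.println(e.getKey() + " -> type=" + m.tableType
                        + ", columns=" + m.columns + ", tags=" + m.tags
                        + ", tables=" + m.tables + ", stable=" + m.stable_name);
            }
        }
    }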
@ -0,0 +1,69 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.datax.core.Engine;
import org.junit.Before;
import org.junit.Test;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class Stream2TDengineTest {

    private String host2 = "192.168.56.105";

    @Test
    public void s2t_case1() throws Throwable {
        // given
        createSupTable("ms");

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/defaultJob.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void s2t_case2() throws Throwable {
        // given
        createSupTable("us");

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/defaultJob.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void s2t_case3() throws Throwable {
        // given
        createSupTable("ns");

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/defaultJob.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    void createSupTable(String precision) throws SQLException {

        final String url = "jdbc:TAOS-RS://" + host2 + ":6041/";
        try (Connection conn = DriverManager.getConnection(url, "root", "taosdata")) {
            Statement stmt = conn.createStatement();

            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2 precision '" + precision + "'");
            stmt.execute("create table db2.stb2(ts1 timestamp, ts2 timestamp,ts3 timestamp,ts4 timestamp,ts5 timestamp," +
                    "ts6 timestamp,ts7 timestamp, ts8 timestamp, ts9 timestamp, ts10 timestamp, f1 tinyint, f2 smallint," +
                    "f3 int, f4 bigint, f5 float, f6 double," +
                    "f7 bool, f8 binary(100), f9 nchar(100)) tags(t1 timestamp,t2 timestamp,t3 timestamp,t4 timestamp," +
                    "t5 timestamp,t6 timestamp,t7 timestamp, t8 tinyint, t9 smallint, t10 int, t11 bigint, t12 float," +
                    "t13 double, t14 bool, t15 binary(100), t16 nchar(100))");

            stmt.close();
        }

    }

}
@ -0,0 +1,127 @@
package com.alibaba.datax.plugin.writer.tdenginewriter;

import com.alibaba.datax.core.Engine;
import org.junit.Before;
import org.junit.Test;

import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.Random;

public class TDengine2TDengineTest {

    private static final String host1 = "192.168.56.105";
    private static final String host2 = "192.168.1.93";
    private static final Random random = new Random(System.currentTimeMillis());

    @Test
    public void case_01() throws Throwable {
        // given
        createSupTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/t2t-1.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void case_02() throws Throwable {
        // given
        createSupTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/t2t-2.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void case_03() throws Throwable {
        // given
        createSupAndSubTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/t2t-3.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    @Test
    public void case_04() throws Throwable {
        // given
        createTable();

        // when
        String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/test/resources/t2t-4.json"};
        System.setProperty("datax.home", "../target/datax/datax");
        Engine.entry(params);
    }

    private void createTable() throws SQLException {
        final String url2 = "jdbc:TAOS-RS://" + host2 + ":6041";
        try (Connection conn = DriverManager.getConnection(url2, "root", "taosdata")) {
            Statement stmt = conn.createStatement();
            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2");
            stmt.execute("create table db2.weather (ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint, " +
                    "f5 float, f6 double, f7 bool, f8 binary(100), f9 nchar(100))");
            stmt.close();
        }
    }

    private void createSupTable() throws SQLException {
        final String url2 = "jdbc:TAOS-RS://" + host2 + ":6041";
        try (Connection conn = DriverManager.getConnection(url2, "root", "taosdata")) {
            Statement stmt = conn.createStatement();
            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2");
            stmt.execute("create table db2.stb2 (ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint," +
                    " f5 float, f6 double, f7 bool, f8 binary(100), f9 nchar(100)) tags(t1 timestamp, t2 tinyint, " +
                    "t3 smallint, t4 int, t5 bigint, t6 float, t7 double, t8 bool, t9 binary(100), t10 nchar(1000))");
            stmt.close();
        }
    }

    private void createSupAndSubTable() throws SQLException {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
        final String ts = sdf.format(new Date(System.currentTimeMillis()));

        final String url2 = "jdbc:TAOS-RS://" + host2 + ":6041";
        try (Connection conn = DriverManager.getConnection(url2, "root", "taosdata")) {
            Statement stmt = conn.createStatement();
            stmt.execute("drop database if exists db2");
            stmt.execute("create database if not exists db2");
            stmt.execute("create table db2.stb2 (ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint," +
                    " f5 float, f6 double, f7 bool, f8 binary(100), f9 nchar(100)) tags(t1 timestamp, t2 tinyint, " +
                    "t3 smallint, t4 int, t5 bigint, t6 float, t7 double, t8 bool, t9 binary(100), t10 nchar(1000))");

            stmt.execute("create table db2.t1 using db2.stb2 tags('" + ts + "',1,2,3,4,5.0,6.0,true,'abc123ABC','北京朝阳望京')");
            stmt.close();
        }
    }

    @Before
    public void before() throws SQLException {
        final String url = "jdbc:TAOS-RS://" + host1 + ":6041";
        try (Connection conn = DriverManager.getConnection(url, "root", "taosdata")) {
            Statement stmt = conn.createStatement();

            stmt.execute("drop database if exists db1");
            stmt.execute("create database if not exists db1");
            stmt.execute("create table db1.stb1 (ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint," +
                    " f5 float, f6 double, f7 bool, f8 binary(100), f9 nchar(100)) tags(t1 timestamp, t2 tinyint, " +
                    "t3 smallint, t4 int, t5 bigint, t6 float, t7 double, t8 bool, t9 binary(100), t10 nchar(1000))");
            for (int i = 0; i < 10; i++) {
                String sql = "insert into db1.t" + (i + 1) + " using db1.stb1 tags(now+" + i + "s," +
                        random.nextInt(100) + "," + random.nextInt(100) + "," + random.nextInt(100) + "," +
                        random.nextInt(100) + "," + random.nextFloat() + "," + random.nextDouble() + "," +
                        random.nextBoolean() + ",'abc123ABC','北京朝阳望京') values(now+" + i + "s, " +
                        random.nextInt(100) + "," + random.nextInt(100) + "," + random.nextInt(100) + "," +
                        random.nextInt(100) + "," + random.nextFloat() + "," + random.nextDouble() + "," +
                        random.nextBoolean() + ",'abc123ABC','北京朝阳望京')";
                stmt.execute(sql);
            }
        }
    }
}
@ -1,31 +1,62 @@
 package com.alibaba.datax.plugin.writer.tdenginewriter;

+import com.alibaba.datax.common.util.Configuration;
+import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;

-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.sql.Statement;
+import java.util.List;

 public class TDengineWriterTest {

-    @Test
-    public void testGetSchema() throws ClassNotFoundException, SQLException {
-        Class.forName("com.taosdata.jdbc.TSDBDriver");
-        String jdbcUrl = String.format("jdbc:TAOS://%s:%s/%s?user=%s&password=%s", "wozai.fun", "6030", "test", "root", "taosdata");
-        Connection conn = DriverManager.getConnection(jdbcUrl);
-        SchemaManager schemaManager = new SchemaManager();
-        schemaManager.setStable("test1");
-        schemaManager.getFromDB(conn);
+    TDengineWriter.Job job;
+
+    @Before
+    public void before() {
+        job = new TDengineWriter.Job();
+        Configuration configuration = Configuration.from("{" +
+                "\"username\": \"root\"," +
+                "\"password\": \"taosdata\"," +
+                "\"column\": [\"ts\", \"f1\", \"f2\", \"t1\"]," +
+                "\"connection\": [{\"table\":[\"weather\"],\"jdbcUrl\":\"jdbc:TAOS-RS://master:6041/test\"}]," +
+                "\"batchSize\": \"1000\"" +
+                "}");
+        job.setPluginJobConf(configuration);
     }

     @Test
-    public void dropTestTable() throws ClassNotFoundException, SQLException {
-        Class.forName("com.taosdata.jdbc.TSDBDriver");
-        String jdbcUrl = String.format("jdbc:TAOS://%s:%s/%s?user=%s&password=%s", "wozai.fun", "6030", "test", "root", "taosdata");
-        Connection conn = DriverManager.getConnection(jdbcUrl);
-        Statement stmt = conn.createStatement();
-        stmt.execute("drop table market_snapshot");
+    public void jobInit() {
+        // when
+        job.init();
+
+        // assert
+        Configuration conf = job.getPluginJobConf();
+
+        Assert.assertEquals("root", conf.getString("username"));
+        Assert.assertEquals("taosdata", conf.getString("password"));
+        Assert.assertEquals("jdbc:TAOS-RS://master:6041/test", conf.getString("connection[0].jdbcUrl"));
+        Assert.assertEquals(new Integer(1000), conf.getInt("batchSize"));
+        Assert.assertEquals("ts", conf.getString("column[0]"));
+        Assert.assertEquals("f2", conf.getString("column[2]"));
     }
-}
+
+    @Test
+    public void jobSplit() {
+        // when
+        job.init();
+        List<Configuration> configurationList = job.split(10);
+
+        // assert
+        Assert.assertEquals(10, configurationList.size());
+        for (Configuration conf : configurationList) {
+            Assert.assertEquals("root", conf.getString("username"));
+            Assert.assertEquals("taosdata", conf.getString("password"));
+            Assert.assertEquals("jdbc:TAOS-RS://master:6041/test", conf.getString("jdbcUrl"));
+            Assert.assertEquals(new Integer(1000), conf.getInt("batchSize"));
+            Assert.assertEquals("ts", conf.getString("column[0]"));
+            Assert.assertEquals("f2", conf.getString("column[2]"));
+        }
+    }
+
+}
226
tdenginewriter/src/test/resources/defaultJob.json
Normal file
226
tdenginewriter/src/test/resources/defaultJob.json
Normal file
@ -0,0 +1,226 @@
|
|||||||
|
{
|
||||||
|
"job": {
|
||||||
|
"content": [
|
||||||
|
{
|
||||||
|
"reader": {
|
||||||
|
"name": "streamreader",
|
||||||
|
"parameter": {
|
||||||
|
"column": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"value": "tb1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "date",
|
||||||
|
"value": "2022-02-20 12:00:01"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "date",
|
||||||
|
"value": "2022-02-20 12:00:02.123",
|
||||||
|
"dateFormat": "yyyy-MM-dd HH:mm:ss.SSS"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "date",
|
||||||
|
"value": "2022-02-20 12:00:03.123456",
|
||||||
|
"dateFormat": "yyyy-MM-dd HH:mm:ss.SSSSSS"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "date",
|
||||||
|
"value": "2022-02-20 12:00:04.123456789",
|
||||||
|
"dateFormat": "yyyy-MM-dd HH:mm:ss.SSSSSSSSS"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"value": "2022-02-20 12:00:05.123"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"value": "2022-02-20 12:00:06.123456"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"value": "2022-02-20 12:00:07.123456789"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "long",
|
||||||
|
"value": 1645329608000
|
||||||
|
              },
              {
                "type": "long",
                "value": 1645329609000000
              },
              {
                "type": "long",
                "value": 1645329610000000000
              },
              {
                "type": "long",
                "random": "0, 10"
              },
              {
                "type": "long",
                "random": "0, 100"
              },
              {
                "type": "long",
                "random": "0, 1000"
              },
              {
                "type": "long",
                "random": "0, 10000"
              },
              {
                "type": "double",
                "random": "0, 10"
              },
              {
                "type": "double",
                "random": "10, 20"
              },
              {
                "type": "bool",
                "random": "0, 50"
              },
              {
                "type": "bytes",
                "random": "0, 10"
              },
              {
                "type": "string",
                "random": "10, 50"
              },
              {
                "type": "date",
                "value": "2022-02-20 12:00:01"
              },
              {
                "type": "date",
                "value": "2022-02-20 12:00:02.123",
                "dateFormat": "yyyy-MM-dd HH:mm:ss.SSS"
              },
              {
                "type": "date",
                "value": "2022-02-20 12:00:03.123456",
                "dateFormat": "yyyy-MM-dd HH:mm:ss.SSSSSS"
              },
              {
                "type": "date",
                "value": "2022-02-20 12:00:04.123456789",
                "dateFormat": "yyyy-MM-dd HH:mm:ss.SSSSSSSSS"
              },
              {
                "type": "string",
                "value": "2022-02-20 12:00:05.123"
              },
              {
                "type": "string",
                "value": "2022-02-20 12:00:06.123456"
              },
              {
                "type": "string",
                "value": "2022-02-20 12:00:07.123456789"
              },
              {
                "type": "long",
                "value": 1
              },
              {
                "type": "long",
                "value": 2
              },
              {
                "type": "long",
                "value": 3
              },
              {
                "type": "long",
                "value": 4
              },
              {
                "type": "double",
                "value": 5.55
              },
              {
                "type": "double",
                "value": 6.666666
              },
              {
                "type": "bool",
                "value": true
              },
              {
                "type": "bytes",
                "value": "abcABC123"
              },
              {
                "type": "string",
                "value": "北京朝阳望京"
              }
            ],
            "sliceRecordCount": 10
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "tbname",
              "ts1",
              "ts2",
              "ts3",
              "ts4",
              "ts5",
              "ts6",
              "ts7",
              "ts8",
              "ts9",
              "ts10",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "t1",
              "t2",
              "t3",
              "t4",
              "t5",
              "t6",
              "t7",
              "t8",
              "t9",
              "t10",
              "t11",
              "t12",
              "t13",
              "t14",
              "t15",
              "t16"
            ],
            "connection": [
              {
                "table": [
                  "stb2"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.56.105:6041/db2"
              }
            ],
            "batchSize": 100,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
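To sanity-check what a job like this writes, a plain JDBC query against the target is enough. A minimal sketch, assuming the taos-jdbcdriver dependency is on the classpath and reusing the RESTful URL and credentials from the job file above (host, database, and table are the test's values, not fixtures of the plugin):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class VerifyWrite {
    public static void main(String[] args) throws SQLException {
        // Same RESTful endpoint and credentials as the stream2tdengine job above.
        String url = "jdbc:TAOS-RS://192.168.56.105:6041/db2";
        try (Connection conn = DriverManager.getConnection(url, "root", "taosdata");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select count(*) from stb2")) {
            if (rs.next()) {
                // With "sliceRecordCount": 10 and one channel, 10 rows are expected.
                System.out.println("rows in stb2: " + rs.getLong(1));
            }
        }
    }
}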
30 tdenginewriter/src/test/resources/dm-schema.sql Normal file
@ -0,0 +1,30 @@
select tablespace_name
from dba_data_files;

create
tablespace test datafile '/home/dmdba/dmdbms/data/DAMENG/test.dbf' size 32 autoextend on next 1 maxsize 1024;

create
user TESTUSER identified by test123456 default tablespace test;

grant dba to TESTUSER;

select *
from user_tables;

drop table if exists stb1;

create table stb1
(
    ts timestamp,
    f1 tinyint,
    f2 smallint,
    f3 int,
    f4 bigint,
    f5 float,
    f6 double,
    f7 NUMERIC(10, 2),
    f8 BIT,
    f9 VARCHAR(100),
    f10 VARCHAR2(200)
);
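The diff only adds the DM source schema; the TDengine side of the dm2t jobs is not shown. As a hedged sketch, one plausible target setup (the names db2/stb2 come from the job files, but the type mapping below is an assumption, not the test's actual DDL):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class CreateTarget {
    public static void main(String[] args) throws SQLException {
        String url = "jdbc:TAOS-RS://192.168.1.93:6041/";
        try (Connection conn = DriverManager.getConnection(url, "root", "taosdata");
             Statement stmt = conn.createStatement()) {
            stmt.executeUpdate("create database if not exists db2");
            // Assumed mapping of the DM columns: NUMERIC -> double, BIT -> bool,
            // VARCHAR/VARCHAR2 -> nchar. If the test's stb2 is a super table,
            // a tags(...) clause would be needed here as well.
            stmt.executeUpdate("create table if not exists db2.stb2 (ts timestamp,"
                    + " f1 tinyint, f2 smallint, f3 int, f4 bigint, f5 float,"
                    + " f6 double, f7 double, f8 bool, f9 nchar(100), f10 nchar(200))");
        }
    }
}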
62 tdenginewriter/src/test/resources/dm2t-1.json Normal file
@ -0,0 +1,62 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "rdbmsreader",
          "parameter": {
            "username": "TESTUSER",
            "password": "test123456",
            "connection": [
              {
                "querySql": [
                  "select concat(concat(concat('t', f1), '_'),f3) as tbname,* from stb1;"
                ],
                "jdbcUrl": [
                  "jdbc:dm://192.168.0.72:5236"
                ]
              }
            ],
            "fetchSize": 1024
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "tbname",
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "f10"
            ],
            "connection": [
              {
                "table": [
                  "stb2"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.1.93:6041/db2"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
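The querySql above prepends a synthetic tbname column so the writer can route each row to a per-device sub-table. The nested concat is just string building; for illustration, the name a row gets (mirroring the SQL expression, not the writer's internals) works out like this:

public class TbnameDerivation {
    // Mirrors concat(concat(concat('t', f1), '_'), f3): "t" + f1 + "_" + f3.
    static String tbname(int f1, int f3) {
        return "t" + f1 + "_" + f3;
    }

    public static void main(String[] args) {
        // A row with f1=1, f3=0 is routed to sub-table "t1_0" -- the fixed
        // target table that dm2t-2.json below uses.
        System.out.println(tbname(1, 0));
    }
}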
62 tdenginewriter/src/test/resources/dm2t-2.json Normal file
@ -0,0 +1,62 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "rdbmsreader",
          "parameter": {
            "username": "TESTUSER",
            "password": "test123456",
            "connection": [
              {
                "querySql": [
                  "select concat(concat(concat('t', f1), '_'),f3) as tbname,* from stb1;"
                ],
                "jdbcUrl": [
                  "jdbc:dm://192.168.0.72:5236"
                ]
              }
            ],
            "fetchSize": 1024
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "tbname",
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "f10"
            ],
            "connection": [
              {
                "table": [
                  "t1_0"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.1.93:6041/db2"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
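Apart from the fixed target table t1_0, this job is identical to dm2t-1.json: the reader still emits the derived tbname column, but the writer pins every row to a single named table, presumably to exercise the fixed-table code path rather than per-row routing.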
76 tdenginewriter/src/test/resources/dm2t-3.json Normal file
@ -0,0 +1,76 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "rdbmsreader",
          "parameter": {
            "username": "TESTUSER",
            "password": "test123456",
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "f10"
            ],
            "splitPk": "f1",
            "connection": [
              {
                "table": [
                  "stb1"
                ],
                "jdbcUrl": [
                  "jdbc:dm://192.168.0.72:5236"
                ]
              }
            ],
            "fetchSize": 1024,
            "where": "1 = 1"
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "f10"
            ],
            "connection": [
              {
                "table": [
                  "stb2"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.1.93:6041/db2"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
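dm2t-3.json switches the reader from free-form querySql to the declarative table/column mode, adding a splitPk (so DataX can shard the read on f1 across tasks) and a where clause; the writer side is unchanged.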
61 tdenginewriter/src/test/resources/dm2t-4.json Normal file
@ -0,0 +1,61 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "rdbmsreader",
          "parameter": {
            "username": "TESTUSER",
            "password": "test123456",
            "connection": [
              {
                "querySql": [
                  "select * from stb1"
                ],
                "jdbcUrl": [
                  "jdbc:dm://192.168.0.72:5236"
                ]
              }
            ],
            "fetchSize": 1024
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "f10"
            ],
            "connection": [
              {
                "table": [
                  "stb2"
                ],
                "jdbcUrl": "jdbc:TAOS://192.168.1.93:6030/db2"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
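Note the writer URL here: the jdbc:TAOS://…:6030 form is the native connection of taos-jdbcdriver, which requires the TDengine client library to be installed on the DataX host, while the jdbc:TAOS-RS://…:6041 form used by the other jobs goes over the REST interface and needs no local client.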
72 tdenginewriter/src/test/resources/m2t-1.json Normal file
@ -0,0 +1,72 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "mysqlreader",
          "parameter": {
            "username": "root",
            "password": "123456",
            "column": [
              "ts",
              "dt",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8"
            ],
            "splitPk": "id",
            "connection": [
              {
                "table": [
                  "stb1"
                ],
                "jdbcUrl": [
                  "jdbc:mysql://192.168.56.105:3306/db1?useSSL=false&useUnicode=true&characterEncoding=utf8"
                ]
              }
            ]
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "ts",
              "dt",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8"
            ],
            "connection": [
              {
                "table": [
                  "stb2"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.1.93:6041/db2"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
36 tdenginewriter/src/test/resources/o2t-1.json Normal file
@ -0,0 +1,36 @@
{
  "job": {
    "content": [{
      "reader": {
        "name": "opentsdbreader",
        "parameter": {
          "endpoint": "http://192.168.56.105:4242",
          "column": ["weather_temperature"],
          "beginDateTime": "2021-01-01 00:00:00",
          "endDateTime": "2021-01-01 01:00:00"
        }
      },
      "writer": {
        "name": "tdenginewriter",
        "parameter": {
          "username": "root",
          "password": "taosdata",
          "connection": [
            {
              "table": [
                "meters"
              ],
              "jdbcUrl": "jdbc:TAOS://192.168.56.105:6030/test?timestampFormat=TIMESTAMP"
            }
          ],
          "batchSize": 1000
        }
      }
    }],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
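This job carries no writer-side column list: with an OpenTSDB source the writer consumes the reader's record layout as-is. The timestampFormat=TIMESTAMP query parameter controls how taos-jdbcdriver represents timestamp values; as understood from the driver's options, TIMESTAMP yields numeric epoch values rather than formatted strings.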
94 tdenginewriter/src/test/resources/t2t-1.json Normal file
@ -0,0 +1,94 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "tdenginereader",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "connection": [
              {
                "table": [
                  "stb1"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.56.105:6041/db1?timestampFormat=TIMESTAMP"
              }
            ],
            "column": [
              "tbname",
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "t1",
              "t2",
              "t3",
              "t4",
              "t5",
              "t6",
              "t7",
              "t8",
              "t9",
              "t10"
            ],
            "beginDateTime": "2022-02-15 00:00:00",
            "endDateTime": "2022-02-16 00:00:00",
            "splitInterval": "1d"
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "tbname",
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "t1",
              "t2",
              "t3",
              "t4",
              "t5",
              "t6",
              "t7",
              "t8",
              "t9",
              "t10"
            ],
            "connection": [
              {
                "table": [
                  "stb2"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.1.93:6041/db2?timestampFormat=TIMESTAMP"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
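beginDateTime and endDateTime bound the read on the first timestamp column, and splitInterval chops that window into per-task slices. A minimal sketch of the slicing idea in plain java.time (illustrative of the configuration, not the reader's actual code):

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;

public class SplitIntervalSketch {
    public static void main(String[] args) {
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
        LocalDateTime begin = LocalDateTime.parse("2022-02-15 00:00:00", fmt);
        LocalDateTime end = LocalDateTime.parse("2022-02-16 00:00:00", fmt);
        Duration step = Duration.ofDays(1); // "splitInterval": "1d"

        // Each [start, stop) pair would back one read task's time range
        // on the first timestamp column.
        List<LocalDateTime[]> slices = new ArrayList<>();
        for (LocalDateTime t = begin; t.isBefore(end); t = t.plus(step)) {
            LocalDateTime stop = t.plus(step).isBefore(end) ? t.plus(step) : end;
            slices.add(new LocalDateTime[]{t, stop});
        }
        slices.forEach(s -> System.out.println(s[0] + " -> " + s[1]));
    }
}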
92 tdenginewriter/src/test/resources/t2t-2.json Normal file
@ -0,0 +1,92 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "tdenginereader",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "connection": [
              {
                "table": [
                  "stb1"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.56.105:6041/db1?timestampFormat=TIMESTAMP"
              }
            ],
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "t1",
              "t2",
              "t3",
              "t4",
              "t5",
              "t6",
              "t7",
              "t8",
              "t9",
              "t10"
            ],
            "beginDateTime": "2022-02-15 00:00:00",
            "endDateTime": "2022-02-16 00:00:00",
            "splitInterval": "1d"
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "t1",
              "t2",
              "t3",
              "t4",
              "t5",
              "t6",
              "t7",
              "t8",
              "t9",
              "t10"
            ],
            "connection": [
              {
                "table": [
                  "stb2"
                ],
                "jdbcUrl": "jdbc:TAOS://192.168.1.93:6030/db2"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
92 tdenginewriter/src/test/resources/t2t-3.json Normal file
@ -0,0 +1,92 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "tdenginereader",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "connection": [
              {
                "table": [
                  "stb1"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.56.105:6041/db1?timestampFormat=TIMESTAMP"
              }
            ],
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "t1",
              "t2",
              "t3",
              "t4",
              "t5",
              "t6",
              "t7",
              "t8",
              "t9",
              "t10"
            ],
            "beginDateTime": "2022-02-15 00:00:00",
            "endDateTime": "2022-02-16 00:00:00",
            "splitInterval": "1d"
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9",
              "t1",
              "t2",
              "t3",
              "t4",
              "t5",
              "t6",
              "t7",
              "t8",
              "t9",
              "t10"
            ],
            "connection": [
              {
                "table": [
                  "t1"
                ],
                "jdbcUrl": "jdbc:TAOS://192.168.1.93:6030/db2?timestampFormat=TIMESTAMP"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
72 tdenginewriter/src/test/resources/t2t-4.json Normal file
@ -0,0 +1,72 @@
{
  "job": {
    "content": [
      {
        "reader": {
          "name": "tdenginereader",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "connection": [
              {
                "table": [
                  "stb1"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.56.105:6041/db1?timestampFormat=TIMESTAMP"
              }
            ],
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9"
            ],
            "beginDateTime": "2022-02-15 00:00:00",
            "endDateTime": "2022-02-16 00:00:00",
            "splitInterval": "1d"
          }
        },
        "writer": {
          "name": "tdenginewriter",
          "parameter": {
            "username": "root",
            "password": "taosdata",
            "column": [
              "ts",
              "f1",
              "f2",
              "f3",
              "f4",
              "f5",
              "f6",
              "f7",
              "f8",
              "f9"
            ],
            "connection": [
              {
                "table": [
                  "weather"
                ],
                "jdbcUrl": "jdbc:TAOS-RS://192.168.1.93:6041/db2"
              }
            ],
            "batchSize": 1000,
            "ignoreTagsUnmatched": true
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": 1
      }
    }
  }
}
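Taken together, the four t2t jobs appear to exercise the writer's main routing cases: t2t-1 migrates super table to super table with tbname preserved, t2t-2 drops tbname and writes to stb2 over the native connection, t2t-3 targets a single child table t1, and t2t-4 moves only the data columns (no tags) into a plain table weather.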