diff --git a/README.md b/README.md
index 9d18a0a9..37a21022 100644
--- a/README.md
+++ b/README.md
@@ -60,8 +60,7 @@ DataX目前已经有了比较全面的插件体系,主流的RDBMS数据库、N
| | FTP | √ | √ |[读](https://github.com/alibaba/DataX/blob/master/ftpreader/doc/ftpreader.md) 、[写](https://github.com/alibaba/DataX/blob/master/ftpwriter/doc/ftpwriter.md)|
| | HDFS | √ | √ |[读](https://github.com/alibaba/DataX/blob/master/hdfsreader/doc/hdfsreader.md) 、[写](https://github.com/alibaba/DataX/blob/master/hdfswriter/doc/hdfswriter.md)|
| | Elasticsearch | | √ |[写](https://github.com/alibaba/DataX/blob/master/elasticsearchwriter/doc/elasticsearchwriter.md)|
-| 时间序列数据库 | TDengine | √ | √ |[读](https://github.com/taosdata/DataX/blob/master/tdenginereader/doc/tdenginereader.md) 、[写](https://github.com/taosdata/DataX/blob/master/tdenginewriter/doc/tdenginewriter.md)|
-| | OpenTSDB | √ | |[读](https://github.com/alibaba/DataX/blob/master/opentsdbreader/doc/opentsdbreader.md)|
+| 时间序列数据库 | OpenTSDB | √ | |[读](https://github.com/alibaba/DataX/blob/master/opentsdbreader/doc/opentsdbreader.md)|
| | TSDB | √ | √ |[读](https://github.com/alibaba/DataX/blob/master/tsdbreader/doc/tsdbreader.md) 、[写](https://github.com/alibaba/DataX/blob/master/tsdbwriter/doc/tsdbhttpwriter.md)|
# 阿里云DataWorks数据集成
diff --git a/core/pom.xml b/core/pom.xml
index 3981cfcc..174a18d3 100755
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -97,18 +97,6 @@
groovy-all
2.1.9
-
- mysql
- mysql-connector-java
- ${mysql.driver.version}
- test
-
-
- com.taosdata.jdbc
- taos-jdbcdriver
- 2.0.34
- test
-
diff --git a/core/src/main/job/mongodb2tdengine.json b/core/src/main/job/mongodb2tdengine.json
deleted file mode 100644
index 49e04c11..00000000
--- a/core/src/main/job/mongodb2tdengine.json
+++ /dev/null
@@ -1,78 +0,0 @@
-{
- "job": {
- "setting": {
- "speed": {
- "channel": 2
- }
- },
- "content": [
- {
- "reader": {
- "name": "mongodbreader",
- "parameter": {
- "address": [
- "127.0.0.1:27017"
- ],
- "userName": "mongouser",
- "mechanism": "SCRAM-SHA-1",
- "userPassword": "mongopass",
- "authDb": "admin",
- "dbName": "test",
- "collectionName": "cu_market_data",
- "column": [
- {
- "name": "instrumentID",
- "type": "string"
- },
- {
- "name": "tradeTime",
- "type": "date"
- },
- {
- "name": "lastPrice",
- "type": "double"
- },
- {
- "name": "askPrice1",
- "type": "double"
- },
- {
- "name": "bidPrice1",
- "type": "double"
- },
- {
- "name": "volume",
- "type": "int"
- }
- ]
- }
- },
- "writer": {
- "name": "tdenginewriter",
- "parameter": {
- "host": "127.0.0.1",
- "port": 6030,
- "dbname": "test",
- "user": "root",
- "password": "taosdata",
- "stable": "market_snapshot",
- "batchSize": 35,
- "tagColumn": {
- "product": "cu",
- "instrumentID": 0
- },
- "fieldColumn": {
- "lastPrice": 2,
- "askPrice1": 3,
- "bidPrice1": 4,
- "volume": 5
- },
- "timestampColumn": {
- "tradeTime": 1
- }
- }
- }
- }
- ]
- }
-}
\ No newline at end of file
diff --git a/core/src/main/job/mysql2tdengine.json b/core/src/main/job/mysql2tdengine.json
deleted file mode 100644
index c936aa36..00000000
--- a/core/src/main/job/mysql2tdengine.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
- "job": {
- "content": [
- {
- "reader": {
- "name": "mysqlreader",
- "parameter": {
- "username": "root",
- "password": "passw0rd",
- "column": [
- "*"
- ],
- "splitPk": "station",
- "connection": [
- {
- "table": [
- "weather"
- ],
- "jdbcUrl": [
- "jdbc:mysql://127.0.0.1:3306/test?useSSL=false&useUnicode=true&characterEncoding=utf8"
- ]
- }
- ]
- }
- },
- "writer": {
- "name": "tdenginewriter",
- "parameter": {
- "host": "127.0.0.1",
- "port": 6030,
- "dbname": "test",
- "user": "root",
- "password": "taosdata",
- "batchSize": 1000,
- "stable": "weather",
- "tagColumn": {
- "station": 0
- },
- "fieldColumn": {
- "latitude": 1,
- "longtitude": 2,
- "tmax": 4,
- "tmin": 5
- },
- "timestampColumn":{
- "date": 3
- }
- }
- }
- }
- ],
- "setting": {
- "speed": {
- "channel": 1
- }
- }
- }
-}
\ No newline at end of file
diff --git a/core/src/main/job/opentsdb2tdengine.json b/core/src/main/job/opentsdb2tdengine.json
deleted file mode 100644
index 377b98c9..00000000
--- a/core/src/main/job/opentsdb2tdengine.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
- "job": {
- "content": [
- {
- "reader": {
- "name": "opentsdbreader",
- "parameter": {
- "endpoint": "http://192.168.1.180:4242",
- "column": [
- "weather_temperature"
- ],
- "beginDateTime": "2021-01-01 00:00:00",
- "endDateTime": "2021-01-01 01:00:00"
- }
- },
- "writer": {
- "name": "tdenginewriter",
- "parameter": {
- "host": "192.168.56.105",
- "port": 6030,
- "dbname": "test",
- "user": "root",
- "password": "taosdata",
- "batchSize": 1000
- }
- }
- }
- ],
- "setting": {
- "speed": {
- "channel": 1
- }
- }
- }
-}
\ No newline at end of file
diff --git a/core/src/main/job/stream2tdengine.json b/core/src/main/job/stream2tdengine.json
deleted file mode 100644
index 6af68323..00000000
--- a/core/src/main/job/stream2tdengine.json
+++ /dev/null
@@ -1,56 +0,0 @@
-{
- "job": {
- "content": [
- {
- "reader": {
- "name": "streamreader",
- "parameter": {
- "column": [
- {
- "random": "2021-01-01 00:00:00, 2021-01-01 23:59:59",
- "type": "date"
- },
- {
- "random": "0, 10000",
- "type": "long"
- },
- {
- "random": "0, 10",
- "type": "string"
- },
- {
- "random": "0, 5",
- "type": "bool"
- },
- {
- "random": "0, 10",
- "type": "double"
- },
- {
- "random": "0, 10",
- "type": "bytes"
- }
- ],
- "sliceRecordCount": 100
- }
- },
- "writer": {
- "name": "tdenginewriter",
- "parameter": {
- "host": "192.168.56.105",
- "port": 6030,
- "dbname": "test",
- "user": "root",
- "password": "taosdata",
- "batchSize": 1000
- }
- }
- }
- ],
- "setting": {
- "speed": {
- "channel": 1
- }
- }
- }
-}
\ No newline at end of file
diff --git a/core/src/main/job/tdengine2tdengine.json b/core/src/main/job/tdengine2tdengine.json
deleted file mode 100644
index 750ae202..00000000
--- a/core/src/main/job/tdengine2tdengine.json
+++ /dev/null
@@ -1,38 +0,0 @@
-{
- "job": {
- "content": [
- {
- "reader": {
- "name": "tdenginereader",
- "parameter": {
- "host": "192.168.1.82",
- "port": 6030,
- "db": "test",
- "user": "root",
- "password": "taosdata",
- "sql": "select * from weather",
- "beginDateTime": "2021-01-01 00:00:00",
- "endDateTime": "2021-01-02 00:00:00",
- "splitInterval": "1h"
- }
- },
- "writer": {
- "name": "tdenginewriter",
- "parameter": {
- "host": "192.168.56.105",
- "port": 6030,
- "dbname": "test",
- "user": "root",
- "password": "taosdata",
- "batchSize": 1000
- }
- }
- }
- ],
- "setting": {
- "speed": {
- "channel": 1
- }
- }
- }
-}
\ No newline at end of file
diff --git a/core/src/test/java/com/alibaba/datax/core/EngineTest.java b/core/src/test/java/com/alibaba/datax/core/EngineTest.java
deleted file mode 100644
index fe4dff8b..00000000
--- a/core/src/test/java/com/alibaba/datax/core/EngineTest.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.alibaba.datax.core;
-
-
-import org.junit.Test;
-
-public class EngineTest {
-
- @Test
- public void test() {
- System.out.println(System.getProperty("java.library.path"));
-// String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/main/job/opentsdb2tdengine.json"};
- String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/main/job/mysql2tdengine.json"};
- System.setProperty("datax.home", "../target/datax/datax");
- try {
- Engine.entry(params);
- } catch (Throwable e) {
- e.printStackTrace();
- }
- }
-
-}
\ No newline at end of file
diff --git a/core/src/test/java/com/alibaba/datax/core/TestMysql2TDengine.java b/core/src/test/java/com/alibaba/datax/core/TestMysql2TDengine.java
deleted file mode 100644
index 4aaad646..00000000
--- a/core/src/test/java/com/alibaba/datax/core/TestMysql2TDengine.java
+++ /dev/null
@@ -1,104 +0,0 @@
-package com.alibaba.datax.core;
-
-import org.junit.Test;
-
-import java.sql.*;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.List;
-import java.util.Random;
-
-/**
- * 测试从mysql到TDengine
- */
-public class TestMysql2TDengine {
-
- @Test
- public void genTestData() throws SQLException {
- Connection conn;
- Statement stmt = null;
- PreparedStatement pstmt = null;
-
- try {
- conn = DriverManager.getConnection("jdbc:mysql://localhost/mysql?" +
- "user=root&password=passw0rd");
- stmt = conn.createStatement();
- stmt.execute("create database if not exists test");
- stmt.execute("use test");
- stmt.execute("drop table weather");
- stmt.execute("CREATE TABLE IF NOT EXISTS weather(station varchar(100), latitude DOUBLE, longtitude DOUBLE, `date` DATETIME, tmax INT, tmin INT)");
- pstmt = conn.prepareStatement("insert into weather(station, latitude, longtitude, `date`, tmax, tmin) values (?, ?, ?, ?, ?, ?)");
- genRandomData(pstmt);
- } finally {
- if (stmt != null) {
- try {
- stmt.close();
- } catch (SQLException sqlEx) {
- } // ignore
-
- stmt = null;
- }
-
- if (pstmt != null) {
- pstmt.close();
- pstmt = null;
- }
- }
-
- }
-
- private void genRandomData(PreparedStatement psmt) throws SQLException {
- Random random = new Random();
- Calendar calendar = Calendar.getInstance();
- calendar.set(1990, 0, 1, 1, 0, 0);
- List stations = Arrays.asList("STA", "STB", "STC");
- for (int i = 0; i < (10 * 100 * 24); i++) {
- for (int j = 0; j < 3; j++) {
- psmt.setString(1, stations.get(j));
- psmt.setDouble(2, random.nextDouble() * 1000);
- psmt.setDouble(3, random.nextDouble() * 1000);
- psmt.setTimestamp(4, new java.sql.Timestamp(calendar.getTime().getTime()));
- psmt.setInt(5, random.nextInt(100));
- psmt.setInt(6, random.nextInt(100));
- psmt.addBatch();
- }
- calendar.add(Calendar.MINUTE, 60);
- if (i % 1000 == 0) {
- psmt.executeBatch();
- }
- }
- psmt.executeBatch();
- }
-
- @Test
- public void prepareTDengine() throws SQLException {
- Connection conn;
- Statement stmt = null;
-
- try {
- conn = DriverManager.getConnection("jdbc:TAOS://127.0.0.1:6030/log?user=root&password=taosdata");
- stmt = conn.createStatement();
- stmt.execute("drop database if exists test");
- stmt.execute("create database if not exists test keep 36500");
- stmt.execute("drop stable if exists test.weather");
- } finally {
- if (stmt != null) {
- stmt.close();
- }
- }
- }
-
- @Test
- public void test() {
- System.out.println(System.getProperty("java.library.path"));
- String[] params = {"-mode", "standalone", "-jobid", "-1", "-job", "src/main/job/mysql2tdengine.json"};
- System.setProperty("datax.home", "../target/datax/datax");
- try {
- Engine.entry(params);
- } catch (Throwable e) {
- e.printStackTrace();
- }
- }
-
-
-}
\ No newline at end of file
diff --git a/package.xml b/package.xml
index 1b291a99..d95feb40 100755
--- a/package.xml
+++ b/package.xml
@@ -103,13 +103,13 @@
datax
-
- otsstreamreader/target/datax/
-
- **/*.*
-
- datax
-
+
+ otsstreamreader/target/datax/
+
+ **/*.*
+
+ datax
+
txtfilereader/target/datax/
@@ -138,7 +138,7 @@
datax
-
+
ftpreader/target/datax/
**/*.*
@@ -180,17 +180,17 @@
datax
+
+
- tdenginereader/target/datax/
+ mysqlwriter/target/datax/
**/*.*
datax
-
-
- mysqlwriter/target/datax/
+ tdenginewriter/target/datax/
**/*.*
@@ -259,7 +259,7 @@
datax
-
+
oraclewriter/target/datax/
**/*.*
@@ -273,7 +273,7 @@
datax
-
+
postgresqlwriter/target/datax/
**/*.*
@@ -399,12 +399,5 @@
datax
-
- tdenginewriter/target/datax/
-
- **/*.*
-
- datax
-
diff --git a/pom.xml b/pom.xml
index 2358e212..1a9da81b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -48,68 +48,66 @@
mysqlreader
-
-
-
-
-
+ drdsreader
+ sqlserverreader
+ postgresqlreader
+ kingbaseesreader
+ oraclereader
odpsreader
-
-
+ otsreader
+ otsstreamreader
txtfilereader
-
+ hdfsreader
streamreader
-
-
-
+ ossreader
+ ftpreader
+ mongodbreader
rdbmsreader
-
-
-
+ hbase11xreader
+ hbase094xreader
+ tsdbreader
opentsdbreader
-
-
-
+ cassandrareader
+ gdbreader
+ oceanbasev10reader
-
-
+ mysqlwriter
+ tdenginewriter
+ drdswriter
odpswriter
txtfilewriter
-
-
+ ftpwriter
+ hdfswriter
streamwriter
-
-
-
-
-
-
-
+ otswriter
+ oraclewriter
+ sqlserverwriter
+ postgresqlwriter
+ kingbaseeswriter
+ osswriter
+ mongodbwriter
adswriter
-
+ ocswriter
rdbmswriter
-
-
-
-
-
-
-
-
-
-
-
-
-
+ hbase11xwriter
+ hbase094xwriter
+ hbase11xsqlwriter
+ hbase11xsqlreader
+ elasticsearchwriter
+ tsdbwriter
+ adbpgwriter
+ gdbwriter
+ cassandrawriter
+ clickhousewriter
+ oscarwriter
+ oceanbasev10writer
plugin-rdbms-util
plugin-unstructured-storage-util
-
-
-
- tdenginewriter
- tdenginereader
+ hbase20xsqlreader
+ hbase20xsqlwriter
+ kuduwriter
diff --git a/tdenginereader/doc/tdenginereader.md b/tdenginereader/doc/tdenginereader.md
deleted file mode 100644
index 284b8e6d..00000000
--- a/tdenginereader/doc/tdenginereader.md
+++ /dev/null
@@ -1,145 +0,0 @@
-# DataX TDengineReader
-
-## 1 快速介绍
-
-TDengineReader 插件实现了 TDengine 读取数据的功能。
-
-## 2 实现原理
-
-TDengineReader 通过TDengine的JDBC driver查询获取数据。
-
-## 3 功能说明
-
-### 3.1 配置样例
-
-```json
-{
- "job": {
- "content": [
- {
- "reader": {
- "name": "tdenginereader",
- "parameter": {
- "host": "192.168.1.82",
- "port": 6030,
- "db": "test",
- "user": "root",
- "password": "taosdata",
- "sql": "select * from weather",
- "beginDateTime": "2021-01-01 00:00:00",
- "endDateTime": "2021-01-02 00:00:00",
- "splitInterval": "1h"
- }
- },
- "writer": {
- "name": "streamwriter",
- "parameter": {
- "encoding": "UTF-8",
- "print": true
- }
- }
- }
- ],
- "setting": {
- "speed": {
- "channel": 1
- }
- }
- }
-}
-```
-
-### 3.2 参数说明
-
-* **host**
- * 描述:TDengine实例的host。
- * 必选:是
- * 默认值:无
-* **port**
- * 描述:TDengine实例的port。
- * 必选:是
- * 默认值:无
-* **dbname**
- * 描述:目的数据库的名称。
- * 必选:是
- * 默认值:无
-* **username**
- * 描述:TDengine实例的用户名
- * 必选:是
- * 默认值:无
-* **password**
- * 描述:TDengine实例的密码
- * 必选:是
- * 默认值:无
-* **sql**
- * 描述:用来筛选迁移数据的sql
- * 必选:是
- * 默认值:无
-* **beginDateTime**
- * 描述:TDengine实例的密码
- * 必选:是
- * 默认值:无
-* **endDateTime**
- * 描述:
- * 必选:是
- * 默认值:无
-* **splitInterval**
- * 描述:按照splitInterval来划分task, 每splitInterval创建一个task
- * 必选:否
- * 默认值:1h
-
-### 3.3 类型转换
-
-
-## 4 性能报告
-
-### 4.1 环境准备
-
-#### 4.1.1 数据特征
-
-建表语句:
-
-单行记录类似于:
-
-#### 4.1.2 机器参数
-
-* 执行DataX的机器参数为:
- 1. cpu:
- 2. mem:
- 3. net: 千兆双网卡
- 4. disc: DataX 数据不落磁盘,不统计此项
-
-* TDengine数据库机器参数为:
- 1. cpu:
- 2. mem:
- 3. net: 千兆双网卡
- 4. disc:
-
-#### 4.1.3 DataX jvm 参数
-
- -Xms1024m -Xmx1024m -XX:+HeapDumpOnOutOfMemoryError
-
-### 4.2 测试报告
-
-#### 4.2.1 单表测试报告
-
-| 通道数| DataX速度(Rec/s)|DataX流量(MB/s)| DataX机器网卡流出流量(MB/s)|DataX机器运行负载|DB网卡进入流量(MB/s)|DB运行负载|DB TPS|
-|--------| --------|--------|--------|--------|--------|--------|--------|
-|1| | | | | | | |
-|4| | | | | | | |
-|8| | | | | | | |
-|16| | | | | | | |
-|32| | | | | | | |
-
-说明:
-1. 这里的单表,主键类型为 bigint(20),自增。
-2. batchSize 和 通道个数,对性能影响较大。
-
-#### 4.2.4 性能测试小结
-
-1.
-2.
-
-## 5 约束限制
-
-## FAQ
\ No newline at end of file
diff --git a/tdenginereader/pom.xml b/tdenginereader/pom.xml
deleted file mode 100644
index 66c64eaf..00000000
--- a/tdenginereader/pom.xml
+++ /dev/null
@@ -1,90 +0,0 @@
-
-
-
- datax-all
- com.alibaba.datax
- 0.0.1-SNAPSHOT
-
- 4.0.0
-
- tdenginereader
-
-
- 8
- 8
-
-
-
-
- com.alibaba.datax
- datax-common
- ${datax-project-version}
-
-
- slf4j-log4j12
- org.slf4j
-
-
-
-
-
- junit
- junit
- ${junit-version}
- test
-
-
-
-
-
-
-
-
- maven-compiler-plugin
-
- ${jdk-version}
- ${jdk-version}
- ${project-sourceEncoding}
-
-
-
- maven-assembly-plugin
-
-
- src/main/assembly/package.xml
-
- datax
-
-
-
- dwzip
- package
-
- single
-
-
-
-
-
-
- org.apache.maven.plugins
- maven-surefire-plugin
- 2.12.4
-
-
-
- **/*Test.java
-
-
-
-
- true
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/tdenginereader/src/main/assembly/package.xml b/tdenginereader/src/main/assembly/package.xml
deleted file mode 100755
index b52f20fb..00000000
--- a/tdenginereader/src/main/assembly/package.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-
-
-
- dir
-
- false
-
-
- src/main/resources
-
- plugin.json
- plugin_job_template.json
-
- plugin/reader/tdenginereader
-
-
- target/
-
- tdenginereader-0.0.1-SNAPSHOT.jar
-
- plugin/reader/tdenginereader
-
-
-
-
-
- false
- plugin/reader/tdenginereader/libs
- runtime
-
-
-
diff --git a/tdenginereader/src/main/java/com/alibaba/datax/plugin/reader/TDengineReader.java b/tdenginereader/src/main/java/com/alibaba/datax/plugin/reader/TDengineReader.java
deleted file mode 100644
index cec88eda..00000000
--- a/tdenginereader/src/main/java/com/alibaba/datax/plugin/reader/TDengineReader.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package com.alibaba.datax.plugin.reader;
-
-import com.alibaba.datax.common.plugin.RecordSender;
-import com.alibaba.datax.common.spi.Reader;
-import com.alibaba.datax.common.util.Configuration;
-
-import java.util.List;
-
-public class TDengineReader extends Reader {
-
- public static class Job extends Reader.Job {
-
- @Override
- public void init() {
-
- }
-
- @Override
- public void destroy() {
-
- }
-
- @Override
- public List split(int adviceNumber) {
- return null;
- }
- }
-
- public static class Task extends Reader.Task {
-
- @Override
- public void init() {
-
- }
-
- @Override
- public void destroy() {
-
- }
-
- @Override
- public void startRead(RecordSender recordSender) {
-
- }
- }
-
-}
diff --git a/tdenginereader/src/main/resources/plugin.json b/tdenginereader/src/main/resources/plugin.json
deleted file mode 100755
index dc91982c..00000000
--- a/tdenginereader/src/main/resources/plugin.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "name": "tdenginereader",
- "class": "com.alibaba.datax.plugin.reader.TDengineReader",
- "description": {
- "useScene": "data migration from tdengine",
- "mechanism": "use JNI to read data from tdengine."
- },
- "developer": "zyyang-taosdata"
-}
\ No newline at end of file
diff --git a/tdenginereader/src/main/resources/plugin_job_template.json b/tdenginereader/src/main/resources/plugin_job_template.json
deleted file mode 100644
index 3e09dffc..00000000
--- a/tdenginereader/src/main/resources/plugin_job_template.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "name": "tdenginereader",
- "parameter": {
- "host": "127.0.0.1",
- "port": 6030,
- "db": "test",
- "user": "root",
- "password": "taosdata",
- "sql": "select * from weather",
- "beginDateTime": "2021-01-01 00:00:00",
- "endDateTime": "2021-01-02 00:00:00",
- "splitInterval": "1h"
- }
-}
\ No newline at end of file
diff --git a/tdenginewriter/doc/tdenginewriter-CN.md b/tdenginewriter/doc/tdenginewriter-CN.md
index c20b3b86..abd92de5 100644
--- a/tdenginewriter/doc/tdenginewriter-CN.md
+++ b/tdenginewriter/doc/tdenginewriter-CN.md
@@ -41,7 +41,7 @@ TDengineWriter 通过 DataX 框架获取 Reader生成的协议数据,根据rea
"parameter": {
"host": "192.168.1.180",
"port": 6030,
- "dbname": "test",
+ "dbName": "test",
"user": "root",
"password": "taosdata"
}
@@ -65,7 +65,7 @@ TDengineWriter 通过 DataX 框架获取 Reader生成的协议数据,根据rea
| port | TDengine实例的port | 是 | 无 |
| user | TDengine实例的用户名 | 否 | root |
| password | TDengine实例的密码 | 否 | taosdata |
-| dbname | 目的数据库的名称 | 是 | 无 |
+| dbName | 目的数据库的名称 | 是 | 无 |
| batchSize | 每次批量插入多少记录 | 否 | 1 |
@@ -141,7 +141,7 @@ TDengineWriter 通过 DataX 框架获取 Reader生成的协议数据,根据rea
"parameter": {
"host": "localhost",
"port": 6030,
- "dbname": "test",
+ "dbName": "test",
"user": "root",
"password": "taosdata",
"stable": "stock",
@@ -176,12 +176,14 @@ TDengineWriter 通过 DataX 框架获取 Reader生成的协议数据,根据rea
| port | TDengine实例的port | 是 | 无 |
| user | TDengine实例的用户名 | 否 | root |
| password | TDengine实例的密码 | 否 | taosdata |
-| dbname | 目的数据库的名称 | 是 | 无 |
+| dbName | 目的数据库的名称 | 是 | 无 |
| batchSize | 每次批量插入多少记录 | 否 | 1000 |
| stable | 目标超级表的名称 | 是(OpenTSDB除外) | 无 |
-| tagColumn | 标签列的列名和位置 | 否 | 无 | 位置索引均从0开始 |
-| fieldColumn | 字段列的列名和位置 | 否 | 无 | |
-| timestampColumn | 时间戳列的列名和位置 | 否 | 无 | 时间戳列只能有一个 |
+| tagColumn | 格式:{tagName1: tagInd1, tagName2: tagInd2}, 标签列在写插件收到的Record中的位置和列名 | 否 | 无 | 位置索引均从0开始, tagInd如果为字符串, 表示固定标签值,不需要从源数据中获取 |
+| fieldColumn | 格式:{fdName1: fdInd1, fdName2: fdInd2}, 字段列在写插件收到的Record中的位置和列名 | 否 | 无 | |
+| timestampColumn | 格式:{tsColName: tsColIndex}, 时间戳列在写插件收到的Record中的位置和列名 | 否 | 无 | 时间戳列只能有一个 |
+
+示例配置中tagColumn有一个industry,它的值是一个固定的字符串“energy”, 作用是给导入的所有数据加一个值为"energy"的固定标签industry。这个应用场景可以是:在源库中,有多个设备采集的数据分表存储,设备名就是表名,可以用这个机制把设备名称转化为标签。
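+
+下面给出一个仅作示意的配置片段(其中的列名和下标都是假设的示例值),用来说明 tagColumn、fieldColumn、timestampColumn 的格式,以及 tagColumn 中“位置下标”与“固定标签值”两种写法:
+
+```json
+{
+  "tagColumn": {
+    "industry": "energy",
+    "instrumentID": 0
+  },
+  "fieldColumn": {
+    "lastPrice": 2,
+    "volume": 3
+  },
+  "timestampColumn": {
+    "tradeTime": 1
+  }
+}
+```
+
+其中 instrumentID 取自 Record 的第 0 列,而 industry 的值是固定字符串 "energy",会作为固定标签附加到每一行数据上。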
#### 3.2.3 自动建表规则
##### 3.2.3.1 超级表创建规则
@@ -237,7 +239,7 @@ TAGS(
| string, array | String | NCHAR(64) |
| date | Date | TIMESTAMP |
| boolean | Boolean | BOOL |
-| bytes | Bytes | BINARY |
+| bytes | Bytes | BINARY(64) |
### 3.3 从关系型数据库到TDengine
writer部分的配置规则和上述MongoDB的示例是一样的,这里给出一个MySQL的示例。
@@ -287,7 +289,7 @@ CREATE TABLE IF NOT EXISTS weather(
"parameter": {
"host": "127.0.0.1",
"port": 6030,
- "dbname": "test",
+ "dbName": "test",
"user": "root",
"password": "taosdata",
"batchSize": 1000,
@@ -396,4 +398,8 @@ TDengine要求每个表第一列是时间戳列,后边是普通字段,最后
### 为什么插入10年前的数据会抛异常`TDengine ERROR (2350): failed to execute batch bind` ?
-因为创建数据库的时候,默认保留10年的数据。可以手动指定要保留多长时间的数据,比如:`CREATE DATABASE power KEEP 36500;`。
\ No newline at end of file
+因为创建数据库的时候,默认保留10年的数据。可以手动指定要保留多长时间的数据,比如:`CREATE DATABASE power KEEP 36500;`。
+
+### 如果编译的时候某些插件的依赖找不到怎么办?
+
+如果这个插件不是必须的,可以注释掉根目录下的pom.xml中的对应插件。
\ No newline at end of file
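+
+例如,假设不需要的插件是 opentsdbreader(这里只是一个示例名称,请替换为实际不需要的插件),可以在根目录的 pom.xml 中把对应的 module 注释掉:
+
+```xml
+<!-- <module>opentsdbreader</module> -->
+```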
diff --git a/tdenginewriter/doc/tdenginewriter.md b/tdenginewriter/doc/tdenginewriter.md
index 42bdd04f..fd190570 100644
--- a/tdenginewriter/doc/tdenginewriter.md
+++ b/tdenginewriter/doc/tdenginewriter.md
@@ -38,7 +38,7 @@ TDengineWriter get records from DataX Framework that are generated from reader s
"parameter": {
"host": "192.168.1.180",
"port": 6030,
- "dbname": "test",
+ "dbName": "test",
"user": "root",
"password": "taosdata"
}
@@ -62,7 +62,7 @@ TDengineWriter get records from DataX Framework that are generated from reader s
| port | port of TDengine | Yes | |
| user | use name of TDengine | No | root |
| password | password of TDengine | No | taosdata |
-| dbname | name of target database | No | |
+| dbName | name of target database | No | |
| batchSize | batch size of insert operation | No | 1 |
@@ -136,7 +136,7 @@ TDengineWriter get records from DataX Framework that are generated from reader s
"parameter": {
"host": "localhost",
"port": 6030,
- "dbname": "test",
+ "dbName": "test",
"user": "root",
"password": "taosdata",
"stable": "stock",
@@ -172,13 +172,15 @@ TDengineWriter get records from DataX Framework that are generated from reader s
| port | port of TDengine | Yes | |
| user | user name of TDengine | No | root |
| password | password of TDengine | No | taosdata |
-| dbname | name of target database | Yes | |
+| dbName | name of target database | Yes | |
| batchSize | batch size of insert operation | No | 1000 |
| stable | name of target super table | Yes(except for OpenTSDB) | |
-| tagColumn | name and position of tag columns in the record from reader | No | | index starts with 0 |
-| fieldColumn | name and position of data columns in the record from reader | No | | |
+| tagColumn | name and position of tag columns in the record from reader, format:{tagName1: tagInd1, tagName2: tagInd2} | No | | index starts with 0 |
+| fieldColumn | name and position of data columns in the record from reader, format: {fdName1: fdInd1, fdName2: fdInd2} | No | | |
| timestampColumn | name and position of timestamp column in the record from reader | No | | |
+**Note**: In the example configuration above, the value of the tagColumn "industry" is a fixed string rather than a column index; this is a useful feature of this plugin. Consider this scenario: the source database has many tables with the same structure, each table corresponds to one device, and the device name is the table name. If you want to use the device name as a tag in the target super table, this feature is designed for exactly that case.
+
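+A minimal illustrative snippet (the column names and indexes below are made-up example values, not taken from a real job) showing the format of the three mappings, including a fixed tag value:
+
+```json
+{
+  "tagColumn": {
+    "industry": "energy",
+    "instrumentID": 0
+  },
+  "fieldColumn": {
+    "lastPrice": 2,
+    "volume": 3
+  },
+  "timestampColumn": {
+    "tradeTime": 1
+  }
+}
+```
+
+Here `instrumentID` is read from position 0 of every record, while `industry` carries the fixed string "energy" and is written as a constant tag on each row.
+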
#### 3.2.3 Auto table creating
##### 3.2.3.1 Rules
@@ -233,7 +235,7 @@ Then the first columns received by this writer plugin must represent timestamp,
| string, array | String | NCHAR(64) |
| date | Date | TIMESTAMP |
| boolean | Boolean | BOOL |
-| bytes | Bytes | BINARY |
+| bytes | Bytes | BINARY(64) |
### 3.3 From Relational Database to TDengine
@@ -284,7 +286,7 @@ CREATE TABLE IF NOT EXISTS weather(
"parameter": {
"host": "127.0.0.1",
"port": 6030,
- "dbname": "test",
+ "dbName": "test",
"user": "root",
"password": "taosdata",
"batchSize": 1000,
@@ -346,4 +348,9 @@ By the first batch of records it received.
### Why can't I insert data of 10 years ago? Do this will get error: `TDengine ERROR (2350): failed to execute batch bind`.
-Because the database you created only keep 10 years data by default, you can create table like this: `CREATE DATABASE power KEEP 36500;`, in order to enlarge the time period to 100 years.
\ No newline at end of file
+The database you created only keeps 10 years of data by default. You can create the database like this instead: `CREATE DATABASE power KEEP 36500;`, which enlarges the retention period to 100 years.
+
+
+### What should I do if some dependencies of a plugin can't be found?
+
+If the plugin is not necessary for you, just remove it from the pom.xml under the project's root directory.
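+
+For example, assuming the plugin you want to skip is opentsdbreader (an illustrative name only), you could comment out the corresponding module entry in the root pom.xml:
+
+```xml
+<!-- <module>opentsdbreader</module> -->
+```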
diff --git a/tdenginewriter/src/main/assembly/package.xml b/tdenginewriter/src/main/assembly/package.xml
old mode 100755
new mode 100644
diff --git a/tdenginewriter/src/main/java/com/alibaba/datax/plugin/writer/tdenginewriter/Key.java b/tdenginewriter/src/main/java/com/alibaba/datax/plugin/writer/tdenginewriter/Key.java
index 090a7999..a061e97f 100644
--- a/tdenginewriter/src/main/java/com/alibaba/datax/plugin/writer/tdenginewriter/Key.java
+++ b/tdenginewriter/src/main/java/com/alibaba/datax/plugin/writer/tdenginewriter/Key.java
@@ -3,7 +3,7 @@ package com.alibaba.datax.plugin.writer.tdenginewriter;
public class Key {
public static final String HOST = "host";
public static final String PORT = "port";
- public static final String DBNAME = "dbname";
+ public static final String DBNAME = "dbName";
public static final String USER = "user";
public static final String PASSWORD = "password";
public static final String BATCH_SIZE = "batchSize";
diff --git a/tdenginewriter/src/main/resources/plugin.json b/tdenginewriter/src/main/resources/plugin.json
old mode 100755
new mode 100644