From 714a01d5c0702ad61e5e659f06e806afa5c73766 Mon Sep 17 00:00:00 2001
From: menshibin
Date: Mon, 17 Feb 2025 11:50:38 +0800
Subject: [PATCH] docs: update flink version

---
 docs/en/10-third-party/01-collection/flink.md |  3 ++-
 docs/examples/flink/Main.java                 | 24 +++++++++----------
 .../10-third-party/01-collection/12-flink.md  |  3 ++-
 3 files changed, 16 insertions(+), 14 deletions(-)

diff --git a/docs/en/10-third-party/01-collection/flink.md b/docs/en/10-third-party/01-collection/flink.md
index b225a2d610..6974b8788b 100644
--- a/docs/en/10-third-party/01-collection/flink.md
+++ b/docs/en/10-third-party/01-collection/flink.md
@@ -26,6 +26,7 @@ Flink Connector supports all platforms that can run Flink 1.19 and above version
 
 | Flink Connector Version | Major Changes | TDengine Version|
 |-------------------------| ------------------------------------ | ---------------- |
+| 2.1.0 | Fix the issue of writing varchar types from different data sources.| - |
 | 2.0.2 | The Table Sink supports types such as RowKind.UPDATE_BEFORE, RowKind.UPDATE_AFTER, and RowKind.DELETE.| - |
 | 2.0.1 | Sink supports writing types from Rowdata implementations.| - |
 | 2.0.0 | 1.Support SQL queries on data in TDengine database.<br> 2. Support CDC subscription to data in TDengine database.<br> 3. Supports reading and writing to TDengine database using Table SQL. | 3.3.5.1 and higher|
@@ -116,7 +117,7 @@ If using Maven to manage a project, simply add the following dependencies in pom
 <dependency>
     <groupId>com.taosdata.flink</groupId>
     <artifactId>flink-connector-tdengine</artifactId>
-    <version>2.0.2</version>
+    <version>2.1.0</version>
 </dependency>
 ```
 
diff --git a/docs/examples/flink/Main.java b/docs/examples/flink/Main.java
index 50a507d1de..b6a0daa9f4 100644
--- a/docs/examples/flink/Main.java
+++ b/docs/examples/flink/Main.java
@@ -198,7 +198,7 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                     ", current: " + rowData.getFloat(1) +
                     ", voltage: " + rowData.getInt(2) +
                     ", phase: " + rowData.getFloat(3) +
-                    ", location: " + new String(rowData.getBinary(4)));
+                    ", location: " + rowData.getString(4).toString());
             sb.append("\n");
             return sb.toString();
         });
@@ -273,7 +273,7 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                     ", current: " + row.getFloat(1) +
                     ", voltage: " + row.getInt(2) +
                     ", phase: " + row.getFloat(3) +
-                    ", location: " + new String(row.getBinary(4)));
+                    ", location: " + row.getString(4).toString());
             sb.append("\n");
             totalVoltage.addAndGet(row.getInt(2));
         }
@@ -311,7 +311,7 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                     ", current: " + rowData.getFloat(1) +
                     ", voltage: " + rowData.getInt(2) +
                     ", phase: " + rowData.getFloat(3) +
-                    ", location: " + new String(rowData.getBinary(4)));
+                    ", location: " + rowData.getString(4).toString());
             sb.append("\n");
             totalVoltage.addAndGet(rowData.getInt(2));
             return sb.toString();
@@ -353,7 +353,7 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                     ", current: " + row.getFloat(1) +
                     ", voltage: " + row.getInt(2) +
                     ", phase: " + row.getFloat(3) +
-                    ", location: " + new String(row.getBinary(4)));
+                    ", location: " + row.getString(4).toString());
             sb.append("\n");
             totalVoltage.addAndGet(row.getInt(2));
         }
@@ -489,9 +489,9 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                 " `current` FLOAT," +
                 " voltage INT," +
                 " phase FLOAT," +
-                " location VARBINARY," +
+                " location VARCHAR(255)," +
                 " groupid INT," +
-                " tbname VARBINARY" +
+                " tbname VARCHAR(255)" +
                 ") WITH (" +
                 " 'connector' = 'tdengine-connector'," +
                 " 'td.jdbc.url' = 'jdbc:TAOS-WS://localhost:6041/power?user=root&password=taosdata'," +
@@ -506,9 +506,9 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                 " `current` FLOAT," +
                 " voltage INT," +
                 " phase FLOAT," +
-                " location VARBINARY," +
+                " location VARCHAR(255)," +
                 " groupid INT," +
-                " tbname VARBINARY" +
+                " tbname VARCHAR(255)" +
                 ") WITH (" +
                 " 'connector' = 'tdengine-connector'," +
                 " 'td.jdbc.mode' = 'sink'," +
@@ -535,9 +535,9 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                 " `current` FLOAT," +
                 " voltage INT," +
                 " phase FLOAT," +
-                " location VARBINARY," +
+                " location VARCHAR(255)," +
                 " groupid INT," +
-                " tbname VARBINARY" +
+                " tbname VARCHAR(255)" +
                 ") WITH (" +
                 " 'connector' = 'tdengine-connector'," +
                 " 'bootstrap.servers' = 'localhost:6041'," +
@@ -554,9 +554,9 @@ splitSql.setSelect("ts, current, voltage, phase, groupid, location")
                 " `current` FLOAT," +
                 " voltage INT," +
                 " phase FLOAT," +
-                " location VARBINARY," +
+                " location VARCHAR(255)," +
                 " groupid INT," +
-                " tbname VARBINARY" +
+                " tbname VARCHAR(255)" +
                 ") WITH (" +
                 " 'connector' = 'tdengine-connector'," +
                 " 'td.jdbc.mode' = 'cdc'," +
diff --git a/docs/zh/10-third-party/01-collection/12-flink.md b/docs/zh/10-third-party/01-collection/12-flink.md
index d74f6cad6d..c223f6ba95 100644
--- a/docs/zh/10-third-party/01-collection/12-flink.md
+++ b/docs/zh/10-third-party/01-collection/12-flink.md
@@ -24,6 +24,7 @@ Flink Connector 支持所有能运行 Flink 1.19 及以上版本的平台。
 ## 版本历史
 | Flink Connector 版本 | 主要变化 | TDengine 版本 |
 | ------------------| ------------------------------------ | ---------------- |
+| 2.1.0 | 修复不同数据源 varchar 类型写入问题| - |
 | 2.0.2 | Table Sink 支持 RowKind.UPDATE_BEFORE、RowKind.UPDATE_AFTER 和 RowKind.DELETE 类型| - |
 | 2.0.1 | Sink 支持对所有继承自 RowData 并已实现的类型进行数据写入| - |
 | 2.0.0 | 1. 支持 SQL 查询 TDengine 数据库中的数据<br> 2. 支持 CDC 订阅 TDengine 数据库中的数据<br> 3. 支持 Table SQL 方式读取和写入 TDengine 数据库| 3.3.5.1 及以上版本 |
@@ -113,7 +114,7 @@ env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.AT_LEAST_ONCE);
 <dependency>
     <groupId>com.taosdata.flink</groupId>
     <artifactId>flink-connector-tdengine</artifactId>
-    <version>2.0.2</version>
+    <version>2.1.0</version>
 </dependency>
 ```
 
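Editor's note: the Main.java hunks above replace `new String(rowData.getBinary(4))` with `rowData.getString(4).toString()` for reading the `location` column. The standalone sketch below illustrates that accessor pattern using only Flink's `RowData`/`GenericRowData` classes; the class name, the assumed row layout (ts, current, voltage, phase, location) with `location` at index 4, and the sample values are illustrative assumptions, not part of the patch or of the connector API.

```java
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.TimestampData;

public class RowDataStringAccessSketch {

    // Formats one meter row. Assumed layout: (ts, current, voltage, phase, location),
    // with the location column at index 4, mirroring the accessor calls in Main.java.
    static String formatRow(RowData rowData) {
        return "ts: " + rowData.getTimestamp(0, 3) +
                ", current: " + rowData.getFloat(1) +
                ", voltage: " + rowData.getInt(2) +
                ", phase: " + rowData.getFloat(3) +
                // String columns are read with getString(), which returns StringData,
                // rather than by decoding raw bytes obtained from getBinary().
                ", location: " + rowData.getString(4).toString();
    }

    public static void main(String[] args) {
        // Build a sample row in Flink's internal representation (values are made up).
        GenericRowData row = new GenericRowData(5);
        row.setField(0, TimestampData.fromEpochMillis(1_700_000_000_000L));
        row.setField(1, 10.5f);
        row.setField(2, 219);
        row.setField(3, 0.31f);
        row.setField(4, StringData.fromString("California.SanFrancisco"));
        System.out.println(formatRow(row));
    }
}
```

Reading string columns through `getString()` avoids the platform-default-charset assumption that `new String(byte[])` makes, which is presumably why this patch also switches the example DDL from VARBINARY to VARCHAR(255) for `location` and `tbname`.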