From 3c5b8037e3846dcd8d8a3a85eafd0ecfd9b940c7 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 23 Jan 2025 08:46:29 +0000
Subject: [PATCH 001/105] feat/TS-5927-long-password
---
include/common/tglobal.h | 1 +
include/common/tmsg.h | 2 +
include/util/tdef.h | 3 +-
source/common/src/msg/tmsg.c | 8 ++++
source/common/src/tglobal.c | 8 +++-
source/dnode/mnode/impl/src/mndUser.c | 61 +++++++++++++++++++++------
tests/army/cluster/strongPassword.py | 52 +++++++++++++++++++++++
tests/parallel_test/cases.task | 1 +
8 files changed, 121 insertions(+), 15 deletions(-)
create mode 100644 tests/army/cluster/strongPassword.py
diff --git a/include/common/tglobal.h b/include/common/tglobal.h
index 4e9a9bd801..5990db467a 100644
--- a/include/common/tglobal.h
+++ b/include/common/tglobal.h
@@ -69,6 +69,7 @@ extern EEncryptAlgor tsiEncryptAlgorithm;
extern EEncryptScope tsiEncryptScope;
// extern char tsAuthCode[];
extern char tsEncryptKey[];
+extern int8_t tsEnableStrongPassword;
// common
extern int32_t tsMaxShellConns;
diff --git a/include/common/tmsg.h b/include/common/tmsg.h
index 82eaa2359e..5fc02a068e 100644
--- a/include/common/tmsg.h
+++ b/include/common/tmsg.h
@@ -1089,6 +1089,7 @@ typedef struct {
char* sql;
int8_t isImport;
int8_t createDb;
+ char longPass[TSDB_USET_PASSWORD_LONGLEN];
} SCreateUserReq;
int32_t tSerializeSCreateUserReq(void* buf, int32_t bufLen, SCreateUserReq* pReq);
@@ -1159,6 +1160,7 @@ typedef struct {
int64_t privileges;
int32_t sqlLen;
char* sql;
+ char longPass[TSDB_USET_PASSWORD_LONGLEN];
} SAlterUserReq;
int32_t tSerializeSAlterUserReq(void* buf, int32_t bufLen, SAlterUserReq* pReq);
diff --git a/include/util/tdef.h b/include/util/tdef.h
index f08697b0d4..1facb2074d 100644
--- a/include/util/tdef.h
+++ b/include/util/tdef.h
@@ -297,9 +297,10 @@ typedef enum ELogicConditionType {
#define TSDB_AUTH_LEN 16
#define TSDB_PASSWORD_MIN_LEN 8
-#define TSDB_PASSWORD_MAX_LEN 16
+#define TSDB_PASSWORD_MAX_LEN 255
#define TSDB_PASSWORD_LEN 32
#define TSDB_USET_PASSWORD_LEN 129
+#define TSDB_USET_PASSWORD_LONGLEN 256
#define TSDB_VERSION_LEN 32
#define TSDB_LABEL_LEN 16
#define TSDB_JOB_STATUS_LEN 32
diff --git a/source/common/src/msg/tmsg.c b/source/common/src/msg/tmsg.c
index 7a51669d46..ff60c120d1 100644
--- a/source/common/src/msg/tmsg.c
+++ b/source/common/src/msg/tmsg.c
@@ -2007,6 +2007,7 @@ int32_t tSerializeSCreateUserReq(void *buf, int32_t bufLen, SCreateUserReq *pReq
ENCODESQL();
TAOS_CHECK_EXIT(tEncodeI8(&encoder, pReq->isImport));
TAOS_CHECK_EXIT(tEncodeI8(&encoder, pReq->createDb));
+ TAOS_CHECK_EXIT(tEncodeCStr(&encoder, pReq->longPass));
tEndEncode(&encoder);
@@ -2047,6 +2048,9 @@ int32_t tDeserializeSCreateUserReq(void *buf, int32_t bufLen, SCreateUserReq *pR
TAOS_CHECK_EXIT(tDecodeI8(&decoder, &pReq->createDb));
TAOS_CHECK_EXIT(tDecodeI8(&decoder, &pReq->isImport));
}
+ if (!tDecodeIsEnd(&decoder)) {
+ TAOS_CHECK_EXIT(tDecodeCStrTo(&decoder, pReq->longPass));
+ }
tEndDecode(&decoder);
@@ -2402,6 +2406,7 @@ int32_t tSerializeSAlterUserReq(void *buf, int32_t bufLen, SAlterUserReq *pReq)
TAOS_CHECK_EXIT(tEncodeI64(&encoder, pReq->privileges));
ENCODESQL();
TAOS_CHECK_EXIT(tEncodeU8(&encoder, pReq->flag));
+ TAOS_CHECK_EXIT(tEncodeCStr(&encoder, pReq->longPass));
tEndEncode(&encoder);
_exit:
@@ -2453,6 +2458,9 @@ int32_t tDeserializeSAlterUserReq(void *buf, int32_t bufLen, SAlterUserReq *pReq
if (!tDecodeIsEnd(&decoder)) {
TAOS_CHECK_EXIT(tDecodeU8(&decoder, &pReq->flag));
}
+ if (!tDecodeIsEnd(&decoder)) {
+ TAOS_CHECK_EXIT(tDecodeCStrTo(&decoder, pReq->longPass));
+ }
tEndDecode(&decoder);
_exit:
diff --git a/source/common/src/tglobal.c b/source/common/src/tglobal.c
index 83b1845fd4..a16457dccd 100644
--- a/source/common/src/tglobal.c
+++ b/source/common/src/tglobal.c
@@ -58,6 +58,7 @@ EEncryptScope tsiEncryptScope = 0;
// char tsAuthCode[500] = {0};
// char tsEncryptKey[17] = {0};
char tsEncryptKey[17] = {0};
+int8_t tsEnableStrongPassword = 1;
// common
int32_t tsMaxShellConns = 50000;
@@ -838,6 +839,7 @@ static int32_t taosAddServerCfg(SConfig *pCfg) {
TAOS_CHECK_RETURN(cfgAddString(pCfg, "encryptAlgorithm", tsEncryptAlgorithm, CFG_SCOPE_SERVER, CFG_DYN_NONE, CFG_CATEGORY_GLOBAL));
TAOS_CHECK_RETURN(cfgAddString(pCfg, "encryptScope", tsEncryptScope, CFG_SCOPE_SERVER, CFG_DYN_NONE,CFG_CATEGORY_GLOBAL));
+ TAOS_CHECK_RETURN(cfgAddBool(pCfg, "enableStrongPassword", tsEnableStrongPassword, CFG_SCOPE_SERVER, CFG_DYN_SERVER,CFG_CATEGORY_GLOBAL));
TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "statusInterval", tsStatusInterval, 1, 30, CFG_SCOPE_SERVER, CFG_DYN_SERVER_LAZY,CFG_CATEGORY_GLOBAL));
TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "maxShellConns", tsMaxShellConns, 10, 50000000, CFG_SCOPE_SERVER, CFG_DYN_SERVER_LAZY, CFG_CATEGORY_LOCAL));
@@ -1527,6 +1529,9 @@ static int32_t taosSetServerCfg(SConfig *pCfg) {
TAOS_CHECK_RETURN(taosCheckCfgStrValueLen(pItem->name, pItem->str, 100));
tstrncpy(tsEncryptScope, pItem->str, 100);
+ TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, "enableStrongPassword");
+ tsEnableStrongPassword = pItem->i32;
+
TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, "numOfRpcThreads");
tsNumOfRpcThreads = pItem->i32;
@@ -2518,7 +2523,8 @@ static int32_t taosCfgDynamicOptionsForServer(SConfig *pCfg, const char *name) {
{"arbHeartBeatIntervalSec", &tsArbHeartBeatIntervalSec},
{"arbCheckSyncIntervalSec", &tsArbCheckSyncIntervalSec},
{"arbSetAssignedTimeoutSec", &tsArbSetAssignedTimeoutSec},
- {"queryNoFetchTimeoutSec", &tsQueryNoFetchTimeoutSec}};
+ {"queryNoFetchTimeoutSec", &tsQueryNoFetchTimeoutSec},
+ {"enableStrongPassword", &tsEnableStrongPassword}};
if ((code = taosCfgSetOption(debugOptions, tListLen(debugOptions), pItem, true)) != TSDB_CODE_SUCCESS) {
code = taosCfgSetOption(options, tListLen(options), pItem, false);
diff --git a/source/dnode/mnode/impl/src/mndUser.c b/source/dnode/mnode/impl/src/mndUser.c
index 5b2a5fa8aa..8572c954c8 100644
--- a/source/dnode/mnode/impl/src/mndUser.c
+++ b/source/dnode/mnode/impl/src/mndUser.c
@@ -1705,11 +1705,22 @@ static int32_t mndCreateUser(SMnode *pMnode, char *acct, SCreateUserReq *pCreate
int32_t code = 0;
int32_t lino = 0;
SUserObj userObj = {0};
+ char pass[TSDB_USET_PASSWORD_LONGLEN] = {0};
+
+ int32_t len = strlen(pCreate->longPass);
+
+ if (len > 0) {
+ strncpy(pass, pCreate->longPass, TSDB_USET_PASSWORD_LONGLEN);
+ } else {
+ len = strlen(pCreate->pass);
+ strncpy(pass, pCreate->pass, TSDB_PASSWORD_LEN);
+ }
+
if (pCreate->isImport != 1) {
- taosEncryptPass_c((uint8_t *)pCreate->pass, strlen(pCreate->pass), userObj.pass);
+ taosEncryptPass_c((uint8_t *)pass, strlen(pass), userObj.pass);
} else {
// mInfo("pCreate->pass:%s", pCreate->eass)
- memcpy(userObj.pass, pCreate->pass, TSDB_PASSWORD_LEN);
+ memcpy(userObj.pass, pass, TSDB_PASSWORD_LEN);
}
tstrncpy(userObj.user, pCreate->user, TSDB_USER_LEN);
tstrncpy(userObj.acct, acct, TSDB_USER_LEN);
@@ -1884,16 +1895,28 @@ static int32_t mndProcessCreateUserReq(SRpcMsg *pReq) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_USER_FORMAT, &lino, _OVER);
}
- int32_t len = strlen(createReq.pass);
+ char pass[TSDB_USET_PASSWORD_LONGLEN] = {0};
+
+ int32_t len = strlen(createReq.longPass);
+
+ if (len > 0) {
+ strncpy(pass, createReq.longPass, TSDB_USET_PASSWORD_LONGLEN);
+ } else {
+ len = strlen(createReq.pass);
+ strncpy(pass, createReq.pass, TSDB_PASSWORD_LEN);
+ }
+
if (createReq.isImport != 1) {
- if (mndCheckPasswordMinLen(createReq.pass, len) != 0) {
+ if (mndCheckPasswordMinLen(pass, len) != 0) {
TAOS_CHECK_GOTO(TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY, &lino, _OVER);
}
- if (mndCheckPasswordMaxLen(createReq.pass, len) != 0) {
+ if (mndCheckPasswordMaxLen(pass, len) != 0) {
TAOS_CHECK_GOTO(TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG, &lino, _OVER);
}
- if (mndCheckPasswordFmt(createReq.pass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
+ if (tsEnableStrongPassword) {
+ if (mndCheckPasswordFmt(pass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
+ }
}
}
@@ -2376,16 +2399,27 @@ static int32_t mndProcessAlterUserReq(SRpcMsg *pReq) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_USER_FORMAT, &lino, _OVER);
}
+ char userSetPass[TSDB_USET_PASSWORD_LONGLEN] = {0};
+ int32_t len = strlen(alterReq.longPass);
+
if (TSDB_ALTER_USER_PASSWD == alterReq.alterType) {
- int32_t len = strlen(alterReq.pass);
- if (mndCheckPasswordMinLen(alterReq.pass, len) != 0) {
+ if (len > 0) {
+ strncpy(userSetPass, alterReq.longPass, TSDB_USET_PASSWORD_LONGLEN);
+ } else {
+ len = strlen(alterReq.pass);
+ strncpy(userSetPass, alterReq.pass, TSDB_USET_PASSWORD_LEN);
+ }
+
+ if (mndCheckPasswordMinLen(userSetPass, len) != 0) {
TAOS_CHECK_GOTO(TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY, &lino, _OVER);
}
- if (mndCheckPasswordMaxLen(alterReq.pass, len) != 0) {
+ if (mndCheckPasswordMaxLen(userSetPass, len) != 0) {
TAOS_CHECK_GOTO(TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG, &lino, _OVER);
}
- if (mndCheckPasswordFmt(alterReq.pass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
+ if (tsEnableStrongPassword) {
+ if (mndCheckPasswordFmt(userSetPass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
+ }
}
}
@@ -2402,7 +2436,8 @@ static int32_t mndProcessAlterUserReq(SRpcMsg *pReq) {
if (alterReq.alterType == TSDB_ALTER_USER_PASSWD) {
char pass[TSDB_PASSWORD_LEN + 1] = {0};
- taosEncryptPass_c((uint8_t *)alterReq.pass, strlen(alterReq.pass), pass);
+
+ taosEncryptPass_c((uint8_t *)userSetPass, len, pass);
(void)memcpy(newUser.pass, pass, TSDB_PASSWORD_LEN);
if (0 != strncmp(pUser->pass, pass, TSDB_PASSWORD_LEN)) {
++newUser.passVersion;
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
new file mode 100644
index 0000000000..01dba7f394
--- /dev/null
+++ b/tests/army/cluster/strongPassword.py
@@ -0,0 +1,52 @@
+import taos
+import sys
+import os
+import subprocess
+import glob
+import shutil
+import time
+
+from frame.log import *
+from frame.cases import *
+from frame.sql import *
+from frame.srvCtl import *
+from frame.caseBase import *
+from frame import *
+from frame.autogen import *
+from frame import epath
+# from frame.server.dnodes import *
+# from frame.server.cluster import *
+
+
+class TDTestCase(TBase):
+
+ def init(self, conn, logSql, replicaVar=1):
+ super(TDTestCase, self).init(conn, logSql, replicaVar=1, checkColName="c1")
+
+ tdSql.init(conn.cursor(), logSql)
+
+ def run(self):
+ # strong
+ tdSql.error("create user test pass '12345678' sysinfo 0;", expectErrInfo="Invalid password format")
+
+ tdSql.execute("create user test pass '12345678@Abc' sysinfo 0;")
+
+ tdSql.error("alter user test pass '23456789'", expectErrInfo="Invalid password format")
+
+ tdSql.execute("alter user test pass '23456789@Abc';")
+
+ # change setting
+ tdSql.execute("ALTER ALL DNODES 'enableStrongPassword' '0'")
+
+ # weak
+ tdSql.execute("create user test1 pass '12345678' sysinfo 0;")
+
+ tdSql.execute("alter user test1 pass '12345678';")
+
+ def stop(self):
+ tdSql.close()
+ tdLog.success(f"{__file__} successfully executed")
+
+
+tdCases.addLinux(__file__, TDTestCase())
+tdCases.addWindows(__file__, TDTestCase())
diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task
index 0201c88d2b..3486b04e40 100644
--- a/tests/parallel_test/cases.task
+++ b/tests/parallel_test/cases.task
@@ -72,6 +72,7 @@
,,n,army,python3 ./test.py -f tmq/drop_lost_comsumers.py
,,y,army,./pytest.sh python3 ./test.py -f cmdline/taosCli.py
,,n,army,python3 ./test.py -f whole/checkErrorCode.py
+,,y,army,./pytest.sh python3 ./test.py -f cluster/strongPassword.py
#
# system test
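Taken together, this first patch adds a server option enableStrongPassword (default 1) and has the mnode validate passwords on CREATE USER and ALTER USER ... PASS: the length must stay within TSDB_PASSWORD_MIN_LEN..TSDB_PASSWORD_MAX_LEN, and, while the option is on, the password must use at least three of the four character classes (upper, lower, digit, special), with the default password "taosdata" exempt. A minimal Python sketch of that rule, for illustration only; the helper name and the special-character set are assumptions, the real check is mndCheckPasswordFmt():

def is_strong_password(pwd: str, enable_strong: bool = True) -> bool:
    """Sketch of the checks done by mndCheckPassword{MinLen,MaxLen,Fmt} above."""
    specials = set("!@#$%^&*()-_+=[]{};:,.<>?/|~")   # assumed; the server uses taosIsSpecialChar()
    if not 8 <= len(pwd) <= 255:                     # TSDB_PASSWORD_MIN_LEN .. new TSDB_PASSWORD_MAX_LEN
        return False
    if not enable_strong or pwd == "taosdata":       # format check skipped, length still enforced
        return True
    classes = [False, False, False, False]           # upper, lower, digit, special
    for c in pwd:
        if c.isupper():
            classes[0] = True
        elif c.islower():
            classes[1] = True
        elif c.isdigit():
            classes[2] = True
        elif c in specials:
            classes[3] = True
        else:
            return False                             # any other character is rejected outright
    return sum(classes) >= 3

assert is_strong_password("12345678@Abc")            # accepted in strongPassword.py
assert not is_strong_password("12345678")            # only one character class
assert is_strong_password("12345678", enable_strong=False)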
From 2f884a8f0a811c57fd21a2ac068aedba7a114239 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Tue, 11 Feb 2025 10:19:26 +0000
Subject: [PATCH 002/105] feat/TS-5927-long-password-fix-cases
---
tests/script/tsim/show/basic.sim | 2 +-
tests/system-test/2-query/db.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/tests/script/tsim/show/basic.sim b/tests/script/tsim/show/basic.sim
index 7569cd832c..4c4f9da912 100644
--- a/tests/script/tsim/show/basic.sim
+++ b/tests/script/tsim/show/basic.sim
@@ -230,7 +230,7 @@ endi
sql_error show create stable t0;
sql show variables;
-if $rows != 93 then
+if $rows != 94 then
return -1
endi
diff --git a/tests/system-test/2-query/db.py b/tests/system-test/2-query/db.py
index 66776e0a23..0c5c9773c8 100644
--- a/tests/system-test/2-query/db.py
+++ b/tests/system-test/2-query/db.py
@@ -47,7 +47,7 @@ class TDTestCase:
def case2(self):
tdSql.query("show variables")
- tdSql.checkRows(93)
+ tdSql.checkRows(94)
for i in range(self.replicaVar):
tdSql.query("show dnode %d variables like 'debugFlag'" % (i + 1))
From 36caaf7aabbb1e2fee5d512aeef7ffdd9945b362 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Wed, 12 Feb 2025 09:50:56 +0000
Subject: [PATCH 003/105] feat/TS-5927-long-password-length
---
include/libs/nodes/cmdnodes.h | 4 ++--
source/libs/parser/src/parAstCreater.c | 10 +++++-----
source/libs/parser/src/parTranslater.c | 4 ++--
tests/script/tsim/user/password.sim | 2 +-
tests/system-test/1-insert/boundary.py | 2 +-
5 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/include/libs/nodes/cmdnodes.h b/include/libs/nodes/cmdnodes.h
index 76db5e29a4..67935c0a38 100644
--- a/include/libs/nodes/cmdnodes.h
+++ b/include/libs/nodes/cmdnodes.h
@@ -299,7 +299,7 @@ typedef struct SAlterTableMultiStmt {
typedef struct SCreateUserStmt {
ENodeType type;
char userName[TSDB_USER_LEN];
- char password[TSDB_USET_PASSWORD_LEN];
+ char password[TSDB_USET_PASSWORD_LONGLEN];
int8_t sysinfo;
int8_t createDb;
int8_t isImport;
@@ -313,7 +313,7 @@ typedef struct SAlterUserStmt {
ENodeType type;
char userName[TSDB_USER_LEN];
int8_t alterType;
- char password[TSDB_USET_PASSWORD_LEN];
+ char password[TSDB_USET_PASSWORD_LONGLEN];
int8_t enable;
int8_t sysinfo;
int8_t createdb;
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index 64143ada3e..51fa970299 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -110,7 +110,7 @@ static bool invalidPassword(const char* pPassword) {
static bool checkPassword(SAstCreateContext* pCxt, const SToken* pPasswordToken, char* pPassword) {
if (NULL == pPasswordToken) {
pCxt->errCode = TSDB_CODE_PAR_SYNTAX_ERROR;
- } else if (pPasswordToken->n >= (TSDB_USET_PASSWORD_LEN + 2)) {
+ } else if (pPasswordToken->n >= (TSDB_USET_PASSWORD_LONGLEN + 2)) {
pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG);
} else {
strncpy(pPassword, pPasswordToken->z, pPasswordToken->n);
@@ -3030,14 +3030,14 @@ _err:
SNode* createCreateUserStmt(SAstCreateContext* pCxt, SToken* pUserName, const SToken* pPassword, int8_t sysinfo,
int8_t createDb, int8_t is_import) {
CHECK_PARSER_STATUS(pCxt);
- char password[TSDB_USET_PASSWORD_LEN + 3] = {0};
+ char password[TSDB_USET_PASSWORD_LONGLEN + 3] = {0};
CHECK_NAME(checkUserName(pCxt, pUserName));
CHECK_NAME(checkPassword(pCxt, pPassword, password));
SCreateUserStmt* pStmt = NULL;
pCxt->errCode = nodesMakeNode(QUERY_NODE_CREATE_USER_STMT, (SNode**)&pStmt);
CHECK_MAKE_NODE(pStmt);
COPY_STRING_FORM_ID_TOKEN(pStmt->userName, pUserName);
- tstrncpy(pStmt->password, password, TSDB_USET_PASSWORD_LEN);
+ tstrncpy(pStmt->password, password, TSDB_USET_PASSWORD_LONGLEN);
pStmt->sysinfo = sysinfo;
pStmt->createDb = createDb;
pStmt->isImport = is_import;
@@ -3056,10 +3056,10 @@ SNode* createAlterUserStmt(SAstCreateContext* pCxt, SToken* pUserName, int8_t al
pStmt->alterType = alterType;
switch (alterType) {
case TSDB_ALTER_USER_PASSWD: {
- char password[TSDB_USET_PASSWORD_LEN] = {0};
+ char password[TSDB_USET_PASSWORD_LONGLEN] = {0};
SToken* pVal = pAlterInfo;
CHECK_NAME(checkPassword(pCxt, pVal, password));
- tstrncpy(pStmt->password, password, TSDB_USET_PASSWORD_LEN);
+ tstrncpy(pStmt->password, password, TSDB_USET_PASSWORD_LONGLEN);
break;
}
case TSDB_ALTER_USER_ENABLE: {
diff --git a/source/libs/parser/src/parTranslater.c b/source/libs/parser/src/parTranslater.c
index f33a6a63c7..a10ab0be16 100755
--- a/source/libs/parser/src/parTranslater.c
+++ b/source/libs/parser/src/parTranslater.c
@@ -10045,7 +10045,7 @@ static int32_t translateCreateUser(STranslateContext* pCxt, SCreateUserStmt* pSt
createReq.superUser = 0;
createReq.sysInfo = pStmt->sysinfo;
createReq.enable = 1;
- tstrncpy(createReq.pass, pStmt->password, TSDB_USET_PASSWORD_LEN);
+ tstrncpy(createReq.longPass, pStmt->password, TSDB_USET_PASSWORD_LONGLEN);
createReq.isImport = pStmt->isImport;
createReq.createDb = pStmt->createDb;
@@ -10090,7 +10090,7 @@ static int32_t translateAlterUser(STranslateContext* pCxt, SAlterUserStmt* pStmt
alterReq.enable = pStmt->enable;
alterReq.sysInfo = pStmt->sysinfo;
alterReq.createdb = pStmt->createdb ? 1 : 0;
- snprintf(alterReq.pass, sizeof(alterReq.pass), "%s", pStmt->password);
+ snprintf(alterReq.longPass, sizeof(alterReq.longPass), "%s", pStmt->password);
if (NULL != pCxt->pParseCxt->db) {
snprintf(alterReq.objname, sizeof(alterReq.objname), "%s", pCxt->pParseCxt->db);
}
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index 7d1eff2f0b..4969ee0fa0 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -86,7 +86,7 @@ sql create user user_p6 pass 'abcd!@123456'
sql create user user_p7 pass 'abcd!@1234567'
sql create user user_p8 pass 'abcd!@123456789'
sql create user user_p9 pass 'abcd!@1234567890'
-sql_error create user user_p10 pass 'abcd!@1234567890T'
+sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T'
sql drop user user_p2
sql drop user user_p3
sql drop user user_p4
diff --git a/tests/system-test/1-insert/boundary.py b/tests/system-test/1-insert/boundary.py
index 129b0f275c..aa0264c003 100644
--- a/tests/system-test/1-insert/boundary.py
+++ b/tests/system-test/1-insert/boundary.py
@@ -33,7 +33,7 @@ class TDTestCase:
self.colname_length_boundary = self.boundary.COL_KEY_MAX_LENGTH
self.tagname_length_boundary = self.boundary.TAG_KEY_MAX_LENGTH
self.username_length_boundary = 23
- self.password_length_boundary = 14
+ self.password_length_boundary = 253
def dbname_length_check(self):
dbname_length = randint(1,self.dbname_length_boundary-1)
for dbname in [tdCom.get_long_name(self.dbname_length_boundary),tdCom.get_long_name(dbname_length)]:
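Patch 003 widens the parser-side buffers to TSDB_USET_PASSWORD_LONGLEN (256) and compares the raw password token, which still carries its surrounding quotes, against LONGLEN + 2. That makes 255 characters the longest accepted password: 255 plus the two quote characters stays under the limit, and the dequoted string plus its terminating NUL fits the 256-byte buffer. A small illustrative check (constants copied from tdef.h above; the function name is hypothetical):

TSDB_USET_PASSWORD_LONGLEN = 256      # buffer size added in tdef.h
TSDB_PASSWORD_MAX_LEN = 255           # longest password the server will accept

def password_token_fits(token: str) -> bool:
    """token is the scanned literal with its quotes still attached, e.g. "'abc'"."""
    return len(token) < TSDB_USET_PASSWORD_LONGLEN + 2   # mirrors the n >= LONGLEN + 2 rejection

longest = "'" + "Ab1@" * 63 + "Ab1" + "'"                # 255 characters between the quotes
assert len(longest) - 2 == TSDB_PASSWORD_MAX_LEN
assert password_token_fits(longest)
assert not password_token_fits("'" + "x" * 256 + "'")    # one character too many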
From 48123446d6b1d177f8f2e28b664836fc2523d032 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 13 Feb 2025 00:46:08 +0000
Subject: [PATCH 004/105] feat/TS-5927-long-password-fix-cases
---
source/libs/parser/test/parAlterToBalanceTest.cpp | 2 +-
source/libs/parser/test/parInitialCTest.cpp | 2 +-
tests/script/tsim/valgrind/checkError1.sim | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/source/libs/parser/test/parAlterToBalanceTest.cpp b/source/libs/parser/test/parAlterToBalanceTest.cpp
index 172c729f34..e82c0eeab7 100644
--- a/source/libs/parser/test/parAlterToBalanceTest.cpp
+++ b/source/libs/parser/test/parAlterToBalanceTest.cpp
@@ -833,7 +833,7 @@ TEST_F(ParserInitialATest, alterUser) {
ASSERT_EQ(req.sysInfo, expect.sysInfo);
ASSERT_EQ(req.enable, expect.enable);
ASSERT_EQ(std::string(req.user), std::string(expect.user));
- ASSERT_EQ(std::string(req.pass), std::string(expect.pass));
+ ASSERT_EQ(std::string(req.longPass), std::string(expect.pass));
ASSERT_EQ(std::string(req.objname), std::string(expect.objname));
tFreeSAlterUserReq(&req);
});
diff --git a/source/libs/parser/test/parInitialCTest.cpp b/source/libs/parser/test/parInitialCTest.cpp
index 2412bf4e78..b4d277f5d5 100644
--- a/source/libs/parser/test/parInitialCTest.cpp
+++ b/source/libs/parser/test/parInitialCTest.cpp
@@ -1362,7 +1362,7 @@ TEST_F(ParserInitialCTest, createUser) {
ASSERT_EQ(req.sysInfo, expect.sysInfo);
ASSERT_EQ(req.enable, expect.enable);
ASSERT_EQ(std::string(req.user), std::string(expect.user));
- ASSERT_EQ(std::string(req.pass), std::string(expect.pass));
+ ASSERT_EQ(std::string(req.longPass), std::string(expect.pass));
tFreeSCreateUserReq(&req);
});
diff --git a/tests/script/tsim/valgrind/checkError1.sim b/tests/script/tsim/valgrind/checkError1.sim
index 8ac43ebaf3..b81cf80548 100644
--- a/tests/script/tsim/valgrind/checkError1.sim
+++ b/tests/script/tsim/valgrind/checkError1.sim
@@ -120,7 +120,7 @@ if $rows != 3 then
endi
sql show variables;
-if $rows != 88 then
+if $rows != 89 then
return -1
endi
From 55e12e520b4bccd526710417e2430a1afc1b94ca Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 13 Feb 2025 07:45:42 +0000
Subject: [PATCH 005/105] feat/TS-5927-long-password-encrypt-pass
---
include/common/tmsg.h | 5 +-
source/client/inc/clientInt.h | 1 +
source/client/src/clientHb.c | 2 +
source/common/src/msg/tmsg.c | 15 ++-
source/dnode/mnode/impl/src/mndProfile.c | 1 +
source/dnode/mnode/impl/src/mndUser.c | 124 ++----------------
source/libs/parser/src/parAstCreater.c | 34 ++++-
source/libs/parser/src/parTranslater.c | 9 +-
.../parser/test/parAlterToBalanceTest.cpp | 2 +-
source/libs/parser/test/parInitialCTest.cpp | 2 +-
tests/army/cluster/strongPassword.py | 13 ++
11 files changed, 85 insertions(+), 123 deletions(-)
diff --git a/include/common/tmsg.h b/include/common/tmsg.h
index c27dd2f78f..897870ef3f 100644
--- a/include/common/tmsg.h
+++ b/include/common/tmsg.h
@@ -1089,7 +1089,7 @@ typedef struct {
char* sql;
int8_t isImport;
int8_t createDb;
- char longPass[TSDB_USET_PASSWORD_LONGLEN];
+ int8_t passIsMd5;
} SCreateUserReq;
int32_t tSerializeSCreateUserReq(void* buf, int32_t bufLen, SCreateUserReq* pReq);
@@ -1160,7 +1160,7 @@ typedef struct {
int64_t privileges;
int32_t sqlLen;
char* sql;
- char longPass[TSDB_USET_PASSWORD_LONGLEN];
+ int8_t passIsMd5;
} SAlterUserReq;
int32_t tSerializeSAlterUserReq(void* buf, int32_t bufLen, SAlterUserReq* pReq);
@@ -3533,6 +3533,7 @@ typedef struct {
SArray* rsps; // SArray
SMonitorParas monitorParas;
int8_t enableAuditDelete;
+ int8_t enableStrongPass;
} SClientHbBatchRsp;
static FORCE_INLINE uint32_t hbKeyHashFunc(const char* key, uint32_t keyLen) { return taosIntHash_64(key, keyLen); }
diff --git a/source/client/inc/clientInt.h b/source/client/inc/clientInt.h
index 2543a1f3ec..2e1dc56800 100644
--- a/source/client/inc/clientInt.h
+++ b/source/client/inc/clientInt.h
@@ -113,6 +113,7 @@ typedef struct SQueryExecMetric {
typedef struct {
SMonitorParas monitorParas;
int8_t enableAuditDelete;
+ int8_t enableStrongPass;
} SAppInstServerCFG;
struct SAppInstInfo {
int64_t numOfConns;
diff --git a/source/client/src/clientHb.c b/source/client/src/clientHb.c
index b3e288c816..274b7df032 100644
--- a/source/client/src/clientHb.c
+++ b/source/client/src/clientHb.c
@@ -608,6 +608,8 @@ static int32_t hbAsyncCallBack(void *param, SDataBuf *pMsg, int32_t code) {
pInst->serverCfg.monitorParas = pRsp.monitorParas;
pInst->serverCfg.enableAuditDelete = pRsp.enableAuditDelete;
+ pInst->serverCfg.enableStrongPass = pRsp.enableStrongPass;
+ tsEnableStrongPassword = pInst->serverCfg.enableStrongPass;
tscDebug("[monitor] paras from hb, clusterId:%" PRIx64 " monitorParas threshold:%d scope:%d", pInst->clusterId,
pRsp.monitorParas.tsSlowLogThreshold, pRsp.monitorParas.tsSlowLogScope);
diff --git a/source/common/src/msg/tmsg.c b/source/common/src/msg/tmsg.c
index b54e3c10a1..8bd1ff4f4c 100644
--- a/source/common/src/msg/tmsg.c
+++ b/source/common/src/msg/tmsg.c
@@ -575,6 +575,7 @@ int32_t tSerializeSClientHbBatchRsp(void *buf, int32_t bufLen, const SClientHbBa
}
TAOS_CHECK_EXIT(tSerializeSMonitorParas(&encoder, &pBatchRsp->monitorParas));
TAOS_CHECK_EXIT(tEncodeI8(&encoder, pBatchRsp->enableAuditDelete));
+ TAOS_CHECK_EXIT(tEncodeI8(&encoder, pBatchRsp->enableStrongPass));
tEndEncode(&encoder);
_exit:
@@ -623,6 +624,12 @@ int32_t tDeserializeSClientHbBatchRsp(void *buf, int32_t bufLen, SClientHbBatchR
pBatchRsp->enableAuditDelete = 0;
}
+ if (!tDecodeIsEnd(&decoder)) {
+ TAOS_CHECK_EXIT(tDecodeI8(&decoder, &pBatchRsp->enableStrongPass));
+ } else {
+ pBatchRsp->enableStrongPass = 1;
+ }
+
tEndDecode(&decoder);
_exit:
@@ -2007,7 +2014,7 @@ int32_t tSerializeSCreateUserReq(void *buf, int32_t bufLen, SCreateUserReq *pReq
ENCODESQL();
TAOS_CHECK_EXIT(tEncodeI8(&encoder, pReq->isImport));
TAOS_CHECK_EXIT(tEncodeI8(&encoder, pReq->createDb));
- TAOS_CHECK_EXIT(tEncodeCStr(&encoder, pReq->longPass));
+ TAOS_CHECK_EXIT(tEncodeI8(&encoder, pReq->passIsMd5));
tEndEncode(&encoder);
@@ -2049,7 +2056,7 @@ int32_t tDeserializeSCreateUserReq(void *buf, int32_t bufLen, SCreateUserReq *pR
TAOS_CHECK_EXIT(tDecodeI8(&decoder, &pReq->isImport));
}
if (!tDecodeIsEnd(&decoder)) {
- TAOS_CHECK_EXIT(tDecodeCStrTo(&decoder, pReq->longPass));
+ TAOS_CHECK_EXIT(tDecodeI8(&decoder, &pReq->passIsMd5));
}
tEndDecode(&decoder);
@@ -2406,7 +2413,7 @@ int32_t tSerializeSAlterUserReq(void *buf, int32_t bufLen, SAlterUserReq *pReq)
TAOS_CHECK_EXIT(tEncodeI64(&encoder, pReq->privileges));
ENCODESQL();
TAOS_CHECK_EXIT(tEncodeU8(&encoder, pReq->flag));
- TAOS_CHECK_EXIT(tEncodeCStr(&encoder, pReq->longPass));
+ TAOS_CHECK_EXIT(tEncodeU8(&encoder, pReq->passIsMd5));
tEndEncode(&encoder);
_exit:
@@ -2459,7 +2466,7 @@ int32_t tDeserializeSAlterUserReq(void *buf, int32_t bufLen, SAlterUserReq *pReq
TAOS_CHECK_EXIT(tDecodeU8(&decoder, &pReq->flag));
}
if (!tDecodeIsEnd(&decoder)) {
- TAOS_CHECK_EXIT(tDecodeCStrTo(&decoder, pReq->longPass));
+ TAOS_CHECK_EXIT(tDecodeU8(&decoder, &pReq->passIsMd5));
}
tEndDecode(&decoder);
diff --git a/source/dnode/mnode/impl/src/mndProfile.c b/source/dnode/mnode/impl/src/mndProfile.c
index 8fe36ca0c4..fc8ff4bea7 100644
--- a/source/dnode/mnode/impl/src/mndProfile.c
+++ b/source/dnode/mnode/impl/src/mndProfile.c
@@ -722,6 +722,7 @@ static int32_t mndProcessHeartBeatReq(SRpcMsg *pReq) {
batchRsp.monitorParas.tsSlowLogMaxLen = tsSlowLogMaxLen;
batchRsp.monitorParas.tsSlowLogScope = tsSlowLogScope;
batchRsp.enableAuditDelete = tsEnableAuditDelete;
+ batchRsp.enableStrongPass = tsEnableStrongPassword;
int32_t sz = taosArrayGetSize(batchReq.reqs);
for (int i = 0; i < sz; i++) {
diff --git a/source/dnode/mnode/impl/src/mndUser.c b/source/dnode/mnode/impl/src/mndUser.c
index 8572c954c8..c7730e8546 100644
--- a/source/dnode/mnode/impl/src/mndUser.c
+++ b/source/dnode/mnode/impl/src/mndUser.c
@@ -1705,23 +1705,18 @@ static int32_t mndCreateUser(SMnode *pMnode, char *acct, SCreateUserReq *pCreate
int32_t code = 0;
int32_t lino = 0;
SUserObj userObj = {0};
- char pass[TSDB_USET_PASSWORD_LONGLEN] = {0};
-
- int32_t len = strlen(pCreate->longPass);
-
- if (len > 0) {
- strncpy(pass, pCreate->longPass, TSDB_USET_PASSWORD_LONGLEN);
- } else {
- len = strlen(pCreate->pass);
- strncpy(pass, pCreate->pass, TSDB_PASSWORD_LEN);
- }
if (pCreate->isImport != 1) {
- taosEncryptPass_c((uint8_t *)pass, strlen(pass), userObj.pass);
+ if (pCreate->passIsMd5 == 1) {
+ memcpy(userObj.pass, pCreate->pass, TSDB_PASSWORD_LEN);
+ } else {
+ taosEncryptPass_c((uint8_t *)pCreate->pass, strlen(pCreate->pass), userObj.pass);
+ }
} else {
// mInfo("pCreate->pass:%s", pCreate->eass)
- memcpy(userObj.pass, pass, TSDB_PASSWORD_LEN);
+ memcpy(userObj.pass, pCreate->pass, TSDB_PASSWORD_LEN);
}
+
tstrncpy(userObj.user, pCreate->user, TSDB_USER_LEN);
tstrncpy(userObj.acct, acct, TSDB_USER_LEN);
userObj.createdTime = taosGetTimestampMs();
@@ -1816,52 +1811,6 @@ _OVER:
TAOS_RETURN(code);
}
-static int32_t mndCheckPasswordMinLen(const char *pwd, int32_t len) {
- if (len < TSDB_PASSWORD_MIN_LEN) {
- return -1;
- }
- return 0;
-}
-
-static int32_t mndCheckPasswordMaxLen(const char *pwd, int32_t len) {
- if (len > TSDB_PASSWORD_MAX_LEN) {
- return -1;
- }
- return 0;
-}
-
-static int32_t mndCheckPasswordFmt(const char *pwd, int32_t len) {
- if (strcmp(pwd, "taosdata") == 0) {
- return 0;
- }
-
- bool charTypes[4] = {0};
- for (int32_t i = 0; i < len; ++i) {
- if (taosIsBigChar(pwd[i])) {
- charTypes[0] = true;
- } else if (taosIsSmallChar(pwd[i])) {
- charTypes[1] = true;
- } else if (taosIsNumberChar(pwd[i])) {
- charTypes[2] = true;
- } else if (taosIsSpecialChar(pwd[i])) {
- charTypes[3] = true;
- } else {
- return -1;
- }
- }
-
- int32_t numOfTypes = 0;
- for (int32_t i = 0; i < 4; ++i) {
- numOfTypes += charTypes[i];
- }
-
- if (numOfTypes < 3) {
- return -1;
- }
-
- return 0;
-}
-
static int32_t mndProcessCreateUserReq(SRpcMsg *pReq) {
SMnode *pMnode = pReq->info.node;
int32_t code = 0;
@@ -1895,31 +1844,6 @@ static int32_t mndProcessCreateUserReq(SRpcMsg *pReq) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_USER_FORMAT, &lino, _OVER);
}
- char pass[TSDB_USET_PASSWORD_LONGLEN] = {0};
-
- int32_t len = strlen(createReq.longPass);
-
- if (len > 0) {
- strncpy(pass, createReq.longPass, TSDB_USET_PASSWORD_LONGLEN);
- } else {
- len = strlen(createReq.pass);
- strncpy(pass, createReq.pass, TSDB_PASSWORD_LEN);
- }
-
- if (createReq.isImport != 1) {
- if (mndCheckPasswordMinLen(pass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY, &lino, _OVER);
- }
- if (mndCheckPasswordMaxLen(pass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG, &lino, _OVER);
- }
- if (tsEnableStrongPassword) {
- if (mndCheckPasswordFmt(pass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
- }
- }
- }
-
code = mndAcquireUser(pMnode, createReq.user, &pUser);
if (pUser != NULL) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_USER_ALREADY_EXIST, &lino, _OVER);
@@ -2399,30 +2323,6 @@ static int32_t mndProcessAlterUserReq(SRpcMsg *pReq) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_USER_FORMAT, &lino, _OVER);
}
- char userSetPass[TSDB_USET_PASSWORD_LONGLEN] = {0};
- int32_t len = strlen(alterReq.longPass);
-
- if (TSDB_ALTER_USER_PASSWD == alterReq.alterType) {
- if (len > 0) {
- strncpy(userSetPass, alterReq.longPass, TSDB_USET_PASSWORD_LONGLEN);
- } else {
- len = strlen(alterReq.pass);
- strncpy(userSetPass, alterReq.pass, TSDB_USET_PASSWORD_LEN);
- }
-
- if (mndCheckPasswordMinLen(userSetPass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY, &lino, _OVER);
- }
- if (mndCheckPasswordMaxLen(userSetPass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG, &lino, _OVER);
- }
- if (tsEnableStrongPassword) {
- if (mndCheckPasswordFmt(userSetPass, len) != 0) {
- TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
- }
- }
- }
-
TAOS_CHECK_GOTO(mndAcquireUser(pMnode, alterReq.user, &pUser), &lino, _OVER);
(void)mndAcquireUser(pMnode, pReq->info.conn.user, &pOperUser);
@@ -2435,11 +2335,13 @@ static int32_t mndProcessAlterUserReq(SRpcMsg *pReq) {
TAOS_CHECK_GOTO(mndUserDupObj(pUser, &newUser), &lino, _OVER);
if (alterReq.alterType == TSDB_ALTER_USER_PASSWD) {
- char pass[TSDB_PASSWORD_LEN + 1] = {0};
+ if (alterReq.passIsMd5 == 1) {
+ (void)memcpy(newUser.pass, alterReq.pass, TSDB_PASSWORD_LEN);
+ } else {
+ taosEncryptPass_c((uint8_t *)alterReq.pass, strlen(alterReq.pass), newUser.pass);
+ }
- taosEncryptPass_c((uint8_t *)userSetPass, len, pass);
- (void)memcpy(newUser.pass, pass, TSDB_PASSWORD_LEN);
- if (0 != strncmp(pUser->pass, pass, TSDB_PASSWORD_LEN)) {
+ if (0 != strncmp(pUser->pass, newUser.pass, TSDB_PASSWORD_LEN)) {
++newUser.passVersion;
}
}
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index 51fa970299..4d9e9d1fb0 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -104,7 +104,37 @@ static bool invalidPassword(const char* pPassword) {
/* Execute regular expression */
int32_t res = regexec(&regex, pPassword, 0, NULL, 0);
regfree(&regex);
- return 0 == res;
+ if(0 != res) return false;
+
+ if (strcmp(pPassword, "taosdata") == 0) {
+ return false;
+ }
+
+ bool charTypes[4] = {0};
+ for (int32_t i = 0; i < strlen(pPassword); ++i) {
+ if (taosIsBigChar(pPassword[i])) {
+ charTypes[0] = true;
+ } else if (taosIsSmallChar(pPassword[i])) {
+ charTypes[1] = true;
+ } else if (taosIsNumberChar(pPassword[i])) {
+ charTypes[2] = true;
+ } else if (taosIsSpecialChar(pPassword[i])) {
+ charTypes[3] = true;
+ } else {
+ return false;
+ }
+ }
+
+ int32_t numOfTypes = 0;
+ for (int32_t i = 0; i < 4; ++i) {
+ numOfTypes += charTypes[i];
+ }
+
+ if (numOfTypes < 3) {
+ return false;
+ }
+
+ return true;
}
static bool checkPassword(SAstCreateContext* pCxt, const SToken* pPasswordToken, char* pPassword) {
@@ -115,7 +145,7 @@ static bool checkPassword(SAstCreateContext* pCxt, const SToken* pPasswordToken,
} else {
strncpy(pPassword, pPasswordToken->z, pPasswordToken->n);
(void)strdequote(pPassword);
- if (strtrim(pPassword) <= 0) {
+ if (strtrim(pPassword) < TSDB_PASSWORD_MIN_LEN) {
pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY);
} else if (invalidPassword(pPassword)) {
pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_INVALID_PASSWD);
diff --git a/source/libs/parser/src/parTranslater.c b/source/libs/parser/src/parTranslater.c
index a10ab0be16..dd1fa6c6e8 100755
--- a/source/libs/parser/src/parTranslater.c
+++ b/source/libs/parser/src/parTranslater.c
@@ -10045,10 +10045,12 @@ static int32_t translateCreateUser(STranslateContext* pCxt, SCreateUserStmt* pSt
createReq.superUser = 0;
createReq.sysInfo = pStmt->sysinfo;
createReq.enable = 1;
- tstrncpy(createReq.longPass, pStmt->password, TSDB_USET_PASSWORD_LONGLEN);
createReq.isImport = pStmt->isImport;
createReq.createDb = pStmt->createDb;
+ taosEncryptPass_c((uint8_t*)pStmt->password, strlen(pStmt->password), createReq.pass);
+ createReq.passIsMd5 = 1;
+
createReq.numIpRanges = pStmt->numIpRanges;
if (pStmt->numIpRanges > 0) {
createReq.pIpRanges = taosMemoryMalloc(createReq.numIpRanges * sizeof(SIpV4Range));
@@ -10090,7 +10092,10 @@ static int32_t translateAlterUser(STranslateContext* pCxt, SAlterUserStmt* pStmt
alterReq.enable = pStmt->enable;
alterReq.sysInfo = pStmt->sysinfo;
alterReq.createdb = pStmt->createdb ? 1 : 0;
- snprintf(alterReq.longPass, sizeof(alterReq.longPass), "%s", pStmt->password);
+
+ taosEncryptPass_c((uint8_t*)pStmt->password, strlen(pStmt->password), alterReq.pass);
+ alterReq.passIsMd5 = 1;
+
if (NULL != pCxt->pParseCxt->db) {
snprintf(alterReq.objname, sizeof(alterReq.objname), "%s", pCxt->pParseCxt->db);
}
diff --git a/source/libs/parser/test/parAlterToBalanceTest.cpp b/source/libs/parser/test/parAlterToBalanceTest.cpp
index e82c0eeab7..172c729f34 100644
--- a/source/libs/parser/test/parAlterToBalanceTest.cpp
+++ b/source/libs/parser/test/parAlterToBalanceTest.cpp
@@ -833,7 +833,7 @@ TEST_F(ParserInitialATest, alterUser) {
ASSERT_EQ(req.sysInfo, expect.sysInfo);
ASSERT_EQ(req.enable, expect.enable);
ASSERT_EQ(std::string(req.user), std::string(expect.user));
- ASSERT_EQ(std::string(req.longPass), std::string(expect.pass));
+ ASSERT_EQ(std::string(req.pass), std::string(expect.pass));
ASSERT_EQ(std::string(req.objname), std::string(expect.objname));
tFreeSAlterUserReq(&req);
});
diff --git a/source/libs/parser/test/parInitialCTest.cpp b/source/libs/parser/test/parInitialCTest.cpp
index b4d277f5d5..2412bf4e78 100644
--- a/source/libs/parser/test/parInitialCTest.cpp
+++ b/source/libs/parser/test/parInitialCTest.cpp
@@ -1362,7 +1362,7 @@ TEST_F(ParserInitialCTest, createUser) {
ASSERT_EQ(req.sysInfo, expect.sysInfo);
ASSERT_EQ(req.enable, expect.enable);
ASSERT_EQ(std::string(req.user), std::string(expect.user));
- ASSERT_EQ(std::string(req.longPass), std::string(expect.pass));
+ ASSERT_EQ(std::string(req.pass), std::string(expect.pass));
tFreeSCreateUserReq(&req);
});
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
index 01dba7f394..dc6dbd7c7e 100644
--- a/tests/army/cluster/strongPassword.py
+++ b/tests/army/cluster/strongPassword.py
@@ -38,11 +38,24 @@ class TDTestCase(TBase):
# change setting
tdSql.execute("ALTER ALL DNODES 'enableStrongPassword' '0'")
+ time.sleep(3)
+
# weak
tdSql.execute("create user test1 pass '12345678' sysinfo 0;")
tdSql.execute("alter user test1 pass '12345678';")
+ # pass length
+ tdSql.error("alter user test1 pass '1234567';", expectErrInfo="Password too short or empty")
+
+ tdSql.error("create user test2 pass '1234567' sysinfo 0;", expectErrInfo="Password too short or empty")
+
+ tdSql.error("create user test2 pass '1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456' sysinfo 0;", expectErrInfo="Name or password too long")
+
+ tdSql.execute("create user test2 pass '123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345' sysinfo 0;")
+
+ tdSql.error("alter user test2 pass '1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456';", expectErrInfo="Name or password too long")
+
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
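Patch 005 changes the wire format: instead of carrying the raw long password in longPass, the translater hashes it with taosEncryptPass_c() and sets passIsMd5, so create/alter requests only ship the fixed 32-character digest, and the heartbeat response now tells connected clients whether strong-password checking is enabled. A rough Python sketch of the client-side step, assuming taosEncryptPass_c() yields an MD5 hex digest (consistent with the passIsMd5 flag); the request dict is illustrative only:

import hashlib

def build_alter_user_req(user: str, password: str) -> dict:
    """Rough picture of what translateAlterUser now sends (fields are illustrative)."""
    digest = hashlib.md5(password.encode()).hexdigest()   # 32 hex characters, i.e. TSDB_PASSWORD_LEN
    return {"user": user, "pass": digest, "passIsMd5": 1}

req = build_alter_user_req("test", "23456789@Abc")
assert len(req["pass"]) == 32 and req["passIsMd5"] == 1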
From 58b7a6fe978e88019dbebd6822923f2a7820c98c Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 13 Feb 2025 08:43:48 +0000
Subject: [PATCH 006/105] feat/TS-5927-long-password-invalid-pass-check
---
source/libs/parser/src/parAstCreater.c | 24 +++++++++++++++++-------
tests/army/cluster/strongPassword.py | 4 ++--
2 files changed, 19 insertions(+), 9 deletions(-)
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index 4d9e9d1fb0..422882f651 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -104,10 +104,12 @@ static bool invalidPassword(const char* pPassword) {
/* Execute regular expression */
int32_t res = regexec(&regex, pPassword, 0, NULL, 0);
regfree(&regex);
- if(0 != res) return false;
+ return 0 == res;
+}
+static bool invalidStrongPassword(const char* pPassword) {
if (strcmp(pPassword, "taosdata") == 0) {
- return false;
+ return true;
}
bool charTypes[4] = {0};
@@ -121,7 +123,7 @@ static bool invalidPassword(const char* pPassword) {
} else if (taosIsSpecialChar(pPassword[i])) {
charTypes[3] = true;
} else {
- return false;
+ return true;
}
}
@@ -131,10 +133,10 @@ static bool invalidPassword(const char* pPassword) {
}
if (numOfTypes < 3) {
- return false;
+ return true;
}
- return true;
+ return false;
}
static bool checkPassword(SAstCreateContext* pCxt, const SToken* pPasswordToken, char* pPassword) {
@@ -147,8 +149,16 @@ static bool checkPassword(SAstCreateContext* pCxt, const SToken* pPasswordToken,
(void)strdequote(pPassword);
if (strtrim(pPassword) < TSDB_PASSWORD_MIN_LEN) {
pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY);
- } else if (invalidPassword(pPassword)) {
- pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_INVALID_PASSWD);
+ } else {
+ if (tsEnableStrongPassword) {
+ if (invalidStrongPassword(pPassword)) {
+ pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_INVALID_PASSWD);
+ }
+ } else {
+ if (invalidPassword(pPassword)) {
+ pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_INVALID_PASSWD);
+ }
+ }
}
}
return TSDB_CODE_SUCCESS == pCxt->errCode;
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
index dc6dbd7c7e..311c77e158 100644
--- a/tests/army/cluster/strongPassword.py
+++ b/tests/army/cluster/strongPassword.py
@@ -27,11 +27,11 @@ class TDTestCase(TBase):
def run(self):
# strong
- tdSql.error("create user test pass '12345678' sysinfo 0;", expectErrInfo="Invalid password format")
+ tdSql.error("create user test pass '12345678' sysinfo 0;", expectErrInfo="Invalid password")
tdSql.execute("create user test pass '12345678@Abc' sysinfo 0;")
- tdSql.error("alter user test pass '23456789'", expectErrInfo="Invalid password format")
+ tdSql.error("alter user test pass '23456789'", expectErrInfo="Invalid password")
tdSql.execute("alter user test pass '23456789@Abc';")
From 80f60449e3bfeb23693f823163cdb54fdfacd9cf Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 13 Feb 2025 10:02:17 +0000
Subject: [PATCH 007/105] feat/TS-5927-long-password-fix-cases
---
source/libs/parser/src/parTranslater.c | 7 +++++--
source/libs/parser/test/parAlterToBalanceTest.cpp | 6 +++---
source/libs/parser/test/parInitialCTest.cpp | 10 +++++-----
3 files changed, 13 insertions(+), 10 deletions(-)
diff --git a/source/libs/parser/src/parTranslater.c b/source/libs/parser/src/parTranslater.c
index dd1fa6c6e8..a2b593e234 100755
--- a/source/libs/parser/src/parTranslater.c
+++ b/source/libs/parser/src/parTranslater.c
@@ -10093,8 +10093,11 @@ static int32_t translateAlterUser(STranslateContext* pCxt, SAlterUserStmt* pStmt
alterReq.sysInfo = pStmt->sysinfo;
alterReq.createdb = pStmt->createdb ? 1 : 0;
- taosEncryptPass_c((uint8_t*)pStmt->password, strlen(pStmt->password), alterReq.pass);
- alterReq.passIsMd5 = 1;
+ int32_t len = strlen(pStmt->password);
+ if (len > 0) {
+ taosEncryptPass_c((uint8_t*)pStmt->password, len, alterReq.pass);
+ alterReq.passIsMd5 = 1;
+ }
if (NULL != pCxt->pParseCxt->db) {
snprintf(alterReq.objname, sizeof(alterReq.objname), "%s", pCxt->pParseCxt->db);
diff --git a/source/libs/parser/test/parAlterToBalanceTest.cpp b/source/libs/parser/test/parAlterToBalanceTest.cpp
index 172c729f34..f90d6d13e1 100644
--- a/source/libs/parser/test/parAlterToBalanceTest.cpp
+++ b/source/libs/parser/test/parAlterToBalanceTest.cpp
@@ -817,7 +817,7 @@ TEST_F(ParserInitialATest, alterUser) {
expect.sysInfo = sysInfo;
expect.enable = enable;
if (nullptr != pPass) {
- strcpy(expect.pass, pPass);
+ taosEncryptPass_c((uint8_t*)pPass, strlen(pPass), expect.pass);
}
strcpy(expect.objname, "test");
};
@@ -838,8 +838,8 @@ TEST_F(ParserInitialATest, alterUser) {
tFreeSAlterUserReq(&req);
});
- setAlterUserReq("wxy", TSDB_ALTER_USER_PASSWD, "123456");
- run("ALTER USER wxy PASS '123456'");
+ setAlterUserReq("wxy", TSDB_ALTER_USER_PASSWD, "12345678@Abc");
+ run("ALTER USER wxy PASS '12345678@Abc'");
clearAlterUserReq();
setAlterUserReq("wxy", TSDB_ALTER_USER_ENABLE, nullptr, 0, 1);
diff --git a/source/libs/parser/test/parInitialCTest.cpp b/source/libs/parser/test/parInitialCTest.cpp
index 2412bf4e78..878185ee11 100644
--- a/source/libs/parser/test/parInitialCTest.cpp
+++ b/source/libs/parser/test/parInitialCTest.cpp
@@ -1345,11 +1345,11 @@ TEST_F(ParserInitialCTest, createUser) {
auto setCreateUserReq = [&](const char* pUser, const char* pPass, int8_t sysInfo = 1) {
strcpy(expect.user, pUser);
- strcpy(expect.pass, pPass);
expect.createType = 0;
expect.superUser = 0;
expect.sysInfo = sysInfo;
expect.enable = 1;
+ taosEncryptPass_c((uint8_t*)pPass, strlen(pPass), expect.pass);
};
setCheckDdlFunc([&](const SQuery* pQuery, ParserStage stage) {
@@ -1366,12 +1366,12 @@ TEST_F(ParserInitialCTest, createUser) {
tFreeSCreateUserReq(&req);
});
- setCreateUserReq("wxy", "123456");
- run("CREATE USER wxy PASS '123456'");
+ setCreateUserReq("wxy", "12345678@Abc");
+ run("CREATE USER wxy PASS '12345678@Abc'");
clearCreateUserReq();
- setCreateUserReq("wxy1", "a123456", 1);
- run("CREATE USER wxy1 PASS 'a123456' SYSINFO 1");
+ setCreateUserReq("wxy1", "12345678@Abc", 1);
+ run("CREATE USER wxy1 PASS '12345678@Abc' SYSINFO 1");
clearCreateUserReq();
}
From 70686226d89b21c797985a785966b79a3efd5142 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 13 Feb 2025 10:14:02 +0000
Subject: [PATCH 008/105] feat/TS-5927-long-password-fix-cases
---
source/libs/parser/src/parAstCreater.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index 422882f651..a6e0bb8a96 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -109,7 +109,7 @@ static bool invalidPassword(const char* pPassword) {
static bool invalidStrongPassword(const char* pPassword) {
if (strcmp(pPassword, "taosdata") == 0) {
- return true;
+ return false;
}
bool charTypes[4] = {0};
From 8f2370f3fa878087c9e64f52d81b5ff01995ed1f Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 01:12:09 +0000
Subject: [PATCH 009/105] feat/TS-5927-long-password-fix-case
---
source/dnode/mnode/impl/src/mndUser.c | 11 +++--------
source/libs/parser/inc/parAst.h | 2 +-
source/libs/parser/src/parAstCreater.c | 23 +++++++++++++++++++++--
source/libs/parser/src/parTranslater.c | 9 +++++++--
tests/script/tsim/user/password.sim | 4 ++--
5 files changed, 34 insertions(+), 15 deletions(-)
diff --git a/source/dnode/mnode/impl/src/mndUser.c b/source/dnode/mnode/impl/src/mndUser.c
index c7730e8546..395897914e 100644
--- a/source/dnode/mnode/impl/src/mndUser.c
+++ b/source/dnode/mnode/impl/src/mndUser.c
@@ -1706,15 +1706,10 @@ static int32_t mndCreateUser(SMnode *pMnode, char *acct, SCreateUserReq *pCreate
int32_t lino = 0;
SUserObj userObj = {0};
- if (pCreate->isImport != 1) {
- if (pCreate->passIsMd5 == 1) {
- memcpy(userObj.pass, pCreate->pass, TSDB_PASSWORD_LEN);
- } else {
- taosEncryptPass_c((uint8_t *)pCreate->pass, strlen(pCreate->pass), userObj.pass);
- }
- } else {
- // mInfo("pCreate->pass:%s", pCreate->eass)
+ if (pCreate->passIsMd5 == 1) {
memcpy(userObj.pass, pCreate->pass, TSDB_PASSWORD_LEN);
+ } else {
+ taosEncryptPass_c((uint8_t *)pCreate->pass, strlen(pCreate->pass), userObj.pass);
}
tstrncpy(userObj.user, pCreate->user, TSDB_USER_LEN);
diff --git a/source/libs/parser/inc/parAst.h b/source/libs/parser/inc/parAst.h
index 559009d215..559c612807 100644
--- a/source/libs/parser/inc/parAst.h
+++ b/source/libs/parser/inc/parAst.h
@@ -254,7 +254,7 @@ SNode* createShowDnodeVariablesStmt(SAstCreateContext* pCxt, SNode* pDnodeId, SN
SNode* createShowVnodesStmt(SAstCreateContext* pCxt, SNode* pDnodeId, SNode* pDnodeEndpoint);
SNode* createShowTableTagsStmt(SAstCreateContext* pCxt, SNode* pTbName, SNode* pDbName, SNodeList* pTags);
SNode* createCreateUserStmt(SAstCreateContext* pCxt, SToken* pUserName, const SToken* pPassword, int8_t sysinfo,
- int8_t createdb, int8_t is_import);
+ int8_t is_import, int8_t createdb);
SNode* addCreateUserStmtWhiteList(SAstCreateContext* pCxt, SNode* pStmt, SNodeList* pIpRangesNodeList);
SNode* createAlterUserStmt(SAstCreateContext* pCxt, SToken* pUserName, int8_t alterType, void* pAlterInfo);
SNode* createDropUserStmt(SAstCreateContext* pCxt, SToken* pUserName);
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index a6e0bb8a96..908047fed3 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -164,6 +164,21 @@ static bool checkPassword(SAstCreateContext* pCxt, const SToken* pPasswordToken,
return TSDB_CODE_SUCCESS == pCxt->errCode;
}
+static bool checkImportPassword(SAstCreateContext* pCxt, const SToken* pPasswordToken, char* pPassword) {
+ if (NULL == pPasswordToken) {
+ pCxt->errCode = TSDB_CODE_PAR_SYNTAX_ERROR;
+ } else if (pPasswordToken->n > (32 + 2)) {
+ pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG);
+ } else {
+ strncpy(pPassword, pPasswordToken->z, pPasswordToken->n);
+ (void)strdequote(pPassword);
+ if (strtrim(pPassword) < 32) {
+ pCxt->errCode = generateSyntaxErrMsg(&pCxt->msgBuf, TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY);
+ }
+ }
+ return TSDB_CODE_SUCCESS == pCxt->errCode;
+}
+
static int32_t parsePort(SAstCreateContext* pCxt, const char* p, int32_t* pPort) {
*pPort = taosStr2Int32(p, NULL, 10);
if (*pPort >= UINT16_MAX || *pPort <= 0) {
@@ -3068,11 +3083,15 @@ _err:
}
SNode* createCreateUserStmt(SAstCreateContext* pCxt, SToken* pUserName, const SToken* pPassword, int8_t sysinfo,
- int8_t createDb, int8_t is_import) {
+ int8_t is_import, int8_t createDb) {
CHECK_PARSER_STATUS(pCxt);
char password[TSDB_USET_PASSWORD_LONGLEN + 3] = {0};
CHECK_NAME(checkUserName(pCxt, pUserName));
- CHECK_NAME(checkPassword(pCxt, pPassword, password));
+ if (is_import == 0) {
+ CHECK_NAME(checkPassword(pCxt, pPassword, password));
+ } else {
+ CHECK_NAME(checkImportPassword(pCxt, pPassword, password));
+ }
SCreateUserStmt* pStmt = NULL;
pCxt->errCode = nodesMakeNode(QUERY_NODE_CREATE_USER_STMT, (SNode**)&pStmt);
CHECK_MAKE_NODE(pStmt);
diff --git a/source/libs/parser/src/parTranslater.c b/source/libs/parser/src/parTranslater.c
index a2b593e234..3394bcc68c 100755
--- a/source/libs/parser/src/parTranslater.c
+++ b/source/libs/parser/src/parTranslater.c
@@ -10048,8 +10048,13 @@ static int32_t translateCreateUser(STranslateContext* pCxt, SCreateUserStmt* pSt
createReq.isImport = pStmt->isImport;
createReq.createDb = pStmt->createDb;
- taosEncryptPass_c((uint8_t*)pStmt->password, strlen(pStmt->password), createReq.pass);
- createReq.passIsMd5 = 1;
+ if(pStmt->isImport == 1){
+ tstrncpy(createReq.pass, pStmt->password, TSDB_USET_PASSWORD_LEN);
+ }
+ else{
+ taosEncryptPass_c((uint8_t*)pStmt->password, strlen(pStmt->password), createReq.pass);
+ createReq.passIsMd5 = 1;
+ }
createReq.numIpRanges = pStmt->numIpRanges;
if (pStmt->numIpRanges > 0) {
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index 4969ee0fa0..fc1594f26a 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -273,9 +273,9 @@ sql create user u27 pass 'taosdata1.'
sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
-sql CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
-sql CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
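Patch 009 tightens IS_IMPORT users: checkImportPassword() only accepts a value that is already a 32-character MD5 digest, which is why the password.sim cases importing 7- and 3-character passwords flip to sql_error above. A short sketch of that acceptance rule (the helper name is hypothetical):

def import_password_ok(token: str) -> bool:
    """The IS_IMPORT value, quotes included, must dequote to exactly 32 characters."""
    if len(token) > 32 + 2:              # token length still includes the two quotes
        return False                     # -> "Name or password too long"
    value = token.strip("'")
    return len(value) >= 32              # anything shorter -> "Password too short or empty"

assert import_password_ok("'2729c41a99b2c5222aa7dd9fc1ce3de7'")   # kept as a passing case
assert not import_password_ok("'2729c41'")                        # now a sql_error case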
From bbf169f5bc2af6d6a912bbb946e6c0c8c311864e Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 03:35:37 +0000
Subject: [PATCH 010/105] feat/TS-5927-long-password-add-case
---
source/client/src/clientImpl.c | 2 +-
source/util/src/terror.c | 4 ++--
tests/army/cluster/strongPassword.py | 4 ++++
tools/shell/inc/shellInt.h | 2 +-
4 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/source/client/src/clientImpl.c b/source/client/src/clientImpl.c
index c2a199e9c1..497398a8cd 100644
--- a/source/client/src/clientImpl.c
+++ b/source/client/src/clientImpl.c
@@ -54,7 +54,7 @@ static bool stringLengthCheck(const char* str, size_t maxsize) {
static bool validateUserName(const char* user) { return stringLengthCheck(user, TSDB_USER_LEN - 1); }
-static bool validatePassword(const char* passwd) { return stringLengthCheck(passwd, TSDB_PASSWORD_LEN - 1); }
+static bool validatePassword(const char* passwd) { return stringLengthCheck(passwd, TSDB_PASSWORD_MAX_LEN); }
static bool validateDbName(const char* db) { return stringLengthCheck(db, TSDB_DB_NAME_LEN - 1); }
diff --git a/source/util/src/terror.c b/source/util/src/terror.c
index ba2d471ccf..59a694f814 100644
--- a/source/util/src/terror.c
+++ b/source/util/src/terror.c
@@ -130,7 +130,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_VERSION, "Invalid client versio
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_IE, "Invalid client ie")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_FQDN, "Invalid host name")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_USER_LENGTH, "Invalid user name")
-TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_PASS_LENGTH, "Invalid password")
+TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_PASS_LENGTH, "Invalid password length")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_DB_LENGTH, "Database name too long")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_TABLE_ID_LENGTH, "Table name too long")
TAOS_DEFINE_ERROR(TSDB_CODE_TSC_INVALID_CONNECTION, "Invalid connection")
@@ -694,7 +694,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_PAR_PERMISSION_DENIED, "Permission denied o
TAOS_DEFINE_ERROR(TSDB_CODE_PAR_INVALID_STREAM_QUERY, "Invalid stream query")
TAOS_DEFINE_ERROR(TSDB_CODE_PAR_INVALID_INTERNAL_PK, "Invalid _c0 or _rowts expression")
TAOS_DEFINE_ERROR(TSDB_CODE_PAR_INVALID_TIMELINE_FUNC, "Invalid timeline function")
-TAOS_DEFINE_ERROR(TSDB_CODE_PAR_INVALID_PASSWD, "Invalid password")
+TAOS_DEFINE_ERROR(TSDB_CODE_PAR_INVALID_PASSWD, "Invalid password 2")
TAOS_DEFINE_ERROR(TSDB_CODE_PAR_INVALID_ALTER_TABLE, "Invalid alter table statement")
TAOS_DEFINE_ERROR(TSDB_CODE_PAR_CANNOT_DROP_PRIMARY_KEY, "Primary timestamp column cannot be dropped")
TAOS_DEFINE_ERROR(TSDB_CODE_PAR_INVALID_MODIFY_COL, "Only varbinary/binary/nchar/geometry column length could be modified, and the length can only be increased, not decreased")
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
index 311c77e158..48dbe6d512 100644
--- a/tests/army/cluster/strongPassword.py
+++ b/tests/army/cluster/strongPassword.py
@@ -54,6 +54,10 @@ class TDTestCase(TBase):
tdSql.execute("create user test2 pass '123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345' sysinfo 0;")
+ cmd = "taos -u test2 -p123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345 -s 'show databases;'"
+ if os.system(cmd) != 0:
+ raise Exception("failed to execute system command. cmd: %s" % cmd)
+
tdSql.error("alter user test2 pass '1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456';", expectErrInfo="Name or password too long")
def stop(self):
diff --git a/tools/shell/inc/shellInt.h b/tools/shell/inc/shellInt.h
index b1f09d5161..9e14cd32f0 100644
--- a/tools/shell/inc/shellInt.h
+++ b/tools/shell/inc/shellInt.h
@@ -65,7 +65,7 @@ typedef struct {
const char* commands;
const char* netrole;
char file[PATH_MAX];
- char password[TSDB_USET_PASSWORD_LEN];
+ char password[TSDB_USET_PASSWORD_LONGLEN];
bool is_gen_auth;
bool is_bi_mode;
bool is_raw_time;
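Patch 010 relaxes the client-side gate: validatePassword() now allows up to TSDB_PASSWORD_MAX_LEN characters instead of TSDB_PASSWORD_LEN - 1, and the shell's password buffer grows to TSDB_USET_PASSWORD_LONGLEN so that taos -p can take the 255-character password the new test passes on the command line. A one-line mirror of the relaxed check, assuming stringLengthCheck() means "non-empty and at most maxsize":

TSDB_PASSWORD_MAX_LEN = 255        # raised from 16 earlier in this series

def validate_password(passwd: str) -> bool:
    """Mirror of the relaxed client-side validatePassword() (sketch)."""
    return 0 < len(passwd) <= TSDB_PASSWORD_MAX_LEN

assert validate_password("x" * 255)
assert not validate_password("x" * 256)
assert not validate_password("")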
From cc3b883a437fc8163f1d96101846dd62759202bb Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 06:04:22 +0000
Subject: [PATCH 011/105] feat/TS-5927-long-password-compatable
---
source/dnode/mnode/impl/src/mndUser.c | 82 ++++++++++++++++++++++++++-
1 file changed, 81 insertions(+), 1 deletion(-)
diff --git a/source/dnode/mnode/impl/src/mndUser.c b/source/dnode/mnode/impl/src/mndUser.c
index 395897914e..0e9d2dd1a1 100644
--- a/source/dnode/mnode/impl/src/mndUser.c
+++ b/source/dnode/mnode/impl/src/mndUser.c
@@ -1709,7 +1709,12 @@ static int32_t mndCreateUser(SMnode *pMnode, char *acct, SCreateUserReq *pCreate
if (pCreate->passIsMd5 == 1) {
memcpy(userObj.pass, pCreate->pass, TSDB_PASSWORD_LEN);
} else {
- taosEncryptPass_c((uint8_t *)pCreate->pass, strlen(pCreate->pass), userObj.pass);
+ if (pCreate->isImport != 1) {
+ taosEncryptPass_c((uint8_t *)pCreate->pass, strlen(pCreate->pass), userObj.pass);
+ } else {
+ // mInfo("pCreate->pass:%s", pCreate->eass)
+ memcpy(userObj.pass, pCreate->pass, TSDB_PASSWORD_LEN);
+ }
}
tstrncpy(userObj.user, pCreate->user, TSDB_USER_LEN);
@@ -1806,6 +1811,52 @@ _OVER:
TAOS_RETURN(code);
}
+static int32_t mndCheckPasswordMinLen(const char *pwd, int32_t len) {
+ if (len < TSDB_PASSWORD_MIN_LEN) {
+ return -1;
+ }
+ return 0;
+}
+
+static int32_t mndCheckPasswordMaxLen(const char *pwd, int32_t len) {
+ if (len > TSDB_PASSWORD_MAX_LEN) {
+ return -1;
+ }
+ return 0;
+}
+
+static int32_t mndCheckPasswordFmt(const char *pwd, int32_t len) {
+ if (strcmp(pwd, "taosdata") == 0) {
+ return 0;
+ }
+
+ bool charTypes[4] = {0};
+ for (int32_t i = 0; i < len; ++i) {
+ if (taosIsBigChar(pwd[i])) {
+ charTypes[0] = true;
+ } else if (taosIsSmallChar(pwd[i])) {
+ charTypes[1] = true;
+ } else if (taosIsNumberChar(pwd[i])) {
+ charTypes[2] = true;
+ } else if (taosIsSpecialChar(pwd[i])) {
+ charTypes[3] = true;
+ } else {
+ return -1;
+ }
+ }
+
+ int32_t numOfTypes = 0;
+ for (int32_t i = 0; i < 4; ++i) {
+ numOfTypes += charTypes[i];
+ }
+
+ if (numOfTypes < 3) {
+ return -1;
+ }
+
+ return 0;
+}
+
static int32_t mndProcessCreateUserReq(SRpcMsg *pReq) {
SMnode *pMnode = pReq->info.node;
int32_t code = 0;
@@ -1839,6 +1890,21 @@ static int32_t mndProcessCreateUserReq(SRpcMsg *pReq) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_USER_FORMAT, &lino, _OVER);
}
+ if (createReq.passIsMd5 == 0) {
+ int32_t len = strlen(createReq.pass);
+ if (createReq.isImport != 1) {
+ if (mndCheckPasswordMinLen(createReq.pass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY, &lino, _OVER);
+ }
+ if (mndCheckPasswordMaxLen(createReq.pass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG, &lino, _OVER);
+ }
+ if (mndCheckPasswordFmt(createReq.pass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
+ }
+ }
+ }
+
code = mndAcquireUser(pMnode, createReq.user, &pUser);
if (pUser != NULL) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_USER_ALREADY_EXIST, &lino, _OVER);
@@ -2317,6 +2383,20 @@ static int32_t mndProcessAlterUserReq(SRpcMsg *pReq) {
if (alterReq.user[0] == 0) {
TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_USER_FORMAT, &lino, _OVER);
}
+ if (alterReq.passIsMd5 == 0) {
+ if (TSDB_ALTER_USER_PASSWD == alterReq.alterType) {
+ int32_t len = strlen(alterReq.pass);
+ if (mndCheckPasswordMinLen(alterReq.pass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_PAR_PASSWD_TOO_SHORT_OR_EMPTY, &lino, _OVER);
+ }
+ if (mndCheckPasswordMaxLen(alterReq.pass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_PAR_NAME_OR_PASSWD_TOO_LONG, &lino, _OVER);
+ }
+ if (mndCheckPasswordFmt(alterReq.pass, len) != 0) {
+ TAOS_CHECK_GOTO(TSDB_CODE_MND_INVALID_PASS_FORMAT, &lino, _OVER);
+ }
+ }
+ }
TAOS_CHECK_GOTO(mndAcquireUser(pMnode, alterReq.user, &pUser), &lino, _OVER);
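
The three helpers added in this patch spell out the server-side policy for plain-text passwords (checked only when passIsMd5 == 0, and skipped for imports on the create path): the length must fall between the minimum and maximum, and the characters must cover at least three of the four classes, with the default "taosdata" exempt from the format rule. A hedged Python mirror of that policy is convenient for generating boundary test inputs; the minimum-length value and the exact special-character set are assumptions here, while the 255 maximum matches the case exercised in cluster/strongPassword.py.

    # Hedged mirror of mndCheckPasswordMinLen/MaxLen/Fmt for test-data generation.
    # MIN_LEN and the special-character set are assumptions; MAX_LEN follows the
    # 255-character boundary used in cluster/strongPassword.py.
    import string

    MIN_LEN, MAX_LEN = 8, 255

    def password_ok(pwd: str) -> bool:
        if not (MIN_LEN <= len(pwd) <= MAX_LEN):
            return False
        if pwd == "taosdata":                          # default password skips the format rule
            return True
        allowed = string.ascii_letters + string.digits + string.punctuation
        if any(c not in allowed for c in pwd):         # mirrors the rejecting else branch
            return False
        classes = [
            any(c in string.ascii_uppercase for c in pwd),
            any(c in string.ascii_lowercase for c in pwd),
            any(c in string.digits for c in pwd),
            any(c in string.punctuation for c in pwd),
        ]
        return sum(classes) >= 3                       # at least three of the four classes
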
From c576b31f2eb0f3c758dfc2dc7c72c483cc6d78c3 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 06:47:21 +0000
Subject: [PATCH 012/105] feat/TS-5927-long-password-fix-case
---
source/libs/parser/src/parTranslater.c | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/source/libs/parser/src/parTranslater.c b/source/libs/parser/src/parTranslater.c
index 3394bcc68c..0dbc982a14 100755
--- a/source/libs/parser/src/parTranslater.c
+++ b/source/libs/parser/src/parTranslater.c
@@ -10053,8 +10053,9 @@ static int32_t translateCreateUser(STranslateContext* pCxt, SCreateUserStmt* pSt
}
else{
taosEncryptPass_c((uint8_t*)pStmt->password, strlen(pStmt->password), createReq.pass);
- createReq.passIsMd5 = 1;
- }
+
+ }
+ createReq.passIsMd5 = 1;
createReq.numIpRanges = pStmt->numIpRanges;
if (pStmt->numIpRanges > 0) {
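
With the closing brace moved, translateCreateUser now sets createReq.passIsMd5 = 1 on every path, whether the digest was just computed in the else branch or prepared by the branch not shown in this hunk. The practical point is that the digest written into createReq.pass has a fixed width regardless of how long the plain-text password is, so the existing fixed-size field still fits even the new 255-character passwords. A sketch of that, again assuming taosEncryptPass_c is an MD5-plus-hex helper:

    # A hex MD5 digest is always 32 characters, independent of plain-text length,
    # which is why the digest path is unaffected by the longer password limit.
    # (Treating taosEncryptPass_c as MD5-plus-hex is an assumption.)
    import hashlib

    long_password = "1234567890" * 25 + "12345"        # the 255-character test password
    digest = hashlib.md5(long_password.encode()).hexdigest()
    assert len(digest) == 32
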
From 6d3d2b787ad7949aed297682857d0ae22772f8f5 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 08:18:34 +0000
Subject: [PATCH 013/105] feat/TS-5927-long-password-retry-cases-fail
---
tests/parallel_test/cases.task | 1772 +--------------------------
tests/script/tsim/user/password.sim | 12 +-
2 files changed, 8 insertions(+), 1776 deletions(-)
diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task
index 0df67d3cf2..765f646cdd 100644
--- a/tests/parallel_test/cases.task
+++ b/tests/parallel_test/cases.task
@@ -4,1790 +4,22 @@
#unit-test
-,,n,unit-test,bash test.sh
#docs-examples test
-,,n,docs-examples-test,bash c.sh
-,,n,docs-examples-test,bash python.sh
-,,n,docs-examples-test,bash node.sh
-,,n,docs-examples-test,bash csharp.sh
-,,n,docs-examples-test,bash jdbc.sh
-,,n,docs-examples-test,bash rust.sh
-,,n,docs-examples-test,bash go.sh
-,,n,docs-examples-test,bash test_R.sh
+
#
# army-test
#
-,,y,army,./pytest.sh python3 ./test.py -f multi-level/mlevel_basic.py -N 3 -L 3 -D 2
-,,y,army,./pytest.sh python3 ./test.py -f db-encrypt/basic.py -N 3 -M 3
-,,y,army,./pytest.sh python3 ./test.py -f cluster/arbitrator.py -N 3
-,,n,army,python3 ./test.py -f storage/s3/s3Basic.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f cluster/snapshot.py -N 3 -L 3 -D 2
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_func_elapsed.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_function.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_selection_function_with_json.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_func_paramnum.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_percentile.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_resinfo.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_interp.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_interval.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/test_interval_diff_tz.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/concat.py
-,,y,army,./pytest.sh python3 ./test.py -f query/function/cast.py
-,,y,army,./pytest.sh python3 ./test.py -f query/test_join.py
-,,y,army,./pytest.sh python3 ./test.py -f query/test_compare.py
-,,y,army,./pytest.sh python3 ./test.py -f query/test_case_when.py
-,,y,army,./pytest.sh python3 ./test.py -f insert/test_column_tag_boundary.py
-,,y,army,./pytest.sh python3 ./test.py -f query/fill/fill_desc.py -N 3 -L 3 -D 2
-,,y,army,./pytest.sh python3 ./test.py -f query/fill/fill_null.py
-,,y,army,./pytest.sh python3 ./test.py -f cluster/test_drop_table_by_uid.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f cluster/incSnapshot.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f cluster/clusterBasic.py -N 5
-,,y,army,./pytest.sh python3 ./test.py -f cluster/tsdbSnapshot.py -N 3 -M 3
-,,y,army,./pytest.sh python3 ./test.py -f query/query_basic.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_query_accuracy.py
-,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_ts5400.py
-,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_having.py
-,,y,army,./pytest.sh python3 ./test.py -f insert/insert_basic.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f cluster/splitVgroupByLearner.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f authorith/authBasic.py -N 3
-,,n,army,python3 ./test.py -f cmdline/fullopt.py
-,,y,army,./pytest.sh python3 ./test.py -f query/show.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f alter/alterConfig.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f alter/test_alter_config.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f alter/test_alter_config.py -N 3 -M 3
-,,y,army,./pytest.sh python3 ./test.py -f query/subquery/subqueryBugs.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f storage/oneStageComp.py -N 3 -L 3 -D 1
-,,y,army,./pytest.sh python3 ./test.py -f storage/compressBasic.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f grant/grantBugs.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f query/queryBugs.py -N 3
-,,n,army,python3 ./test.py -f user/test_passwd.py
-,,y,army,./pytest.sh python3 ./test.py -f tmq/tmqBugs.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f query/fill/fill_compare_asc_desc.py
-,,y,army,./pytest.sh python3 ./test.py -f query/last/test_last.py
-,,y,army,./pytest.sh python3 ./test.py -f query/window/base.py
-,,y,army,./pytest.sh python3 ./test.py -f query/sys/tb_perf_queries_exist_test.py -N 3
-,,y,army,./pytest.sh python3 ./test.py -f query/test_having.py
-,,n,army,python3 ./test.py -f tmq/drop_lost_comsumers.py
-,,y,army,./pytest.sh python3 ./test.py -f cmdline/taosCli.py
-,,n,army,python3 ./test.py -f whole/checkErrorCode.py
-,,y,army,./pytest.sh python3 ./test.py -f cluster/strongPassword.py
+
#
# army/tools
#
# benchmark 64 cases
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/rest_insert_alltypes_json.py -R
-,,n,army,python3 ./test.py -f tools/benchmark/basic/taosdemoTestQueryWithJson-mixed-query.py -R
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_sample_csv_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosdemoTestInsertWithJsonStmt-otherPara.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_telnet_insert_alltypes-same-min-max.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/default_tmq_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/reuse-exist-stb.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_interlace.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt2_insert.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_offset_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/json_tag.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-sml-rest.py -R
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_auto_create_table_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/insert-json-csv.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert-table-creating-interval.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/insertMix.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/taosc_insert-mix.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stream_function_test.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/telnet_tcp.py -R
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_sample_csv_json-subtable.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/from-to-continue.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_json_alltypes-interlace.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/commandline-retry.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/tmq_case.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/limit_offset_json.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/commandline-sml.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_insert_alltypes_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert_alltypes_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_taosjson_insert_alltypes-same-min-max.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert_alltypes-same-min-max.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/bugs.py -B
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_sample_csv_json-subtable.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/query_json-with-error-sqlfile.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/taosc_insert-retry-json-global.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/from-to.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/exportCsv.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosdemoTestQueryWithJson.py -R
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-partial-col-numpy.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/query_json-with-sqlfile.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/query_json.py -B
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_json_alltypes.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/invalid_commandline.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_json_insert_alltypes-same-min-max.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/default_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_sample_csv_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_sample_csv_json_doesnt_use_ts.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/taosadapter_json.py -B
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/demo.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-supplement-insert.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/custom_col_tag.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_auto_create_table_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_insert_alltypes_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_insert_alltypes-same-min-max.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-vgroups.py
-,,n,army,python3 ./test.py -f tools/benchmark/basic/taosc_insert-retry-json-stb.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_auto_create_table_json.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_telnet_alltypes.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stream-test.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_taosjson_alltypes.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-single-table.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert_alltypes_json-partial-col.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/cloud/cloud-test.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/ws/websocket.py -R
-# taosdump 43 cases
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTest.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpDbStb.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeDouble.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedBigInt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpManyCols.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpStartEndTime.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTypeVarbinary.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTypeGeometry.py
-,,n,army,python3 ./test.py -f tools/taosdump/native/taosdumpDbWithNonRoot.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpEscapedDb.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeJson.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestBasic.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedSmallInt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpDbNtb.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedTinyInt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedInt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeSmallInt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeInt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestNanoSupport.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeBigInt.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeBinary.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeFloat.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpStartEndTimeLong.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestLooseMode.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeBool.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestInspect.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpInDiffType.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTest2.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeTinyInt.py
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeDouble.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedBigInt.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpEscapedDb.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpPrimaryKey.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeJson.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedSmallInt.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedTinyInt.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedInt.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeSmallInt.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeInt.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeBigInt.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeBinary.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeFloat.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeBool.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpRetry.py -B
-,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeTinyInt.py -B
-
-#
-# system test
-#
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/stream_multi_agg.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/stream_basic.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/scalar_function.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_session.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_state_window.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_session.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_state_window.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/max_delay_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/max_delay_session.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_interval_ext.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/max_delay_interval_ext.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_session_ext.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/partition_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/pause_resume_test.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/state_window_case.py
-#,,n,system-test,python3 ./test.py -f 8-stream/vnode_restart.py -N 4
-#,,n,system-test,python3 ./test.py -f 8-stream/snode_restart.py -N 4
-,,n,system-test,python3 ./test.py -f 8-stream/snode_restart_with_checkpoint.py -N 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/force_window_close_interp.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/force_window_close_interval.py
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_error.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_func.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_varchar.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_func_group.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_expr.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/project_group.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname_vgroup.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/compact-col.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tms_memleak.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/para_tms.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/para_tms2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqShow.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropStb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb0.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb3.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb0.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/ins_topics_test.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqMaxTopic.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqParamsTest.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqParamsTest.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqMaxGroupIds.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsumeDiscontinuousData.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqOffset.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_primary_key.py
-,,n,system-test,python3 ./test.py -f 7-tmq/tmqDropConsumer.py
-
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_stb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/delete_stable.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/stt_blocks_check.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_null.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/database_pre_suf.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -Q 4
-,,n,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-5761.py
-,,n,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-5761-scalemode.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-5712.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 4
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-32548.py
-,,n,system-test,python3 ./test.py -f 2-query/large_data.py
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/checkpoint_info.py -N 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/checkpoint_info2.py -N 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_multi_insert.py
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreDnode.py -N 5 -M 3 -i False
-,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreVnode.py -N 5 -M 3 -i False
-,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreMnode.py -N 5 -M 3 -i False
-,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreQnode.py -N 5 -M 3 -i False
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/create_wrong_topic.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/dropDbR3ConflictTransaction.py -N 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/basic5.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/ts-4674.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/td-30270.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb3.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb4.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb4.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/db.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqError.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/schema.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbFilterWhere.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbFilter.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqCheckData.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqCheckData1.py
-#,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsumerGroup.py
-,,n,system-test,python3 ./test.py -f 7-tmq/tmqConsumerGroup.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqAlterSchema.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-mutilVg.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-mutilVg.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-1ctb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-1ctb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-1ctb-funcNFilter.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-mutilVg-mutilCtb-funcNFilter.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-mutilVg-mutilCtb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-1ctb-funcNFilter.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-mutilVg-mutilCtb-funcNFilter.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-mutilVg-mutilCtb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqAutoCreateTbl.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDnodeRestart.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDnodeRestart1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdate-1ctb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdateWithConsume.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdate-multiCtb-snapshot0.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdate-multiCtb-snapshot1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDelete-1ctb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDelete-multiCtb.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropStbCtb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropNtb-snapshot0.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropNtb-snapshot1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUdf.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUdf-multCtb-snapshot0.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUdf-multCtb-snapshot1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbTagFilter-1ctb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/dataFromTsdbNWal.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/dataFromTsdbNWal-multiCtb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_taosx.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts5466.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_td33504.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts-5473.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts5906.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/td-32187.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/td-33225.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts4563.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_td32526.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_td32471.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_replay.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqSeekAndCommit.py
-,,n,system-test,python3 ./test.py -f 7-tmq/tmq_offset.py
-,,n,system-test,python3 ./test.py -f 7-tmq/tmqDataPrecisionUnit.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/raw_block_interface_test.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbTagFilter-multiCtb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqSubscribeStb-r3.py -N 5
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq3mnodeSwitch.py -N 6 -M 3 -i True
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq3mnodeSwitch.py -N 6 -M 3 -n 3 -i True
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-db-removewal.py -N 2 -n 1
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-stb-removewal.py -N 6 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-stb.py -N 2 -n 1
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-stb.py -N 6 -n 3
-#,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-db.py -N 6 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select.py -N 2 -n 1
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select-duplicatedata.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select-duplicatedata-false.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-ntb-select.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select-false.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-false.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-column.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-column-false.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-db.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-db-false.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/walRemoveLog.py -N 3
-
-,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeReplicate.py -M 3 -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-19201.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3404.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3581.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3311.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3821.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-5130.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-5580.py
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/balance_vgroups_r1.py -N 6
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosShell.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosShellError.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosShellNetChk.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/telemetry.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/backquote_check.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosdMonitor.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosdNewMonitor.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosd_audit.py
-,,n,system-test,python3 ./test.py -f 0-others/taosdlog.py
-,,n,system-test,python3 ./test.py -f 0-others/taosdShell.py -N 5 -M 3 -Q 3
-,,n,system-test,python3 ./test.py -f 0-others/udfTest.py
-,,n,system-test,python3 ./test.py -f 0-others/udf_create.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/udf_restart_taosd.py
-,,n,system-test,python3 ./test.py -f 0-others/udf_cfg1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/udf_cfg2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/cachemodel.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/sysinfo.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_control.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_manage.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_privilege.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_privilege_show.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_privilege_all.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/fsync.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/multilevel.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/retention_test.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/retention_test2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/multilevel_createdb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ttl.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ttlChangeOnWrite.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compress_tsz1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compress_tsz2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/view/non_marterial_view/test_view.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/test_show_table_distributed.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/test_show_disk_usage.py
-,,n,system-test,python3 ./test.py -f 0-others/compatibility.py
-,,n,system-test,python3 ./test.py -f 0-others/tag_index_basic.py
-,,n,system-test,python3 ./test.py -f 0-others/udfpy_main.py
-,,n,system-test,python3 ./test.py -N 3 -f 0-others/walRetention.py
-,,n,system-test,python3 ./test.py -f 0-others/wal_level_skip.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroup.py -N 3 -n 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroupWal.py -N 3 -n 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroup.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroupWal.py -N 3 -n 3
-,,n,system-test,python3 ./test.py -f 0-others/timeRangeWise.py -N 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/delete_check.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/test_hot_refresh_configurations.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/subscribe_stream_privilege.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/empty_identifier.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/show_transaction_detail.py -N 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/kill_balance_leader.py -N 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/kill_restore_dnode.py -N 5
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/persisit_config.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/qmemCtrl.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compact_vgroups.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compact_auto.py
-,,n,system-test,python3 ./test.py -f 0-others/dumpsdb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compact.py -N 3
-
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/composite_primary_key_create.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/composite_primary_key_insert.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/composite_primary_key_delete.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_double.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_database.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_replica.py -N 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/influxdb_line_taosc_insert.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/opentsdb_telnet_line_taosc_insert.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/opentsdb_json_taosc_insert.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_stmt_muti_insert_query.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_stmt_set_tbname_tag.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_stable.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_table.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/boundary.py
-,,n,system-test,python3 ./test.py -f 1-insert/insertWithMoreVgroup.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/table_comment.py
-#,,n,system-test,python3 ./test.py -f 1-insert/time_range_wise.py
-#,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/block_wise.py
-#,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/create_retentions.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/mutil_stage.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/table_param_ttl.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/table_param_ttl.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/update_data_muti_rows.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/db_tb_name_check.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/InsertFuturets.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_wide_column.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_column_value.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_from_csv.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_benchmark.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/precisionUS.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/precisionNS.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_ts4219.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/ts-4272.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_ts4295.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_td27388.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_ts4479.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_td29793.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_timestamp.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_td29157.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/ddl_in_sysdb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/show.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/show_tag_index.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/information_schema.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ins_filesets.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/grant.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/and_or_for_byte.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/and_or_for_byte.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/db.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/db.py -N 3 -n 3 -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/histogram.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/histogram.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/limit.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/logical_operators.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/logical_operators.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/orderBy.py -N 5
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/smaBasic.py -N 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/smaTest.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/smaTest.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/sma_index.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml_TS-3724.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml-TD19291.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varbinary.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sum.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sum.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/update_data.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/tb_100w_data_order.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/delete_childtable.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/delete_normaltable.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/keep_expired.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/stmt_error.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/drop.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/drop.py -N 3 -M 3 -i False -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/systable_func.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_ts4382.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_ts4403.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_td28163.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tagFilter.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts_3405_3398_3423.py -N 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4348-td-27939.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/backslash_g.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_ts4467.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/geometry.py
-
-,,n,system-test,python3 ./test.py -f 2-query/queryQnode.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode1mnode.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode2mnode.py -N 5
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop.py -N 5 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop.py -N 5 -M 3 -i False
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop2Follower.py -N 5 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop2Follower.py -N 5 -M 3 -i False
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStopLoop.py -N 5 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py -N 6 -M 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateDb.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateDb.py -N 6 -M 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateDb.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateDb.py -N 6 -M 3 -n 3
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeModifyMeta.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeModifyMeta.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateStb.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateStb.py -N 6 -M 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py -N 6 -M 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py -N 6 -M 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertData.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertData.py -N 6 -M 3 -n 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertDataAsync.py -N 6 -M 3
-#,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertDataAsync.py -N 6 -M 3 -n 3
-,,n,system-test,python3 ./test.py -f 6-cluster/manually-test/6dnode3mnodeInsertLessDataAlterRep3to1to3.py -N 6 -M 3
-#,,n,system-test,python3 ./test.py -f 6-cluster/5dnode3mnodeRoll.py -N 3 -C 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeAdd1Ddnoe.py -N 7 -M 3 -C 6
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeAdd1Ddnoe.py -N 7 -M 3 -C 6 -n 3
-#,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeDrop.py -N 5
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRecreateMnode.py -N 6 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStopFollowerLeader.py -N 5 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop2Follower.py -N 5 -M 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_createDb_replica1.py -N 4 -M 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas.py -N 4 -M 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas_querys.py -N 4 -M 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas.py -N 4 -M 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys.py -N 4 -M 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups.py -N 4 -M 1
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/compactDBConflict.py -N 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/mnodeEncrypt.py 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py -Q 2
-
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -Q 4
-#,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -Q 4
-#,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -Q 4
-#,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -R
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/odbc.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill_with_group.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/state_window.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-20582.py
-,,n,system-test,python3 ./test.py -f eco-system/meta/database/keep_time_offset.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py -Q 2
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py -Q 3
-,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py -Q 4
-,,y,system-test,./pytest.sh python3 ./test.py -f eco-system/manager/schema_change.py -N 3 -M 3
 #tsim test
-,,y,script,./test.sh -f tsim/query/timeline.sim
-,,y,script,./test.sh -f tsim/join/join.sim
-,,y,script,./test.sh -f tsim/tmq/basic2Of2ConsOverlap.sim
-,,y,script,./test.sh -f tsim/parser/where.sim
-,,y,script,./test.sh -f tsim/parser/join_manyblocks.sim
-,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v1_leader.sim
-,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v1_follower.sim
-,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v2.sim
-,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v3.sim
-,,y,script,./test.sh -f tsim/parser/limit1.sim
-,,y,script,./test.sh -f tsim/parser/union.sim
-,,y,script,./test.sh -f tsim/parser/commit.sim
-,,y,script,./test.sh -f tsim/parser/nestquery.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError7.sim
-,,y,script,./test.sh -f tsim/parser/groupby.sim
-,,y,script,./test.sh -f tsim/parser/sliding.sim
-,,y,script,./test.sh -f tsim/dnode/balance2.sim
-,,y,script,./test.sh -f tsim/vnode/replica3_repeat.sim
-,,y,script,./test.sh -f tsim/parser/col_arithmetic_operation.sim
-#,,y,script,./test.sh -f tsim/trans/create_db.sim
-,,y,script,./test.sh -f tsim/dnode/balance3.sim
-,,y,script,./test.sh -f tsim/vnode/replica3_many.sim
-,,y,script,./test.sh -f tsim/stable/metrics_idx.sim
-# ,,y,script,./test.sh -f tsim/db/alter_replica_13.sim
-,,y,script,./test.sh -f tsim/sync/3Replica1VgElect.sim
-,,y,script,./test.sh -f tsim/sync/3Replica5VgElect.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError6.sim
-,,y,script,./test.sh -f tsim/user/basic.sim
 ,,y,script,./test.sh -f tsim/user/password.sim
-,,y,script,./test.sh -f tsim/user/whitelist.sim
-,,y,script,./test.sh -f tsim/user/privilege_db.sim
-,,y,script,./test.sh -f tsim/user/privilege_sysinfo.sim
-,,y,script,./test.sh -f tsim/user/privilege_topic.sim
-,,y,script,./test.sh -f tsim/user/privilege_table.sim
-,,y,script,./test.sh -f tsim/user/privilege_create_db.sim
-,,y,script,./test.sh -f tsim/db/alter_option.sim
-,,y,script,./test.sh -f tsim/db/dnodelist.sim
-# ,,y,script,./test.sh -f tsim/db/alter_replica_31.sim
-,,y,script,./test.sh -f tsim/db/basic1.sim
-,,y,script,./test.sh -f tsim/db/basic2.sim
-,,y,script,./test.sh -f tsim/db/basic3.sim
-,,y,script,./test.sh -f tsim/db/basic4.sim
-,,y,script,./test.sh -f tsim/db/basic5.sim
-,,y,script,./test.sh -f tsim/db/basic6.sim
-,,y,script,./test.sh -f tsim/db/commit.sim
-,,y,script,./test.sh -f tsim/db/create_all_options.sim
-,,y,script,./test.sh -f tsim/db/delete_reuse1.sim
-,,y,script,./test.sh -f tsim/db/delete_reuse2.sim
-,,y,script,./test.sh -f tsim/db/delete_reusevnode.sim
-,,y,script,./test.sh -f tsim/db/delete_reusevnode2.sim
-,,y,script,./test.sh -f tsim/db/delete_writing1.sim
-,,y,script,./test.sh -f tsim/db/delete_writing2.sim
-,,y,script,./test.sh -f tsim/db/error1.sim
-,,y,script,./test.sh -f tsim/db/keep.sim
-,,y,script,./test.sh -f tsim/db/len.sim
-,,y,script,./test.sh -f tsim/db/repeat.sim
-,,y,script,./test.sh -f tsim/db/show_create_db.sim
-,,y,script,./test.sh -f tsim/db/show_create_table.sim
-,,y,script,./test.sh -f tsim/db/tables.sim
-,,y,script,./test.sh -f tsim/db/taosdlog.sim
-,,y,script,./test.sh -f tsim/db/table_prefix_suffix.sim
-,,y,script,./test.sh -f tsim/dnode/balance_replica1.sim
-,,y,script,./test.sh -f tsim/dnode/balance_replica3.sim
-,,y,script,./test.sh -f tsim/dnode/balance1.sim
-,,y,script,./test.sh -f tsim/dnode/balancex.sim
-,,y,script,./test.sh -f tsim/dnode/create_dnode.sim
-,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_mnode.sim
-,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_qnode_snode.sim
-,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_vnode_replica1.sim
-,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_vnode_replica3.sim
-,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_multi_vnode_replica1.sim
-,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_multi_vnode_replica3.sim
-,,y,script,./test.sh -f tsim/dnode/drop_dnode_force.sim
-,,y,script,./test.sh -f tsim/dnode/offline_reason.sim
-,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica1.sim
-,,y,script,./test.sh -f tsim/dnode/vnode_clean.sim
-,,y,script,./test.sh -f tsim/dnode/use_dropped_dnode.sim
-,,y,script,./test.sh -f tsim/dnode/split_vgroup_replica1.sim
-,,y,script,./test.sh -f tsim/dnode/split_vgroup_replica3.sim
-,,y,script,./test.sh -f tsim/import/basic.sim
-,,y,script,./test.sh -f tsim/import/commit.sim
-,,y,script,./test.sh -f tsim/import/large.sim
-,,y,script,./test.sh -f tsim/import/replica1.sim
-,,y,script,./test.sh -f tsim/insert/backquote.sim
-,,y,script,./test.sh -f tsim/insert/basic.sim
-,,y,script,./test.sh -f tsim/insert/basic0.sim
-,,y,script,./test.sh -f tsim/insert/basic1.sim
-,,y,script,./test.sh -f tsim/insert/basic2.sim
-,,y,script,./test.sh -f tsim/insert/commit-merge0.sim
-,,y,script,./test.sh -f tsim/insert/insert_drop.sim
-,,y,script,./test.sh -f tsim/insert/insert_select.sim
-,,y,script,./test.sh -f tsim/insert/null.sim
-,,y,script,./test.sh -f tsim/insert/query_block1_file.sim
-,,y,script,./test.sh -f tsim/insert/query_block1_memory.sim
-,,y,script,./test.sh -f tsim/insert/query_block2_file.sim
-,,y,script,./test.sh -f tsim/insert/query_block2_memory.sim
-,,y,script,./test.sh -f tsim/insert/query_file_memory.sim
-,,y,script,./test.sh -f tsim/insert/query_multi_file.sim
-,,y,script,./test.sh -f tsim/insert/tcp.sim
-,,y,script,./test.sh -f tsim/insert/update0.sim
-,,y,script,./test.sh -f tsim/insert/delete0.sim
-,,y,script,./test.sh -f tsim/insert/update1_sort_merge.sim
-,,y,script,./test.sh -f tsim/insert/update2.sim
-,,y,script,./test.sh -f tsim/insert/insert_stb.sim
-,,y,script,./test.sh -f tsim/parser/alter__for_community_version.sim
-,,y,script,./test.sh -f tsim/parser/alter_column.sim
-,,y,script,./test.sh -f tsim/parser/alter_stable.sim
-,,y,script,./test.sh -f tsim/parser/alter.sim
-,,y,script,./test.sh -f tsim/parser/alter1.sim
-,,y,script,./test.sh -f tsim/parser/auto_create_tb_drop_tb.sim
-,,y,script,./test.sh -f tsim/parser/auto_create_tb.sim
-,,y,script,./test.sh -f tsim/parser/between_and.sim
-,,y,script,./test.sh -f tsim/parser/binary_escapeCharacter.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_bigint.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_bool.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_double.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_float.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_int.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_smallint.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_tinyint.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_unsign.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_uint.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_timestamp.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_varchar.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_nchar.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_varbinary.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_json.sim
-,,y,script,./test.sh -f tsim/parser/columnValue_geometry.sim
-,,y,script,./test.sh -f tsim/parser/condition.sim
-,,y,script,./test.sh -f tsim/parser/condition_scl.sim
-,,y,script,./test.sh -f tsim/parser/constCol.sim
-,,y,script,./test.sh -f tsim/parser/create_db.sim
-,,y,script,./test.sh -f tsim/parser/create_mt.sim
-,,y,script,./test.sh -f tsim/parser/create_tb_with_tag_name.sim
-,,y,script,./test.sh -f tsim/parser/create_tb.sim
-,,y,script,./test.sh -f tsim/parser/dbtbnameValidate.sim
-,,y,script,./test.sh -f tsim/parser/distinct.sim
-,,y,script,./test.sh -f tsim/parser/fill_us.sim
-,,y,script,./test.sh -f tsim/parser/fill.sim
-,,y,script,./test.sh -f tsim/parser/first_last.sim
-,,y,script,./test.sh -f tsim/parser/fill_stb.sim
-,,y,script,./test.sh -f tsim/parser/interp.sim
-,,y,script,./test.sh -f tsim/parser/fourArithmetic-basic.sim
-,,y,script,./test.sh -f tsim/parser/function.sim
-,,y,script,./test.sh -f tsim/parser/groupby-basic.sim
-,,y,script,./test.sh -f tsim/parser/having_child.sim
-,,y,script,./test.sh -f tsim/parser/having.sim
-,,y,script,./test.sh -f tsim/parser/import_commit1.sim
-,,y,script,./test.sh -f tsim/parser/import_commit2.sim
-,,y,script,./test.sh -f tsim/parser/import_commit3.sim
-,,y,script,./test.sh -f tsim/parser/import_file.sim
-,,y,script,./test.sh -f tsim/parser/import.sim
-,,y,script,./test.sh -f tsim/parser/insert_multiTbl.sim
-,,y,script,./test.sh -f tsim/parser/insert_tb.sim
-,,y,script,./test.sh -f tsim/parser/join_multitables.sim
-,,y,script,./test.sh -f tsim/parser/join_multivnode.sim
-,,y,script,./test.sh -f tsim/parser/join.sim
-,,y,script,./test.sh -f tsim/parser/last_cache.sim
-,,y,script,./test.sh -f tsim/parser/last_both.sim
-,,y,script,./test.sh -f tsim/parser/last_groupby.sim
-,,y,script,./test.sh -f tsim/parser/lastrow.sim
-,,y,script,./test.sh -f tsim/parser/lastrow2.sim
-,,y,script,./test.sh -f tsim/parser/like.sim
-,,y,script,./test.sh -f tsim/parser/limit.sim
-,,y,script,./test.sh -f tsim/parser/mixed_blocks.sim
-,,y,script,./test.sh -f tsim/parser/nchar.sim
-,,y,script,./test.sh -f tsim/parser/null_char.sim
-,,y,script,./test.sh -f tsim/parser/precision_ns.sim
-,,y,script,./test.sh -f tsim/parser/projection_limit_offset.sim
-,,y,script,./test.sh -f tsim/parser/regex.sim
-,,y,script,./test.sh -f tsim/parser/regressiontest.sim
-,,y,script,./test.sh -f tsim/parser/select_across_vnodes.sim
-,,y,script,./test.sh -f tsim/parser/select_distinct_tag.sim
-,,y,script,./test.sh -f tsim/parser/select_from_cache_disk.sim
-,,y,script,./test.sh -f tsim/parser/select_with_tags.sim
-,,y,script,./test.sh -f tsim/parser/selectResNum.sim
-,,y,script,./test.sh -f tsim/parser/set_tag_vals.sim
-,,y,script,./test.sh -f tsim/parser/single_row_in_tb.sim
-,,y,script,./test.sh -f tsim/parser/slimit_alter_tags.sim
-,,y,script,./test.sh -f tsim/parser/slimit.sim
-,,y,script,./test.sh -f tsim/parser/slimit1.sim
-,,y,script,./test.sh -f tsim/parser/stableOp.sim
-,,y,script,./test.sh -f tsim/parser/tags_dynamically_specifiy.sim
-,,y,script,./test.sh -f tsim/parser/tags_filter.sim
-,,y,script,./test.sh -f tsim/parser/tbnameIn.sim
-,,y,script,./test.sh -f tsim/parser/timestamp.sim
-,,y,script,./test.sh -f tsim/parser/top_groupby.sim
-,,y,script,./test.sh -f tsim/parser/topbot.sim
-,,y,script,./test.sh -f tsim/parser/union_sysinfo.sim
-,,y,script,./test.sh -f tsim/parser/slimit_limit.sim
-,,y,script,./test.sh -f tsim/parser/table_merge_limit.sim
-,,y,script,./test.sh -f tsim/query/tagLikeFilter.sim
-,,y,script,./test.sh -f tsim/query/charScalarFunction.sim
-,,y,script,./test.sh -f tsim/query/explain.sim
-,,y,script,./test.sh -f tsim/query/interval-offset.sim
-,,y,script,./test.sh -f tsim/query/interval.sim
-,,y,script,./test.sh -f tsim/query/scalarFunction.sim
-,,y,script,./test.sh -f tsim/query/scalarNull.sim
-,,y,script,./test.sh -f tsim/query/session.sim
-,,y,script,./test.sh -f tsim/query/udf.sim
-,,n,script,./test.sh -f tsim/query/udfpy.sim
-,,y,script,./test.sh -f tsim/query/udf_with_const.sim
-,,y,script,./test.sh -f tsim/query/join_interval.sim
-,,y,script,./test.sh -f tsim/query/join_pk.sim
-,,y,script,./test.sh -f tsim/query/join_order.sim
-,,y,script,./test.sh -f tsim/query/count_spread.sim
-,,y,script,./test.sh -f tsim/query/unionall_as_table.sim
-,,y,script,./test.sh -f tsim/query/multi_order_by.sim
-,,y,script,./test.sh -f tsim/query/sys_tbname.sim
-,,y,script,./test.sh -f tsim/query/sort-pre-cols.sim
-,,y,script,./test.sh -f tsim/query/groupby.sim
-,,y,script,./test.sh -f tsim/query/groupby_distinct.sim
-,,y,script,./test.sh -f tsim/query/event.sim
-,,y,script,./test.sh -f tsim/query/forceFill.sim
-,,y,script,./test.sh -f tsim/query/emptyTsRange.sim
-,,y,script,./test.sh -f tsim/query/emptyTsRange_scl.sim
-,,y,script,./test.sh -f tsim/query/partitionby.sim
-,,y,script,./test.sh -f tsim/query/tableCount.sim
-,,y,script,./test.sh -f tsim/query/show_db_table_kind.sim
-,,y,script,./test.sh -f tsim/query/bi_star_table.sim
-,,y,script,./test.sh -f tsim/query/bi_tag_scan.sim
-,,y,script,./test.sh -f tsim/query/bi_tbname_col.sim
-,,y,script,./test.sh -f tsim/query/tag_scan.sim
-,,y,script,./test.sh -f tsim/query/nullColSma.sim
-,,y,script,./test.sh -f tsim/query/bug3398.sim
-,,y,script,./test.sh -f tsim/query/explain_tsorder.sim
-,,y,script,./test.sh -f tsim/query/apercentile.sim
-,,y,script,./test.sh -f tsim/query/query_count0.sim
-,,y,script,./test.sh -f tsim/query/query_count_sliding0.sim
-,,y,script,./test.sh -f tsim/query/union_precision.sim
-,,y,script,./test.sh -f tsim/qnode/basic1.sim
-,,y,script,./test.sh -f tsim/snode/basic1.sim
-,,y,script,./test.sh -f tsim/mnode/basic1.sim
-,,y,script,./test.sh -f tsim/mnode/basic2.sim
-#,,y,script,./test.sh -f tsim/mnode/basic3.sim
-,,y,script,./test.sh -f tsim/mnode/basic4.sim
-,,y,script,./test.sh -f tsim/mnode/basic5.sim
-,,y,script,./test.sh -f tsim/mnode/basic6.sim
-,,y,script,./test.sh -f tsim/show/basic.sim
-,,y,script,./test.sh -f tsim/table/autocreate.sim
-,,y,script,./test.sh -f tsim/table/basic1.sim
-,,y,script,./test.sh -f tsim/table/basic2.sim
-,,y,script,./test.sh -f tsim/table/basic3.sim
-,,y,script,./test.sh -f tsim/table/bigint.sim
-,,y,script,./test.sh -f tsim/table/binary.sim
-,,y,script,./test.sh -f tsim/table/bool.sim
-,,y,script,./test.sh -f tsim/table/column_name.sim
-,,y,script,./test.sh -f tsim/table/column_num.sim
-,,y,script,./test.sh -f tsim/table/column_value.sim
-,,y,script,./test.sh -f tsim/table/column2.sim
-,,y,script,./test.sh -f tsim/table/createmulti.sim
-,,y,script,./test.sh -f tsim/table/date.sim
-,,y,script,./test.sh -f tsim/table/db.table.sim
-,,y,script,./test.sh -f tsim/table/delete_reuse1.sim
-,,y,script,./test.sh -f tsim/table/delete_reuse2.sim
-,,y,script,./test.sh -f tsim/table/delete_writing.sim
-,,y,script,./test.sh -f tsim/table/describe.sim
-,,y,script,./test.sh -f tsim/table/double.sim
-,,y,script,./test.sh -f tsim/table/float.sim
-,,y,script,./test.sh -f tsim/table/hash.sim
-,,y,script,./test.sh -f tsim/table/int.sim
-,,y,script,./test.sh -f tsim/table/limit.sim
-,,y,script,./test.sh -f tsim/table/smallint.sim
-,,y,script,./test.sh -f tsim/table/table_len.sim
-,,y,script,./test.sh -f tsim/table/table.sim
-,,y,script,./test.sh -f tsim/table/tinyint.sim
-,,y,script,./test.sh -f tsim/table/vgroup.sim
-,,n,script,./test.sh -f tsim/stream/basic0.sim -g
-,,y,script,./test.sh -f tsim/stream/basic1.sim
-,,y,script,./test.sh -f tsim/stream/basic2.sim
-,,y,script,./test.sh -f tsim/stream/basic3.sim
-,,y,script,./test.sh -f tsim/stream/basic4.sim
-,,y,script,./test.sh -f tsim/stream/basic5.sim
-,,y,script,./test.sh -f tsim/stream/tag.sim
-,,y,script,./test.sh -f tsim/stream/snodeCheck.sim
-,,y,script,./test.sh -f tsim/stream/concurrentcheckpt.sim
-,,y,script,./test.sh -f tsim/stream/checkpointInterval0.sim
-,,y,script,./test.sh -f tsim/stream/checkStreamSTable1.sim
-,,y,script,./test.sh -f tsim/stream/checkStreamSTable.sim
-,,y,script,./test.sh -f tsim/stream/count0.sim
-,,y,script,./test.sh -f tsim/stream/count1.sim
-,,y,script,./test.sh -f tsim/stream/count2.sim
-,,y,script,./test.sh -f tsim/stream/count3.sim
-,,y,script,./test.sh -f tsim/stream/countSliding0.sim
-,,y,script,./test.sh -f tsim/stream/countSliding1.sim
-,,y,script,./test.sh -f tsim/stream/countSliding2.sim
-,,y,script,./test.sh -f tsim/stream/deleteInterval.sim
-,,y,script,./test.sh -f tsim/stream/deleteScalar.sim
-,,y,script,./test.sh -f tsim/stream/deleteSession.sim
-,,y,script,./test.sh -f tsim/stream/deleteState.sim
-,,y,script,./test.sh -f tsim/stream/distributeInterval0.sim
-,,y,script,./test.sh -f tsim/stream/distributeIntervalRetrive0.sim
-,,y,script,./test.sh -f tsim/stream/distributeMultiLevelInterval0.sim
-,,y,script,./test.sh -f tsim/stream/distributeSession0.sim
-,,y,script,./test.sh -f tsim/stream/drop_stream.sim
-,,y,script,./test.sh -f tsim/stream/event0.sim
-,,y,script,./test.sh -f tsim/stream/event1.sim
-,,y,script,./test.sh -f tsim/stream/event2.sim
-,,y,script,./test.sh -f tsim/stream/fillHistoryBasic1.sim
-,,y,script,./test.sh -f tsim/stream/fillHistoryBasic2.sim
-,,y,script,./test.sh -f tsim/stream/fillHistoryBasic3.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalDelete0.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalDelete1.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalLinear.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalPartitionBy.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalPrevNext1.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalPrevNext.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalRange.sim
-,,y,script,./test.sh -f tsim/stream/fillIntervalValue.sim
-,,y,script,./test.sh -f tsim/stream/ignoreCheckUpdate.sim
-,,y,script,./test.sh -f tsim/stream/ignoreExpiredData.sim
-,,y,script,./test.sh -f tsim/stream/partitionby1.sim
-,,y,script,./test.sh -f tsim/stream/partitionbyColumnInterval.sim
-,,y,script,./test.sh -f tsim/stream/partitionbyColumnOther.sim
-,,y,script,./test.sh -f tsim/stream/partitionbyColumnSession.sim
-,,y,script,./test.sh -f tsim/stream/partitionbyColumnState.sim
-,,y,script,./test.sh -f tsim/stream/partitionby.sim
-,,y,script,./test.sh -f tsim/stream/pauseAndResume.sim
-,,y,script,./test.sh -f tsim/stream/schedSnode.sim
-,,y,script,./test.sh -f tsim/stream/session0.sim
-,,y,script,./test.sh -f tsim/stream/session1.sim
-,,y,script,./test.sh -f tsim/stream/sliding.sim
-,,y,script,./test.sh -f tsim/stream/state0.sim
-,,y,script,./test.sh -f tsim/stream/state1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpDelete0.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpDelete1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpDelete2.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpError.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpForceWindowClose.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpForceWindowClose1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpFwcError.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpHistory.sim
-#,,y,script,./test.sh -f tsim/stream/streamInterpHistory1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpLarge.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpLinear0.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpNext0.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpOther.sim
-#,,y,script,./test.sh -f tsim/stream/streamInterpOther1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpPartitionBy0.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpPartitionBy1.sim
-#,,y,script,./test.sh -f tsim/stream/streamInterpPrev0.sim
-#,,y,script,./test.sh -f tsim/stream/streamInterpPrev1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey0.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey2.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey3.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpUpdate.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpUpdate1.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpUpdate2.sim
-,,y,script,./test.sh -f tsim/stream/streamInterpValue0.sim
-,,y,script,./test.sh -f tsim/stream/streamPrimaryKey0.sim
-,,y,script,./test.sh -f tsim/stream/streamPrimaryKey1.sim
-,,y,script,./test.sh -f tsim/stream/streamPrimaryKey2.sim
-,,y,script,./test.sh -f tsim/stream/streamPrimaryKey3.sim
-,,y,script,./test.sh -f tsim/stream/streamTwaError.sim
-,,y,script,./test.sh -f tsim/stream/streamTwaFwcFill.sim
-,,y,script,./test.sh -f tsim/stream/streamTwaFwcFillPrimaryKey.sim
-,,y,script,./test.sh -f tsim/stream/streamTwaFwcIntervalPrimaryKey.sim
-,,y,script,./test.sh -f tsim/stream/streamTwaInterpFwc.sim
-,,y,script,./test.sh -f tsim/stream/triggerInterval0.sim
-,,y,script,./test.sh -f tsim/stream/triggerSession0.sim
-,,y,script,./test.sh -f tsim/stream/udTableAndCol0.sim
-,,y,script,./test.sh -f tsim/stream/udTableAndTag0.sim
-,,y,script,./test.sh -f tsim/stream/udTableAndTag1.sim
-,,y,script,./test.sh -f tsim/stream/udTableAndTag2.sim
-,,y,script,./test.sh -f tsim/stream/windowClose.sim
-,,y,script,./test.sh -f tsim/trans/lossdata1.sim
-,,y,script,./test.sh -f tsim/tmq/basic1.sim
-,,y,script,./test.sh -f tsim/tmq/basic2.sim
-,,y,script,./test.sh -f tsim/tmq/basic3.sim
-,,y,script,./test.sh -f tsim/tmq/basic4.sim
-,,y,script,./test.sh -f tsim/tmq/basic1Of2Cons.sim
-,,y,script,./test.sh -f tsim/tmq/basic2Of2Cons.sim
-,,y,script,./test.sh -f tsim/tmq/basic3Of2Cons.sim
-,,y,script,./test.sh -f tsim/tmq/basic4Of2Cons.sim
-,,y,script,./test.sh -f tsim/tmq/topic.sim
-,,y,script,./test.sh -f tsim/tmq/snapshot.sim
-,,y,script,./test.sh -f tsim/tmq/snapshot1.sim
-,,y,script,./test.sh -f tsim/stable/alter_comment.sim
-,,y,script,./test.sh -f tsim/stable/alter_count.sim
-,,y,script,./test.sh -f tsim/stable/alter_import.sim
-,,y,script,./test.sh -f tsim/stable/alter_insert1.sim
-,,y,script,./test.sh -f tsim/stable/alter_insert2.sim
-,,y,script,./test.sh -f tsim/stable/alter_metrics.sim
-,,y,script,./test.sh -f tsim/stable/column_add.sim
-,,y,script,./test.sh -f tsim/stable/column_drop.sim
-,,y,script,./test.sh -f tsim/stable/column_modify.sim
-,,y,script,./test.sh -f tsim/stable/disk.sim
-,,y,script,./test.sh -f tsim/stable/dnode3.sim
-,,y,script,./test.sh -f tsim/stable/metrics.sim
-,,y,script,./test.sh -f tsim/stable/refcount.sim
-,,y,script,./test.sh -f tsim/stable/tag_add.sim
-,,y,script,./test.sh -f tsim/stable/tag_drop.sim
-,,y,script,./test.sh -f tsim/stable/tag_filter.sim
-,,y,script,./test.sh -f tsim/stable/tag_modify.sim
-,,y,script,./test.sh -f tsim/stable/tag_rename.sim
-,,y,script,./test.sh -f tsim/stable/values.sim
-,,y,script,./test.sh -f tsim/stable/vnode3.sim
-,,n,script,./test.sh -f tsim/sma/drop_sma.sim
-,,y,script,./test.sh -f tsim/sma/sma_leak.sim
-,,y,script,./test.sh -f tsim/sma/tsmaCreateInsertQuery.sim
-,,y,script,./test.sh -f tsim/sma/rsmaCreateInsertQuery.sim
-,,y,script,./test.sh -f tsim/sma/rsmaCreateInsertQueryDelete.sim
-
-### refactor stream backend, open case after rsma refactored
-#,,y,script,./test.sh -f tsim/sma/rsmaPersistenceRecovery.sim
-,,y,script,./test.sh -f tsim/sync/vnodesnapshot-rsma-test.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError1.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError2.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError3.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError4.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError5.sim
-,,n,script,./test.sh -f tsim/valgrind/checkError8.sim
-,,n,script,./test.sh -f tsim/valgrind/checkUdf.sim
-,,y,script,./test.sh -f tsim/vnode/replica3_basic.sim
-,,y,script,./test.sh -f tsim/vnode/replica3_vgroup.sim
-,,y,script,./test.sh -f tsim/vnode/replica3_import.sim
-,,y,script,./test.sh -f tsim/vnode/stable_balance_replica1.sim
-,,y,script,./test.sh -f tsim/vnode/stable_dnode2_stop.sim
-,,y,script,./test.sh -f tsim/vnode/stable_dnode2.sim
-,,y,script,./test.sh -f tsim/vnode/stable_dnode3.sim
-,,y,script,./test.sh -f tsim/vnode/stable_replica3_dnode6.sim
-,,y,script,./test.sh -f tsim/vnode/stable_replica3_vnode3.sim
-,,y,script,./test.sh -f tsim/sync/oneReplica1VgElect.sim
-,,y,script,./test.sh -f tsim/sync/oneReplica5VgElect.sim
-,,y,script,./test.sh -f tsim/catalog/alterInCurrent.sim
-,,y,script,./test.sh -f tsim/scalar/in.sim
-,,y,script,./test.sh -f tsim/scalar/scalar.sim
-,,y,script,./test.sh -f tsim/scalar/filter.sim
-,,y,script,./test.sh -f tsim/scalar/caseWhen.sim
-,,y,script,./test.sh -f tsim/scalar/tsConvert.sim
-,,y,script,./test.sh -f tsim/alter/cached_schema_after_alter.sim
-,,y,script,./test.sh -f tsim/alter/dnode.sim
-,,y,script,./test.sh -f tsim/alter/table.sim
-,,y,script,./test.sh -f tsim/cache/new_metrics.sim
-,,y,script,./test.sh -f tsim/cache/restart_table.sim
-,,y,script,./test.sh -f tsim/cache/restart_metrics.sim
-,,y,script,./test.sh -f tsim/column/commit.sim
-,,y,script,./test.sh -f tsim/column/metrics.sim
-,,y,script,./test.sh -f tsim/column/table.sim
-,,y,script,./test.sh -f tsim/compress/commitlog.sim
-,,y,script,./test.sh -f tsim/compress/compress2.sim
-,,y,script,./test.sh -f tsim/compress/compress.sim
-,,y,script,./test.sh -f tsim/compress/compress_col.sim
-,,y,script,./test.sh -f tsim/compress/uncompress.sim
-,,y,script,./test.sh -f tsim/compute/avg.sim
-,,y,script,./test.sh -f tsim/compute/block_dist.sim
-,,y,script,./test.sh -f tsim/compute/bottom.sim
-,,y,script,./test.sh -f tsim/compute/count.sim
-,,y,script,./test.sh -f tsim/compute/diff.sim
-,,y,script,./test.sh -f tsim/compute/diff2.sim
-,,y,script,./test.sh -f tsim/compute/first.sim
-,,y,script,./test.sh -f tsim/compute/interval.sim
-,,y,script,./test.sh -f tsim/compute/interval1.sim
-,,y,script,./test.sh -f tsim/compute/last_row.sim
-,,y,script,./test.sh -f tsim/compute/last.sim
-,,y,script,./test.sh -f tsim/compute/leastsquare.sim
-,,y,script,./test.sh -f tsim/compute/max.sim
-,,y,script,./test.sh -f tsim/compute/min.sim
-,,y,script,./test.sh -f tsim/compute/null.sim
-,,y,script,./test.sh -f tsim/compute/percentile.sim
-,,y,script,./test.sh -f tsim/compute/stddev.sim
-,,y,script,./test.sh -f tsim/compute/sum.sim
-,,y,script,./test.sh -f tsim/compute/top.sim
-,,y,script,./test.sh -f tsim/compute/disk_usage.sim
-,,y,script,./test.sh -f tsim/field/2.sim
-,,y,script,./test.sh -f tsim/field/3.sim
-,,y,script,./test.sh -f tsim/field/4.sim
-,,y,script,./test.sh -f tsim/field/5.sim
-,,y,script,./test.sh -f tsim/field/6.sim
-,,y,script,./test.sh -f tsim/field/binary.sim
-,,y,script,./test.sh -f tsim/field/bigint.sim
-,,y,script,./test.sh -f tsim/field/bool.sim
-,,y,script,./test.sh -f tsim/field/double.sim
-,,y,script,./test.sh -f tsim/field/float.sim
-,,y,script,./test.sh -f tsim/field/int.sim
-,,y,script,./test.sh -f tsim/field/single.sim
-,,y,script,./test.sh -f tsim/field/smallint.sim
-,,y,script,./test.sh -f tsim/field/tinyint.sim
-,,y,script,./test.sh -f tsim/field/unsigined_bigint.sim
-,,y,script,./test.sh -f tsim/vector/metrics_field.sim
-,,y,script,./test.sh -f tsim/vector/metrics_mix.sim
-,,y,script,./test.sh -f tsim/vector/metrics_query.sim
-,,y,script,./test.sh -f tsim/vector/metrics_tag.sim
-,,y,script,./test.sh -f tsim/vector/metrics_time.sim
-,,y,script,./test.sh -f tsim/vector/multi.sim
-,,y,script,./test.sh -f tsim/vector/single.sim
-,,y,script,./test.sh -f tsim/vector/table_field.sim
-,,y,script,./test.sh -f tsim/vector/table_mix.sim
-,,y,script,./test.sh -f tsim/vector/table_query.sim
-,,y,script,./test.sh -f tsim/vector/table_time.sim
-,,y,script,./test.sh -f tsim/wal/kill.sim
-,,y,script,./test.sh -f tsim/tag/3.sim
-,,y,script,./test.sh -f tsim/tag/4.sim
-,,y,script,./test.sh -f tsim/tag/5.sim
-,,y,script,./test.sh -f tsim/tag/6.sim
-,,y,script,./test.sh -f tsim/tag/add.sim
-,,y,script,./test.sh -f tsim/tag/bigint.sim
-,,y,script,./test.sh -f tsim/tag/binary_binary.sim
-,,y,script,./test.sh -f tsim/tag/binary.sim
-,,y,script,./test.sh -f tsim/tag/bool_binary.sim
-,,y,script,./test.sh -f tsim/tag/bool_int.sim
-,,y,script,./test.sh -f tsim/tag/bool.sim
-,,y,script,./test.sh -f tsim/tag/change.sim
-,,y,script,./test.sh -f tsim/tag/column.sim
-,,y,script,./test.sh -f tsim/tag/commit.sim
-,,y,script,./test.sh -f tsim/tag/create.sim
-,,y,script,./test.sh -f tsim/tag/delete.sim
-,,y,script,./test.sh -f tsim/tag/double.sim
-,,y,script,./test.sh -f tsim/tag/filter.sim
-,,y,script,./test.sh -f tsim/tag/float.sim
-,,y,script,./test.sh -f tsim/tag/int_binary.sim
-,,y,script,./test.sh -f tsim/tag/int_float.sim
-,,y,script,./test.sh -f tsim/tag/int.sim
-,,y,script,./test.sh -f tsim/tag/set.sim
-,,y,script,./test.sh -f tsim/tag/smallint.sim
-,,y,script,./test.sh -f tsim/tag/tinyint.sim
-,,y,script,./test.sh -f tsim/tag/drop_tag.sim
-,,y,script,./test.sh -f tsim/tag/tbNameIn.sim
-,,y,script,./test.sh -f tsim/tag/change_multi_tag.sim
-,,y,script,./test.sh -f tmp/monitor.sim
-,,y,script,./test.sh -f tsim/tagindex/add_index.sim
-,,n,script,./test.sh -f tsim/tagindex/sma_and_tag_index.sim
-,,y,script,./test.sh -f tsim/tagindex/indexOverflow.sim
-,,y,script,./test.sh -f tsim/view/view.sim
-,,y,script,./test.sh -f tsim/query/cache_last.sim
-,,y,script,./test.sh -f tsim/query/const.sim
-,,y,script,./test.sh -f tsim/query/nestedJoinView.sim
-
-
-
-#develop test
-,,n,develop-test,python3 ./test.py -f 2-query/table_count_scan.py
-,,n,develop-test,python3 ./test.py -f 2-query/pseudo_column.py
-,,n,develop-test,python3 ./test.py -f 2-query/ts-range.py
-,,n,develop-test,python3 ./test.py -f 2-query/tag_scan.py
-,,n,develop-test,python3 ./test.py -f 2-query/show_create_db.py
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index fc1594f26a..b55d02abe2 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -271,12 +271,12 @@ sql create user u25 pass 'taosdata1~'
sql create user u26 pass 'taosdata1,'
sql create user u27 pass 'taosdata1.'
-sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+#sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
sql_error alter USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7';
From cb8f132077c1b537f7b2cfa4f87b0081cbba4130 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 08:43:39 +0000
Subject: [PATCH 014/105] feat/TS-5927-long-password-retry-fail-case
---
tests/script/tsim/user/password.sim | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index b55d02abe2..1d8d48b826 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -272,11 +272,11 @@ sql create user u26 pass 'taosdata1,'
sql create user u27 pass 'taosdata1.'
#sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
sql_error alter USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7';
From 1272524f7f581bd36db319675dc648e0aef9b8e5 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 09:05:31 +0000
Subject: [PATCH 015/105] retry case
---
tests/script/tsim/user/password.sim | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index 1d8d48b826..89aec130c7 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -271,10 +271,10 @@ sql create user u25 pass 'taosdata1~'
sql create user u26 pass 'taosdata1,'
sql create user u27 pass 'taosdata1.'
-#sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
From 49173c601800d46ab62431de3dc8dbce8327579a Mon Sep 17 00:00:00 2001
From: dmchen
Date: Fri, 14 Feb 2025 09:06:59 +0000
Subject: [PATCH 016/105] retry case
---
tests/script/tsim/user/password.sim | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index 89aec130c7..37fd867586 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -276,7 +276,7 @@ sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREA
#sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
sql_error alter USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7';
From 9364bb63f9bd47ee58ba4671159ced70c10e437c Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 01:24:55 +0000
Subject: [PATCH 017/105] retry case
---
tests/script/tsim/user/password.sim | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index 37fd867586..cd7755b476 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -272,7 +272,7 @@ sql create user u26 pass 'taosdata1,'
sql create user u27 pass 'taosdata1.'
sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
From 854189040553fe4e0750f347918140ed94a18bb9 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 01:54:08 +0000
Subject: [PATCH 018/105] retry case
---
source/libs/parser/inc/parAst.h | 2 +-
source/libs/parser/src/parAstCreater.c | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/source/libs/parser/inc/parAst.h b/source/libs/parser/inc/parAst.h
index 559c612807..559009d215 100644
--- a/source/libs/parser/inc/parAst.h
+++ b/source/libs/parser/inc/parAst.h
@@ -254,7 +254,7 @@ SNode* createShowDnodeVariablesStmt(SAstCreateContext* pCxt, SNode* pDnodeId, SN
SNode* createShowVnodesStmt(SAstCreateContext* pCxt, SNode* pDnodeId, SNode* pDnodeEndpoint);
SNode* createShowTableTagsStmt(SAstCreateContext* pCxt, SNode* pTbName, SNode* pDbName, SNodeList* pTags);
SNode* createCreateUserStmt(SAstCreateContext* pCxt, SToken* pUserName, const SToken* pPassword, int8_t sysinfo,
- int8_t is_import, int8_t createdb);
+ int8_t createdb, int8_t is_import);
SNode* addCreateUserStmtWhiteList(SAstCreateContext* pCxt, SNode* pStmt, SNodeList* pIpRangesNodeList);
SNode* createAlterUserStmt(SAstCreateContext* pCxt, SToken* pUserName, int8_t alterType, void* pAlterInfo);
SNode* createDropUserStmt(SAstCreateContext* pCxt, SToken* pUserName);
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index 908047fed3..fe6b87f79f 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -3083,7 +3083,7 @@ _err:
}
SNode* createCreateUserStmt(SAstCreateContext* pCxt, SToken* pUserName, const SToken* pPassword, int8_t sysinfo,
- int8_t is_import, int8_t createDb) {
+ int8_t createDb, int8_t is_import) {
CHECK_PARSER_STATUS(pCxt);
char password[TSDB_USET_PASSWORD_LONGLEN + 3] = {0};
CHECK_NAME(checkUserName(pCxt, pUserName));
From 538ce1948ca4cc97370583efd97c42a583c6de67 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 02:09:57 +0000
Subject: [PATCH 019/105] retry case
---
source/libs/parser/inc/sql.y | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/source/libs/parser/inc/sql.y b/source/libs/parser/inc/sql.y
index 56b9afad9b..886592da6b 100755
--- a/source/libs/parser/inc/sql.y
+++ b/source/libs/parser/inc/sql.y
@@ -115,7 +115,7 @@ is_import_opt(A) ::= IS_IMPORT NK_INTEGER(B).
is_createdb_opt(A) ::= . { A = 0; }
is_createdb_opt(A) ::= CREATEDB NK_INTEGER(B). { A = taosStr2Int8(B.z, NULL, 10); }
/************************************************ create/alter/drop user **********************************************/
-cmd ::= CREATE USER user_name(A) PASS NK_STRING(B) sysinfo_opt(C) is_createdb_opt(F) is_import_opt(E)
+cmd ::= CREATE USER user_name(A) PASS NK_STRING(B) sysinfo_opt(C) is_createdb_opt(E) is_import_opt(F)
white_list_opt(D). {
pCxt->pRootNode = createCreateUserStmt(pCxt, &A, &B, C, E, F);
pCxt->pRootNode = addCreateUserStmtWhiteList(pCxt, pCxt->pRootNode, D);
From 6dc6a59b9b6b58dd989f7bc975d51988bbc03df7 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 02:53:32 +0000
Subject: [PATCH 020/105] retry case
---
tests/script/tsim/user/password.sim | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index cd7755b476..1b9d159413 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -87,6 +87,7 @@ sql create user user_p7 pass 'abcd!@1234567'
sql create user user_p8 pass 'abcd!@123456789'
sql create user user_p9 pass 'abcd!@1234567890'
sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T'
+sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T' is_import 0
sql drop user user_p2
sql drop user user_p3
sql drop user user_p4
@@ -272,7 +273,7 @@ sql create user u26 pass 'taosdata1,'
sql create user u27 pass 'taosdata1.'
sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+#sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
#sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
From 2dcddb024febd95c5daa4c77a6d5667b7db48c1f Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 03:26:51 +0000
Subject: [PATCH 021/105] retry case
---
tests/script/tsim/user/password.sim | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index 1b9d159413..d75aed5ab2 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -87,7 +87,7 @@ sql create user user_p7 pass 'abcd!@1234567'
sql create user user_p8 pass 'abcd!@123456789'
sql create user user_p9 pass 'abcd!@1234567890'
sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T'
-sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T' is_import 0
+sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T' cratedb 0 is_import 0
sql drop user user_p2
sql drop user user_p3
sql drop user user_p4
From 9ce37c3ec8e64217a979373a051933039f4e1135 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 04:54:49 +0000
Subject: [PATCH 022/105] retry case
---
tests/script/tsim/user/password.sim | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index d75aed5ab2..514f3d80d4 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -87,7 +87,7 @@ sql create user user_p7 pass 'abcd!@1234567'
sql create user user_p8 pass 'abcd!@123456789'
sql create user user_p9 pass 'abcd!@1234567890'
sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T'
-sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T' cratedb 0 is_import 0
+sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T' HOST '127.0.0.1'
sql drop user user_p2
sql drop user user_p3
sql drop user user_p4
From c51c64196fe28035821e9218be5a87e9fb2621cc Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 05:20:35 +0000
Subject: [PATCH 023/105] retry case
---
tests/army/cluster/strongPassword.py | 12 ++++++++++++
tests/parallel_test/cases.task | 2 +-
tests/script/tsim/user/password.sim | 14 +++++++-------
3 files changed, 20 insertions(+), 8 deletions(-)
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
index 48dbe6d512..eeda554329 100644
--- a/tests/army/cluster/strongPassword.py
+++ b/tests/army/cluster/strongPassword.py
@@ -60,6 +60,18 @@ class TDTestCase(TBase):
tdSql.error("alter user test2 pass '1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456';", expectErrInfo="Name or password too long")
+ tdSql.execute("CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';")
+
+ tdSql.error("CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task
index 765f646cdd..b146543301 100644
--- a/tests/parallel_test/cases.task
+++ b/tests/parallel_test/cases.task
@@ -11,7 +11,7 @@
#
# army-test
#
-
+,,y,army,./pytest.sh python3 ./test.py -f cluster/strongPasswrd.py
#
# army/tools
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index 514f3d80d4..cd6c124413 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -87,7 +87,6 @@ sql create user user_p7 pass 'abcd!@1234567'
sql create user user_p8 pass 'abcd!@123456789'
sql create user user_p9 pass 'abcd!@1234567890'
sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T'
-sql_error create user user_p10 pass 'abcd!@123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345T' HOST '127.0.0.1'
sql drop user user_p2
sql drop user user_p3
sql drop user user_p4
@@ -272,12 +271,13 @@ sql create user u25 pass 'taosdata1~'
sql create user u26 pass 'taosdata1,'
sql create user u27 pass 'taosdata1.'
-sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
-#sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+#move case with host to strongPassword.py because tsim has a memory leak error when using Host
+sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1;
+sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0;
+sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1;
+sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0;
+sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1';
+sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0;
sql_error alter USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7';
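
For reference, a minimal sketch of the relocated HOST cases run directly through the taospy connector rather than the tsim harness, assuming a locally running dnode with the default root/taosdata account; the expect_error helper and connection parameters are illustrative and not part of the patch:

    import taos

    def expect_error(cursor, sql):
        # The relocated cases expect the server to reject these statements.
        try:
            cursor.execute(sql)
        except Exception as err:  # taospy raises its own error classes; catch broadly in this sketch
            print("rejected as expected:", err)
        else:
            raise AssertionError("statement unexpectedly succeeded: " + sql)

    conn = taos.connect(host="localhost", user="root", password="taosdata")
    cur = conn.cursor()

    # With IS_IMPORT 1 the 32-character value is accepted as given.
    cur.execute("CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' "
                "SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1'")

    # With IS_IMPORT 0 the same literal goes through the normal password checks and is rejected.
    expect_error(cur, "CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' "
                      "SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1'")

    cur.close()
    conn.close()
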
From 5ce2aeca744bcdc454f0fe4d989d51d44446377a Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 05:42:52 +0000
Subject: [PATCH 024/105] retry case
---
tests/parallel_test/cases.task | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task
index b146543301..83ece5afed 100644
--- a/tests/parallel_test/cases.task
+++ b/tests/parallel_test/cases.task
@@ -11,7 +11,7 @@
#
# army-test
#
-,,y,army,./pytest.sh python3 ./test.py -f cluster/strongPasswrd.py
+,,y,army,./pytest.sh python3 ./test.py -f cluster/strongPassword.py
#
# army/tools
From c0c95c5c9058e99a87ab4f976d44655bafb13f27 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 05:58:07 +0000
Subject: [PATCH 025/105] retry case
---
tests/army/cluster/strongPassword.py | 28 ++++++++++++++--------------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
index eeda554329..0ed32da41b 100644
--- a/tests/army/cluster/strongPassword.py
+++ b/tests/army/cluster/strongPassword.py
@@ -35,6 +35,19 @@ class TDTestCase(TBase):
tdSql.execute("alter user test pass '23456789@Abc';")
+ #move from password.sim
+ tdSql.execute("CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';")
+
+ tdSql.error("CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
+ tdSql.error("CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+
# change setting
tdSql.execute("ALTER ALL DNODES 'enableStrongPassword' '0'")
@@ -58,20 +71,7 @@ class TDTestCase(TBase):
if os.system(cmd) != 0:
raise Exception("failed to execute system command. cmd: %s" % cmd)
- tdSql.error("alter user test2 pass '1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456';", expectErrInfo="Name or password too long")
-
- tdSql.execute("CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';")
-
- tdSql.error("CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
-
- tdSql.error("CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
-
- tdSql.error("CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
-
- tdSql.error("CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
-
- tdSql.error("CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
-
+ tdSql.error("alter user test2 pass '1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456';", expectErrInfo="Name or password too long")
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
From 8fbea148264223d96df3b068868c162fd7890884 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 06:29:54 +0000
Subject: [PATCH 026/105] retry case
---
tests/army/cluster/strongPassword.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
index 0ed32da41b..59922954d1 100644
--- a/tests/army/cluster/strongPassword.py
+++ b/tests/army/cluster/strongPassword.py
@@ -40,13 +40,13 @@ class TDTestCase(TBase):
tdSql.error("CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
- tdSql.error("CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+ tdSql.error("CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Password too short or empty")
tdSql.error("CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
- tdSql.error("CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+ tdSql.error("CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Password too short or empty")
- tdSql.error("CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
+ tdSql.error("CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Password too short or empty")
# change setting
tdSql.execute("ALTER ALL DNODES 'enableStrongPassword' '0'")
From 2baa0f97f81c8372d1ca601928712ec60165fd73 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 06:48:29 +0000
Subject: [PATCH 027/105] restore case
---
tests/parallel_test/cases.task | 1770 +++++++++++++++++++++++++++++++-
1 file changed, 1769 insertions(+), 1 deletion(-)
diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task
index 83ece5afed..94f9addfc7 100644
--- a/tests/parallel_test/cases.task
+++ b/tests/parallel_test/cases.task
@@ -4,22 +4,1790 @@
#unit-test
+,,n,unit-test,bash test.sh
#docs-examples test
-
+,,n,docs-examples-test,bash c.sh
+,,n,docs-examples-test,bash python.sh
+,,n,docs-examples-test,bash node.sh
+,,n,docs-examples-test,bash csharp.sh
+,,n,docs-examples-test,bash jdbc.sh
+,,n,docs-examples-test,bash rust.sh
+,,n,docs-examples-test,bash go.sh
+,,n,docs-examples-test,bash test_R.sh
#
# army-test
#
+,,y,army,./pytest.sh python3 ./test.py -f multi-level/mlevel_basic.py -N 3 -L 3 -D 2
+,,y,army,./pytest.sh python3 ./test.py -f db-encrypt/basic.py -N 3 -M 3
+,,y,army,./pytest.sh python3 ./test.py -f cluster/arbitrator.py -N 3
+,,n,army,python3 ./test.py -f storage/s3/s3Basic.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f cluster/snapshot.py -N 3 -L 3 -D 2
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_func_elapsed.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_function.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_selection_function_with_json.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_func_paramnum.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_percentile.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_resinfo.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_interp.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_interval.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/test_interval_diff_tz.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/concat.py
+,,y,army,./pytest.sh python3 ./test.py -f query/function/cast.py
+,,y,army,./pytest.sh python3 ./test.py -f query/test_join.py
+,,y,army,./pytest.sh python3 ./test.py -f query/test_compare.py
+,,y,army,./pytest.sh python3 ./test.py -f query/test_case_when.py
+,,y,army,./pytest.sh python3 ./test.py -f insert/test_column_tag_boundary.py
+,,y,army,./pytest.sh python3 ./test.py -f query/fill/fill_desc.py -N 3 -L 3 -D 2
+,,y,army,./pytest.sh python3 ./test.py -f query/fill/fill_null.py
+,,y,army,./pytest.sh python3 ./test.py -f cluster/test_drop_table_by_uid.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f cluster/incSnapshot.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f cluster/clusterBasic.py -N 5
+,,y,army,./pytest.sh python3 ./test.py -f cluster/tsdbSnapshot.py -N 3 -M 3
,,y,army,./pytest.sh python3 ./test.py -f cluster/strongPassword.py
+,,y,army,./pytest.sh python3 ./test.py -f query/query_basic.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_query_accuracy.py
+,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_ts5400.py
+,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_having.py
+,,y,army,./pytest.sh python3 ./test.py -f insert/insert_basic.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f cluster/splitVgroupByLearner.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f authorith/authBasic.py -N 3
+,,n,army,python3 ./test.py -f cmdline/fullopt.py
+,,y,army,./pytest.sh python3 ./test.py -f query/show.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f alter/alterConfig.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f alter/test_alter_config.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f alter/test_alter_config.py -N 3 -M 3
+,,y,army,./pytest.sh python3 ./test.py -f query/subquery/subqueryBugs.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f storage/oneStageComp.py -N 3 -L 3 -D 1
+,,y,army,./pytest.sh python3 ./test.py -f storage/compressBasic.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f grant/grantBugs.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f query/queryBugs.py -N 3
+,,n,army,python3 ./test.py -f user/test_passwd.py
+,,y,army,./pytest.sh python3 ./test.py -f tmq/tmqBugs.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f query/fill/fill_compare_asc_desc.py
+,,y,army,./pytest.sh python3 ./test.py -f query/last/test_last.py
+,,y,army,./pytest.sh python3 ./test.py -f query/window/base.py
+,,y,army,./pytest.sh python3 ./test.py -f query/sys/tb_perf_queries_exist_test.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f query/test_having.py
+,,n,army,python3 ./test.py -f tmq/drop_lost_comsumers.py
+,,y,army,./pytest.sh python3 ./test.py -f cmdline/taosCli.py
+,,n,army,python3 ./test.py -f whole/checkErrorCode.py
#
# army/tools
#
# benchmark 64 cases
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/rest_insert_alltypes_json.py -R
+,,n,army,python3 ./test.py -f tools/benchmark/basic/taosdemoTestQueryWithJson-mixed-query.py -R
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_sample_csv_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosdemoTestInsertWithJsonStmt-otherPara.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_telnet_insert_alltypes-same-min-max.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/default_tmq_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/reuse-exist-stb.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_interlace.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt2_insert.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_offset_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/json_tag.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-sml-rest.py -R
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_auto_create_table_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/insert-json-csv.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert-table-creating-interval.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/insertMix.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/taosc_insert-mix.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stream_function_test.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/telnet_tcp.py -R
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_sample_csv_json-subtable.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/from-to-continue.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_json_alltypes-interlace.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/commandline-retry.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/tmq_case.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/limit_offset_json.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/commandline-sml.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_insert_alltypes_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert_alltypes_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/sml_taosjson_insert_alltypes-same-min-max.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert_alltypes-same-min-max.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/bugs.py -B
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_sample_csv_json-subtable.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/query_json-with-error-sqlfile.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/taosc_insert-retry-json-global.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/from-to.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/exportCsv.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosdemoTestQueryWithJson.py -R
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-partial-col-numpy.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/query_json-with-sqlfile.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/query_json.py -B
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_json_alltypes.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/invalid_commandline.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_json_insert_alltypes-same-min-max.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/default_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_sample_csv_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_sample_csv_json_doesnt_use_ts.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/taosadapter_json.py -B
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/demo.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-supplement-insert.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/custom_col_tag.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_auto_create_table_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_insert_alltypes_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_insert_alltypes-same-min-max.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-vgroups.py
+,,n,army,python3 ./test.py -f tools/benchmark/basic/taosc_insert-retry-json-stb.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_auto_create_table_json.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_telnet_alltypes.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stream-test.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/sml_taosjson_alltypes.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/commandline-single-table.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/taosc_insert_alltypes_json-partial-col.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/cloud/cloud-test.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/ws/websocket.py -R
+# taosdump 43 cases
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTest.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpDbStb.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeDouble.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedBigInt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpManyCols.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpStartEndTime.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTypeVarbinary.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTypeGeometry.py
+,,n,army,python3 ./test.py -f tools/taosdump/native/taosdumpDbWithNonRoot.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpEscapedDb.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeJson.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestBasic.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedSmallInt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpDbNtb.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedTinyInt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeUnsignedInt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeSmallInt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeInt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestNanoSupport.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeBigInt.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeBinary.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeFloat.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpStartEndTimeLong.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestLooseMode.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeBool.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestInspect.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpInDiffType.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTest2.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/taosdump/native/taosdumpTestTypeTinyInt.py
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeDouble.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedBigInt.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpEscapedDb.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpPrimaryKey.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeJson.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedSmallInt.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedTinyInt.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeUnsignedInt.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeSmallInt.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeInt.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeBigInt.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeBinary.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeFloat.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeBool.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpRetry.py -B
+,,n,army,python3 ./test.py -f tools/taosdump/ws/taosdumpTestTypeTinyInt.py -B
+
+#
+# system test
+#
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/stream_multi_agg.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/stream_basic.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/scalar_function.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_session.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_state_window.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_session.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_state_window.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/max_delay_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/max_delay_session.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/at_once_interval_ext.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/max_delay_interval_ext.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/window_close_session_ext.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/partition_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/pause_resume_test.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/state_window_case.py
+#,,n,system-test,python3 ./test.py -f 8-stream/vnode_restart.py -N 4
+#,,n,system-test,python3 ./test.py -f 8-stream/snode_restart.py -N 4
+,,n,system-test,python3 ./test.py -f 8-stream/snode_restart_with_checkpoint.py -N 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/force_window_close_interp.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/force_window_close_interval.py
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_error.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_func.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_varchar.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pk_func_group.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_expr.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/project_group.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname_vgroup.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/compact-col.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tms_memleak.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stbJoin.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hint.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/para_tms.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/para_tms2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/columnLenUpdated.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_unit.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_by_col_agg.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interval_limit_opt_2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/func_to_char_timestamp.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_cache_scan.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbname.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsma2.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tbnameIn.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp_extension.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqShow.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropStb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb0.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb3.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb0.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/ins_topics_test.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqMaxTopic.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqParamsTest.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqParamsTest.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqMaxGroupIds.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsumeDiscontinuousData.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqOffset.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_primary_key.py
+,,n,system-test,python3 ./test.py -f 7-tmq/tmqDropConsumer.py
+
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_stb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/delete_stable.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/stt_blocks_check.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_null.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_null_none.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/database_pre_suf.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_str.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_math.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_time.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQuery_26.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/select_null.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/slimit.py -Q 4
+,,n,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-5761.py
+,,n,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-5761-scalemode.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-5712.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4233.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/like.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/match.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-28068.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_AlwaysReturnValue.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 4
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-32548.py
+,,n,system-test,python3 ./test.py -f 2-query/large_data.py
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stddev_test.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/checkpoint_info.py -N 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 8-stream/checkpoint_info2.py -N 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_multi_insert.py
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreDnode.py -N 5 -M 3 -i False
+,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreVnode.py -N 5 -M 3 -i False
+,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreMnode.py -N 5 -M 3 -i False
+,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreQnode.py -N 5 -M 3 -i False
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/create_wrong_topic.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/dropDbR3ConflictTransaction.py -N 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/basic5.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/ts-4674.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/td-30270.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb3.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeDb4.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/subscribeStb4.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/db.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqError.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/schema.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbFilterWhere.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbFilter.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqCheckData.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqCheckData1.py
+#,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsumerGroup.py
+,,n,system-test,python3 ./test.py -f 7-tmq/tmqConsumerGroup.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqAlterSchema.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-mutilVg.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-mutilVg.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-1ctb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-1ctb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-1ctb-funcNFilter.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-mutilVg-mutilCtb-funcNFilter.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb-mutilVg-mutilCtb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-1ctb-funcNFilter.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-mutilVg-mutilCtb-funcNFilter.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqConsFromTsdb1-mutilVg-mutilCtb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqAutoCreateTbl.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDnodeRestart.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDnodeRestart1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdate-1ctb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdateWithConsume.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdate-multiCtb-snapshot0.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUpdate-multiCtb-snapshot1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDelete-1ctb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDelete-multiCtb.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropStbCtb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropNtb-snapshot0.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqDropNtb-snapshot1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUdf.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUdf-multCtb-snapshot0.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqUdf-multCtb-snapshot1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbTagFilter-1ctb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/dataFromTsdbNWal.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/dataFromTsdbNWal-multiCtb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_taosx.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts5466.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_td33504.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts-5473.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts5906.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/td-32187.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/td-33225.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_ts4563.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_td32526.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_td32471.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq_replay.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqSeekAndCommit.py
+,,n,system-test,python3 ./test.py -f 7-tmq/tmq_offset.py
+,,n,system-test,python3 ./test.py -f 7-tmq/tmqDataPrecisionUnit.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/raw_block_interface_test.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/stbTagFilter-multiCtb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmqSubscribeStb-r3.py -N 5
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq3mnodeSwitch.py -N 6 -M 3 -i True
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/tmq3mnodeSwitch.py -N 6 -M 3 -n 3 -i True
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-db-removewal.py -N 2 -n 1
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-stb-removewal.py -N 6 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-stb.py -N 2 -n 1
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-stb.py -N 6 -n 3
+#,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeTransform-db.py -N 6 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select.py -N 2 -n 1
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select-duplicatedata.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select-duplicatedata-false.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-ntb-select.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-select-false.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-stb-false.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-column.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-column-false.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-db.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeSplit-db-false.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 7-tmq/walRemoveLog.py -N 3
+
+,,y,system-test,./pytest.sh python3 test.py -f 7-tmq/tmqVnodeReplicate.py -M 3 -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-19201.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3404.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3581.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3311.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-3821.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-5130.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TS-5580.py
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/balance_vgroups_r1.py -N 6
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosShell.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosShellError.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosShellNetChk.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/telemetry.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/backquote_check.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosdMonitor.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosdNewMonitor.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/taosd_audit.py
+,,n,system-test,python3 ./test.py -f 0-others/taosdlog.py
+,,n,system-test,python3 ./test.py -f 0-others/taosdShell.py -N 5 -M 3 -Q 3
+,,n,system-test,python3 ./test.py -f 0-others/udfTest.py
+,,n,system-test,python3 ./test.py -f 0-others/udf_create.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/udf_restart_taosd.py
+,,n,system-test,python3 ./test.py -f 0-others/udf_cfg1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/udf_cfg2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/cachemodel.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/sysinfo.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_control.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_manage.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_privilege.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_privilege_show.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_privilege_all.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/fsync.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/multilevel.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/retention_test.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/retention_test2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/multilevel_createdb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ttl.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ttlChangeOnWrite.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compress_tsz1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compress_tsz2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/view/non_marterial_view/test_view.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/test_show_table_distributed.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/test_show_disk_usage.py
+,,n,system-test,python3 ./test.py -f 0-others/compatibility.py
+,,n,system-test,python3 ./test.py -f 0-others/tag_index_basic.py
+,,n,system-test,python3 ./test.py -f 0-others/udfpy_main.py
+,,n,system-test,python3 ./test.py -N 3 -f 0-others/walRetention.py
+,,n,system-test,python3 ./test.py -f 0-others/wal_level_skip.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroup.py -N 3 -n 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroupWal.py -N 3 -n 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroup.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/splitVGroupWal.py -N 3 -n 3
+,,n,system-test,python3 ./test.py -f 0-others/timeRangeWise.py -N 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/delete_check.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/test_hot_refresh_configurations.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/subscribe_stream_privilege.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/empty_identifier.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/show_transaction_detail.py -N 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/kill_balance_leader.py -N 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/kill_restore_dnode.py -N 5
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/persisit_config.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/qmemCtrl.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compact_vgroups.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compact_auto.py
+,,n,system-test,python3 ./test.py -f 0-others/dumpsdb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/compact.py -N 3
+
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/composite_primary_key_create.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/composite_primary_key_insert.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/composite_primary_key_delete.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_double.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_database.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_replica.py -N 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/influxdb_line_taosc_insert.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/opentsdb_telnet_line_taosc_insert.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/opentsdb_json_taosc_insert.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_stmt_muti_insert_query.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_stmt_set_tbname_tag.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_stable.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/alter_table.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/boundary.py
+,,n,system-test,python3 ./test.py -f 1-insert/insertWithMoreVgroup.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/table_comment.py
+#,,n,system-test,python3 ./test.py -f 1-insert/time_range_wise.py
+#,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/block_wise.py
+#,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/create_retentions.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/mutil_stage.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/table_param_ttl.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/table_param_ttl.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/update_data_muti_rows.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/db_tb_name_check.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/InsertFuturets.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_wide_column.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_column_value.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_from_csv.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_benchmark.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_1.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_2.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_3.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/rowlength64k_4.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/precisionUS.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/precisionNS.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_ts4219.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/ts-4272.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_ts4295.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_td27388.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_ts4479.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_td29793.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/insert_timestamp.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/test_td29157.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/ddl_in_sysdb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/show.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/show_tag_index.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/information_schema.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ins_filesets.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/grant.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/and_or_for_byte.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/and_or_for_byte.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/group_partition.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/db.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/db.py -N 3 -n 3 -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/histogram.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/histogram.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_and_last_row.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last+last_row.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_1.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_2.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_3.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_4.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/primary_ts_base_5.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/limit.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/logical_operators.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/logical_operators.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/orderBy.py -N 5
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/smaBasic.py -N 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/smaTest.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/smaTest.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/sma_index.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml_TS-3724.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml-TD19291.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varbinary.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sum.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sum.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/update_data.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/tb_100w_data_order.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/delete_childtable.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/delete_normaltable.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/keep_expired.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/stmt_error.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/drop.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 1-insert/drop.py -N 3 -M 3 -i False -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/systable_func.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_ts4382.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_ts4403.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_td28163.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tagFilter.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts_3405_3398_3423.py -N 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ts-4348-td-27939.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/backslash_g.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/test_ts4467.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/geometry.py
+
+,,n,system-test,python3 ./test.py -f 2-query/queryQnode.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode1mnode.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode2mnode.py -N 5
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop.py -N 5 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop.py -N 5 -M 3 -i False
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop2Follower.py -N 5 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop2Follower.py -N 5 -M 3 -i False
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStopLoop.py -N 5 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py -N 6 -M 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateDb.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateDb.py -N 6 -M 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateDb.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateDb.py -N 6 -M 3 -n 3
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeModifyMeta.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeModifyMeta.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateStb.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateStb.py -N 6 -M 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py -N 6 -M 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py -N 6 -M 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertData.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertData.py -N 6 -M 3 -n 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertDataAsync.py -N 6 -M 3
+#,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRestartDnodeInsertDataAsync.py -N 6 -M 3 -n 3
+,,n,system-test,python3 ./test.py -f 6-cluster/manually-test/6dnode3mnodeInsertLessDataAlterRep3to1to3.py -N 6 -M 3
+#,,n,system-test,python3 ./test.py -f 6-cluster/5dnode3mnodeRoll.py -N 3 -C 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeAdd1Ddnoe.py -N 7 -M 3 -C 6
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeAdd1Ddnoe.py -N 7 -M 3 -C 6 -n 3
+#,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeDrop.py -N 5
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeRecreateMnode.py -N 6 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStopFollowerLeader.py -N 5 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/5dnode3mnodeStop2Follower.py -N 5 -M 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_createDb_replica1.py -N 4 -M 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas.py -N 4 -M 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas_querys.py -N 4 -M 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas.py -N 4 -M 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys.py -N 4 -M 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups.py -N 4 -M 1
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/compactDBConflict.py -N 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 6-cluster/mnodeEncrypt.py 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py -Q 2
+
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/between.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distinct.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/varchar.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ltrim.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/rtrim.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/length.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/char_length.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/upper.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/lower.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/join2.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/substr.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/union1.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat2.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/concat_ws2.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/check_tsdb.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/spread.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/hyperloglog.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/explain.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/leastsquares.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timezone.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Now.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Today.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/min.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/normal.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/not.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mode.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/countAlwaysReturnValue.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/first.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_iso8601.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/To_unixtimestamp.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/timetruncate.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/diff.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/Timediff.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/json_tag.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/top.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/bottom.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/percentile.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ceil.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/floor.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/round.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/log.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/pow.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sqrt.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sin.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cos.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tan.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arcsin.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arccos.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/arctan.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/query_cols_tags_and_or.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/nestedQueryInterval.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stablity_1.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/avg.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/elapsed.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/csum.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/mavg.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sample.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/cast.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_diff.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/unique.py -Q 4
+#,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/stateduration.py -Q 4
+#,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_stateduration.py -Q 4
+#,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/statecount.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tail.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/ttl_comment.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_count.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_max.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_min.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_sum.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_spread.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_apercentile.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_avg.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/distribute_agg_stddev.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/twa.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/irate.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/function_null.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/count_partition.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_partition.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/partition_limit_interval.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/max_min_last_interval.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row_interval.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/last_row.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/tsbsQuery.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/sml.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/interp.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/case_when.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/insert_select.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/out_of_order.py -R
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/blockSMA.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/projectionDesc.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/odbc.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/fill_with_group.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/state_window.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-21561.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f 99-TDcase/TD-20582.py
+,,n,system-test,python3 ./test.py -f eco-system/meta/database/keep_time_offset.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py -Q 2
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py -Q 3
+,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/operator.py -Q 4
+,,y,system-test,./pytest.sh python3 ./test.py -f eco-system/manager/schema_change.py -N 3 -M 3
#tsim test
+,,y,script,./test.sh -f tsim/query/timeline.sim
+,,y,script,./test.sh -f tsim/join/join.sim
+,,y,script,./test.sh -f tsim/tmq/basic2Of2ConsOverlap.sim
+,,y,script,./test.sh -f tsim/parser/where.sim
+,,y,script,./test.sh -f tsim/parser/join_manyblocks.sim
+,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v1_leader.sim
+,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v1_follower.sim
+,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v2.sim
+,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica3_v3.sim
+,,y,script,./test.sh -f tsim/parser/limit1.sim
+,,y,script,./test.sh -f tsim/parser/union.sim
+,,y,script,./test.sh -f tsim/parser/commit.sim
+,,y,script,./test.sh -f tsim/parser/nestquery.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError7.sim
+,,y,script,./test.sh -f tsim/parser/groupby.sim
+,,y,script,./test.sh -f tsim/parser/sliding.sim
+,,y,script,./test.sh -f tsim/dnode/balance2.sim
+,,y,script,./test.sh -f tsim/vnode/replica3_repeat.sim
+,,y,script,./test.sh -f tsim/parser/col_arithmetic_operation.sim
+#,,y,script,./test.sh -f tsim/trans/create_db.sim
+,,y,script,./test.sh -f tsim/dnode/balance3.sim
+,,y,script,./test.sh -f tsim/vnode/replica3_many.sim
+,,y,script,./test.sh -f tsim/stable/metrics_idx.sim
+# ,,y,script,./test.sh -f tsim/db/alter_replica_13.sim
+,,y,script,./test.sh -f tsim/sync/3Replica1VgElect.sim
+,,y,script,./test.sh -f tsim/sync/3Replica5VgElect.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError6.sim
+,,y,script,./test.sh -f tsim/user/basic.sim
,,y,script,./test.sh -f tsim/user/password.sim
+,,y,script,./test.sh -f tsim/user/whitelist.sim
+,,y,script,./test.sh -f tsim/user/privilege_db.sim
+,,y,script,./test.sh -f tsim/user/privilege_sysinfo.sim
+,,y,script,./test.sh -f tsim/user/privilege_topic.sim
+,,y,script,./test.sh -f tsim/user/privilege_table.sim
+,,y,script,./test.sh -f tsim/user/privilege_create_db.sim
+,,y,script,./test.sh -f tsim/db/alter_option.sim
+,,y,script,./test.sh -f tsim/db/dnodelist.sim
+# ,,y,script,./test.sh -f tsim/db/alter_replica_31.sim
+,,y,script,./test.sh -f tsim/db/basic1.sim
+,,y,script,./test.sh -f tsim/db/basic2.sim
+,,y,script,./test.sh -f tsim/db/basic3.sim
+,,y,script,./test.sh -f tsim/db/basic4.sim
+,,y,script,./test.sh -f tsim/db/basic5.sim
+,,y,script,./test.sh -f tsim/db/basic6.sim
+,,y,script,./test.sh -f tsim/db/commit.sim
+,,y,script,./test.sh -f tsim/db/create_all_options.sim
+,,y,script,./test.sh -f tsim/db/delete_reuse1.sim
+,,y,script,./test.sh -f tsim/db/delete_reuse2.sim
+,,y,script,./test.sh -f tsim/db/delete_reusevnode.sim
+,,y,script,./test.sh -f tsim/db/delete_reusevnode2.sim
+,,y,script,./test.sh -f tsim/db/delete_writing1.sim
+,,y,script,./test.sh -f tsim/db/delete_writing2.sim
+,,y,script,./test.sh -f tsim/db/error1.sim
+,,y,script,./test.sh -f tsim/db/keep.sim
+,,y,script,./test.sh -f tsim/db/len.sim
+,,y,script,./test.sh -f tsim/db/repeat.sim
+,,y,script,./test.sh -f tsim/db/show_create_db.sim
+,,y,script,./test.sh -f tsim/db/show_create_table.sim
+,,y,script,./test.sh -f tsim/db/tables.sim
+,,y,script,./test.sh -f tsim/db/taosdlog.sim
+,,y,script,./test.sh -f tsim/db/table_prefix_suffix.sim
+,,y,script,./test.sh -f tsim/dnode/balance_replica1.sim
+,,y,script,./test.sh -f tsim/dnode/balance_replica3.sim
+,,y,script,./test.sh -f tsim/dnode/balance1.sim
+,,y,script,./test.sh -f tsim/dnode/balancex.sim
+,,y,script,./test.sh -f tsim/dnode/create_dnode.sim
+,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_mnode.sim
+,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_qnode_snode.sim
+,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_vnode_replica1.sim
+,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_vnode_replica3.sim
+,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_multi_vnode_replica1.sim
+,,y,script,./test.sh -f tsim/dnode/drop_dnode_has_multi_vnode_replica3.sim
+,,y,script,./test.sh -f tsim/dnode/drop_dnode_force.sim
+,,y,script,./test.sh -f tsim/dnode/offline_reason.sim
+,,y,script,./test.sh -f tsim/dnode/redistribute_vgroup_replica1.sim
+,,y,script,./test.sh -f tsim/dnode/vnode_clean.sim
+,,y,script,./test.sh -f tsim/dnode/use_dropped_dnode.sim
+,,y,script,./test.sh -f tsim/dnode/split_vgroup_replica1.sim
+,,y,script,./test.sh -f tsim/dnode/split_vgroup_replica3.sim
+,,y,script,./test.sh -f tsim/import/basic.sim
+,,y,script,./test.sh -f tsim/import/commit.sim
+,,y,script,./test.sh -f tsim/import/large.sim
+,,y,script,./test.sh -f tsim/import/replica1.sim
+,,y,script,./test.sh -f tsim/insert/backquote.sim
+,,y,script,./test.sh -f tsim/insert/basic.sim
+,,y,script,./test.sh -f tsim/insert/basic0.sim
+,,y,script,./test.sh -f tsim/insert/basic1.sim
+,,y,script,./test.sh -f tsim/insert/basic2.sim
+,,y,script,./test.sh -f tsim/insert/commit-merge0.sim
+,,y,script,./test.sh -f tsim/insert/insert_drop.sim
+,,y,script,./test.sh -f tsim/insert/insert_select.sim
+,,y,script,./test.sh -f tsim/insert/null.sim
+,,y,script,./test.sh -f tsim/insert/query_block1_file.sim
+,,y,script,./test.sh -f tsim/insert/query_block1_memory.sim
+,,y,script,./test.sh -f tsim/insert/query_block2_file.sim
+,,y,script,./test.sh -f tsim/insert/query_block2_memory.sim
+,,y,script,./test.sh -f tsim/insert/query_file_memory.sim
+,,y,script,./test.sh -f tsim/insert/query_multi_file.sim
+,,y,script,./test.sh -f tsim/insert/tcp.sim
+,,y,script,./test.sh -f tsim/insert/update0.sim
+,,y,script,./test.sh -f tsim/insert/delete0.sim
+,,y,script,./test.sh -f tsim/insert/update1_sort_merge.sim
+,,y,script,./test.sh -f tsim/insert/update2.sim
+,,y,script,./test.sh -f tsim/insert/insert_stb.sim
+,,y,script,./test.sh -f tsim/parser/alter__for_community_version.sim
+,,y,script,./test.sh -f tsim/parser/alter_column.sim
+,,y,script,./test.sh -f tsim/parser/alter_stable.sim
+,,y,script,./test.sh -f tsim/parser/alter.sim
+,,y,script,./test.sh -f tsim/parser/alter1.sim
+,,y,script,./test.sh -f tsim/parser/auto_create_tb_drop_tb.sim
+,,y,script,./test.sh -f tsim/parser/auto_create_tb.sim
+,,y,script,./test.sh -f tsim/parser/between_and.sim
+,,y,script,./test.sh -f tsim/parser/binary_escapeCharacter.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_bigint.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_bool.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_double.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_float.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_int.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_smallint.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_tinyint.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_unsign.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_uint.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_timestamp.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_varchar.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_nchar.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_varbinary.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_json.sim
+,,y,script,./test.sh -f tsim/parser/columnValue_geometry.sim
+,,y,script,./test.sh -f tsim/parser/condition.sim
+,,y,script,./test.sh -f tsim/parser/condition_scl.sim
+,,y,script,./test.sh -f tsim/parser/constCol.sim
+,,y,script,./test.sh -f tsim/parser/create_db.sim
+,,y,script,./test.sh -f tsim/parser/create_mt.sim
+,,y,script,./test.sh -f tsim/parser/create_tb_with_tag_name.sim
+,,y,script,./test.sh -f tsim/parser/create_tb.sim
+,,y,script,./test.sh -f tsim/parser/dbtbnameValidate.sim
+,,y,script,./test.sh -f tsim/parser/distinct.sim
+,,y,script,./test.sh -f tsim/parser/fill_us.sim
+,,y,script,./test.sh -f tsim/parser/fill.sim
+,,y,script,./test.sh -f tsim/parser/first_last.sim
+,,y,script,./test.sh -f tsim/parser/fill_stb.sim
+,,y,script,./test.sh -f tsim/parser/interp.sim
+,,y,script,./test.sh -f tsim/parser/fourArithmetic-basic.sim
+,,y,script,./test.sh -f tsim/parser/function.sim
+,,y,script,./test.sh -f tsim/parser/groupby-basic.sim
+,,y,script,./test.sh -f tsim/parser/having_child.sim
+,,y,script,./test.sh -f tsim/parser/having.sim
+,,y,script,./test.sh -f tsim/parser/import_commit1.sim
+,,y,script,./test.sh -f tsim/parser/import_commit2.sim
+,,y,script,./test.sh -f tsim/parser/import_commit3.sim
+,,y,script,./test.sh -f tsim/parser/import_file.sim
+,,y,script,./test.sh -f tsim/parser/import.sim
+,,y,script,./test.sh -f tsim/parser/insert_multiTbl.sim
+,,y,script,./test.sh -f tsim/parser/insert_tb.sim
+,,y,script,./test.sh -f tsim/parser/join_multitables.sim
+,,y,script,./test.sh -f tsim/parser/join_multivnode.sim
+,,y,script,./test.sh -f tsim/parser/join.sim
+,,y,script,./test.sh -f tsim/parser/last_cache.sim
+,,y,script,./test.sh -f tsim/parser/last_both.sim
+,,y,script,./test.sh -f tsim/parser/last_groupby.sim
+,,y,script,./test.sh -f tsim/parser/lastrow.sim
+,,y,script,./test.sh -f tsim/parser/lastrow2.sim
+,,y,script,./test.sh -f tsim/parser/like.sim
+,,y,script,./test.sh -f tsim/parser/limit.sim
+,,y,script,./test.sh -f tsim/parser/mixed_blocks.sim
+,,y,script,./test.sh -f tsim/parser/nchar.sim
+,,y,script,./test.sh -f tsim/parser/null_char.sim
+,,y,script,./test.sh -f tsim/parser/precision_ns.sim
+,,y,script,./test.sh -f tsim/parser/projection_limit_offset.sim
+,,y,script,./test.sh -f tsim/parser/regex.sim
+,,y,script,./test.sh -f tsim/parser/regressiontest.sim
+,,y,script,./test.sh -f tsim/parser/select_across_vnodes.sim
+,,y,script,./test.sh -f tsim/parser/select_distinct_tag.sim
+,,y,script,./test.sh -f tsim/parser/select_from_cache_disk.sim
+,,y,script,./test.sh -f tsim/parser/select_with_tags.sim
+,,y,script,./test.sh -f tsim/parser/selectResNum.sim
+,,y,script,./test.sh -f tsim/parser/set_tag_vals.sim
+,,y,script,./test.sh -f tsim/parser/single_row_in_tb.sim
+,,y,script,./test.sh -f tsim/parser/slimit_alter_tags.sim
+,,y,script,./test.sh -f tsim/parser/slimit.sim
+,,y,script,./test.sh -f tsim/parser/slimit1.sim
+,,y,script,./test.sh -f tsim/parser/stableOp.sim
+,,y,script,./test.sh -f tsim/parser/tags_dynamically_specifiy.sim
+,,y,script,./test.sh -f tsim/parser/tags_filter.sim
+,,y,script,./test.sh -f tsim/parser/tbnameIn.sim
+,,y,script,./test.sh -f tsim/parser/timestamp.sim
+,,y,script,./test.sh -f tsim/parser/top_groupby.sim
+,,y,script,./test.sh -f tsim/parser/topbot.sim
+,,y,script,./test.sh -f tsim/parser/union_sysinfo.sim
+,,y,script,./test.sh -f tsim/parser/slimit_limit.sim
+,,y,script,./test.sh -f tsim/parser/table_merge_limit.sim
+,,y,script,./test.sh -f tsim/query/tagLikeFilter.sim
+,,y,script,./test.sh -f tsim/query/charScalarFunction.sim
+,,y,script,./test.sh -f tsim/query/explain.sim
+,,y,script,./test.sh -f tsim/query/interval-offset.sim
+,,y,script,./test.sh -f tsim/query/interval.sim
+,,y,script,./test.sh -f tsim/query/scalarFunction.sim
+,,y,script,./test.sh -f tsim/query/scalarNull.sim
+,,y,script,./test.sh -f tsim/query/session.sim
+,,y,script,./test.sh -f tsim/query/udf.sim
+,,n,script,./test.sh -f tsim/query/udfpy.sim
+,,y,script,./test.sh -f tsim/query/udf_with_const.sim
+,,y,script,./test.sh -f tsim/query/join_interval.sim
+,,y,script,./test.sh -f tsim/query/join_pk.sim
+,,y,script,./test.sh -f tsim/query/join_order.sim
+,,y,script,./test.sh -f tsim/query/count_spread.sim
+,,y,script,./test.sh -f tsim/query/unionall_as_table.sim
+,,y,script,./test.sh -f tsim/query/multi_order_by.sim
+,,y,script,./test.sh -f tsim/query/sys_tbname.sim
+,,y,script,./test.sh -f tsim/query/sort-pre-cols.sim
+,,y,script,./test.sh -f tsim/query/groupby.sim
+,,y,script,./test.sh -f tsim/query/groupby_distinct.sim
+,,y,script,./test.sh -f tsim/query/event.sim
+,,y,script,./test.sh -f tsim/query/forceFill.sim
+,,y,script,./test.sh -f tsim/query/emptyTsRange.sim
+,,y,script,./test.sh -f tsim/query/emptyTsRange_scl.sim
+,,y,script,./test.sh -f tsim/query/partitionby.sim
+,,y,script,./test.sh -f tsim/query/tableCount.sim
+,,y,script,./test.sh -f tsim/query/show_db_table_kind.sim
+,,y,script,./test.sh -f tsim/query/bi_star_table.sim
+,,y,script,./test.sh -f tsim/query/bi_tag_scan.sim
+,,y,script,./test.sh -f tsim/query/bi_tbname_col.sim
+,,y,script,./test.sh -f tsim/query/tag_scan.sim
+,,y,script,./test.sh -f tsim/query/nullColSma.sim
+,,y,script,./test.sh -f tsim/query/bug3398.sim
+,,y,script,./test.sh -f tsim/query/explain_tsorder.sim
+,,y,script,./test.sh -f tsim/query/apercentile.sim
+,,y,script,./test.sh -f tsim/query/query_count0.sim
+,,y,script,./test.sh -f tsim/query/query_count_sliding0.sim
+,,y,script,./test.sh -f tsim/query/union_precision.sim
+,,y,script,./test.sh -f tsim/qnode/basic1.sim
+,,y,script,./test.sh -f tsim/snode/basic1.sim
+,,y,script,./test.sh -f tsim/mnode/basic1.sim
+,,y,script,./test.sh -f tsim/mnode/basic2.sim
+#,,y,script,./test.sh -f tsim/mnode/basic3.sim
+,,y,script,./test.sh -f tsim/mnode/basic4.sim
+,,y,script,./test.sh -f tsim/mnode/basic5.sim
+,,y,script,./test.sh -f tsim/mnode/basic6.sim
+,,y,script,./test.sh -f tsim/show/basic.sim
+,,y,script,./test.sh -f tsim/table/autocreate.sim
+,,y,script,./test.sh -f tsim/table/basic1.sim
+,,y,script,./test.sh -f tsim/table/basic2.sim
+,,y,script,./test.sh -f tsim/table/basic3.sim
+,,y,script,./test.sh -f tsim/table/bigint.sim
+,,y,script,./test.sh -f tsim/table/binary.sim
+,,y,script,./test.sh -f tsim/table/bool.sim
+,,y,script,./test.sh -f tsim/table/column_name.sim
+,,y,script,./test.sh -f tsim/table/column_num.sim
+,,y,script,./test.sh -f tsim/table/column_value.sim
+,,y,script,./test.sh -f tsim/table/column2.sim
+,,y,script,./test.sh -f tsim/table/createmulti.sim
+,,y,script,./test.sh -f tsim/table/date.sim
+,,y,script,./test.sh -f tsim/table/db.table.sim
+,,y,script,./test.sh -f tsim/table/delete_reuse1.sim
+,,y,script,./test.sh -f tsim/table/delete_reuse2.sim
+,,y,script,./test.sh -f tsim/table/delete_writing.sim
+,,y,script,./test.sh -f tsim/table/describe.sim
+,,y,script,./test.sh -f tsim/table/double.sim
+,,y,script,./test.sh -f tsim/table/float.sim
+,,y,script,./test.sh -f tsim/table/hash.sim
+,,y,script,./test.sh -f tsim/table/int.sim
+,,y,script,./test.sh -f tsim/table/limit.sim
+,,y,script,./test.sh -f tsim/table/smallint.sim
+,,y,script,./test.sh -f tsim/table/table_len.sim
+,,y,script,./test.sh -f tsim/table/table.sim
+,,y,script,./test.sh -f tsim/table/tinyint.sim
+,,y,script,./test.sh -f tsim/table/vgroup.sim
+,,n,script,./test.sh -f tsim/stream/basic0.sim -g
+,,y,script,./test.sh -f tsim/stream/basic1.sim
+,,y,script,./test.sh -f tsim/stream/basic2.sim
+,,y,script,./test.sh -f tsim/stream/basic3.sim
+,,y,script,./test.sh -f tsim/stream/basic4.sim
+,,y,script,./test.sh -f tsim/stream/basic5.sim
+,,y,script,./test.sh -f tsim/stream/tag.sim
+,,y,script,./test.sh -f tsim/stream/snodeCheck.sim
+,,y,script,./test.sh -f tsim/stream/concurrentcheckpt.sim
+,,y,script,./test.sh -f tsim/stream/checkpointInterval0.sim
+,,y,script,./test.sh -f tsim/stream/checkStreamSTable1.sim
+,,y,script,./test.sh -f tsim/stream/checkStreamSTable.sim
+,,y,script,./test.sh -f tsim/stream/count0.sim
+,,y,script,./test.sh -f tsim/stream/count1.sim
+,,y,script,./test.sh -f tsim/stream/count2.sim
+,,y,script,./test.sh -f tsim/stream/count3.sim
+,,y,script,./test.sh -f tsim/stream/countSliding0.sim
+,,y,script,./test.sh -f tsim/stream/countSliding1.sim
+,,y,script,./test.sh -f tsim/stream/countSliding2.sim
+,,y,script,./test.sh -f tsim/stream/deleteInterval.sim
+,,y,script,./test.sh -f tsim/stream/deleteScalar.sim
+,,y,script,./test.sh -f tsim/stream/deleteSession.sim
+,,y,script,./test.sh -f tsim/stream/deleteState.sim
+,,y,script,./test.sh -f tsim/stream/distributeInterval0.sim
+,,y,script,./test.sh -f tsim/stream/distributeIntervalRetrive0.sim
+,,y,script,./test.sh -f tsim/stream/distributeMultiLevelInterval0.sim
+,,y,script,./test.sh -f tsim/stream/distributeSession0.sim
+,,y,script,./test.sh -f tsim/stream/drop_stream.sim
+,,y,script,./test.sh -f tsim/stream/event0.sim
+,,y,script,./test.sh -f tsim/stream/event1.sim
+,,y,script,./test.sh -f tsim/stream/event2.sim
+,,y,script,./test.sh -f tsim/stream/fillHistoryBasic1.sim
+,,y,script,./test.sh -f tsim/stream/fillHistoryBasic2.sim
+,,y,script,./test.sh -f tsim/stream/fillHistoryBasic3.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalDelete0.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalDelete1.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalLinear.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalPartitionBy.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalPrevNext1.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalPrevNext.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalRange.sim
+,,y,script,./test.sh -f tsim/stream/fillIntervalValue.sim
+,,y,script,./test.sh -f tsim/stream/ignoreCheckUpdate.sim
+,,y,script,./test.sh -f tsim/stream/ignoreExpiredData.sim
+,,y,script,./test.sh -f tsim/stream/partitionby1.sim
+,,y,script,./test.sh -f tsim/stream/partitionbyColumnInterval.sim
+,,y,script,./test.sh -f tsim/stream/partitionbyColumnOther.sim
+,,y,script,./test.sh -f tsim/stream/partitionbyColumnSession.sim
+,,y,script,./test.sh -f tsim/stream/partitionbyColumnState.sim
+,,y,script,./test.sh -f tsim/stream/partitionby.sim
+,,y,script,./test.sh -f tsim/stream/pauseAndResume.sim
+,,y,script,./test.sh -f tsim/stream/schedSnode.sim
+,,y,script,./test.sh -f tsim/stream/session0.sim
+,,y,script,./test.sh -f tsim/stream/session1.sim
+,,y,script,./test.sh -f tsim/stream/sliding.sim
+,,y,script,./test.sh -f tsim/stream/state0.sim
+,,y,script,./test.sh -f tsim/stream/state1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpDelete0.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpDelete1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpDelete2.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpError.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpForceWindowClose.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpForceWindowClose1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpFwcError.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpHistory.sim
+#,,y,script,./test.sh -f tsim/stream/streamInterpHistory1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpLarge.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpLinear0.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpNext0.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpOther.sim
+#,,y,script,./test.sh -f tsim/stream/streamInterpOther1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpPartitionBy0.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpPartitionBy1.sim
+#,,y,script,./test.sh -f tsim/stream/streamInterpPrev0.sim
+#,,y,script,./test.sh -f tsim/stream/streamInterpPrev1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey0.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey2.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpPrimaryKey3.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpUpdate.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpUpdate1.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpUpdate2.sim
+,,y,script,./test.sh -f tsim/stream/streamInterpValue0.sim
+,,y,script,./test.sh -f tsim/stream/streamPrimaryKey0.sim
+,,y,script,./test.sh -f tsim/stream/streamPrimaryKey1.sim
+,,y,script,./test.sh -f tsim/stream/streamPrimaryKey2.sim
+,,y,script,./test.sh -f tsim/stream/streamPrimaryKey3.sim
+,,y,script,./test.sh -f tsim/stream/streamTwaError.sim
+,,y,script,./test.sh -f tsim/stream/streamTwaFwcFill.sim
+,,y,script,./test.sh -f tsim/stream/streamTwaFwcFillPrimaryKey.sim
+,,y,script,./test.sh -f tsim/stream/streamTwaFwcIntervalPrimaryKey.sim
+,,y,script,./test.sh -f tsim/stream/streamTwaInterpFwc.sim
+,,y,script,./test.sh -f tsim/stream/triggerInterval0.sim
+,,y,script,./test.sh -f tsim/stream/triggerSession0.sim
+,,y,script,./test.sh -f tsim/stream/udTableAndCol0.sim
+,,y,script,./test.sh -f tsim/stream/udTableAndTag0.sim
+,,y,script,./test.sh -f tsim/stream/udTableAndTag1.sim
+,,y,script,./test.sh -f tsim/stream/udTableAndTag2.sim
+,,y,script,./test.sh -f tsim/stream/windowClose.sim
+,,y,script,./test.sh -f tsim/trans/lossdata1.sim
+,,y,script,./test.sh -f tsim/tmq/basic1.sim
+,,y,script,./test.sh -f tsim/tmq/basic2.sim
+,,y,script,./test.sh -f tsim/tmq/basic3.sim
+,,y,script,./test.sh -f tsim/tmq/basic4.sim
+,,y,script,./test.sh -f tsim/tmq/basic1Of2Cons.sim
+,,y,script,./test.sh -f tsim/tmq/basic2Of2Cons.sim
+,,y,script,./test.sh -f tsim/tmq/basic3Of2Cons.sim
+,,y,script,./test.sh -f tsim/tmq/basic4Of2Cons.sim
+,,y,script,./test.sh -f tsim/tmq/topic.sim
+,,y,script,./test.sh -f tsim/tmq/snapshot.sim
+,,y,script,./test.sh -f tsim/tmq/snapshot1.sim
+,,y,script,./test.sh -f tsim/stable/alter_comment.sim
+,,y,script,./test.sh -f tsim/stable/alter_count.sim
+,,y,script,./test.sh -f tsim/stable/alter_import.sim
+,,y,script,./test.sh -f tsim/stable/alter_insert1.sim
+,,y,script,./test.sh -f tsim/stable/alter_insert2.sim
+,,y,script,./test.sh -f tsim/stable/alter_metrics.sim
+,,y,script,./test.sh -f tsim/stable/column_add.sim
+,,y,script,./test.sh -f tsim/stable/column_drop.sim
+,,y,script,./test.sh -f tsim/stable/column_modify.sim
+,,y,script,./test.sh -f tsim/stable/disk.sim
+,,y,script,./test.sh -f tsim/stable/dnode3.sim
+,,y,script,./test.sh -f tsim/stable/metrics.sim
+,,y,script,./test.sh -f tsim/stable/refcount.sim
+,,y,script,./test.sh -f tsim/stable/tag_add.sim
+,,y,script,./test.sh -f tsim/stable/tag_drop.sim
+,,y,script,./test.sh -f tsim/stable/tag_filter.sim
+,,y,script,./test.sh -f tsim/stable/tag_modify.sim
+,,y,script,./test.sh -f tsim/stable/tag_rename.sim
+,,y,script,./test.sh -f tsim/stable/values.sim
+,,y,script,./test.sh -f tsim/stable/vnode3.sim
+,,n,script,./test.sh -f tsim/sma/drop_sma.sim
+,,y,script,./test.sh -f tsim/sma/sma_leak.sim
+,,y,script,./test.sh -f tsim/sma/tsmaCreateInsertQuery.sim
+,,y,script,./test.sh -f tsim/sma/rsmaCreateInsertQuery.sim
+,,y,script,./test.sh -f tsim/sma/rsmaCreateInsertQueryDelete.sim
+
+### refactor stream backend, open case after rsma refactored
+#,,y,script,./test.sh -f tsim/sma/rsmaPersistenceRecovery.sim
+,,y,script,./test.sh -f tsim/sync/vnodesnapshot-rsma-test.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError1.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError2.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError3.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError4.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError5.sim
+,,n,script,./test.sh -f tsim/valgrind/checkError8.sim
+,,n,script,./test.sh -f tsim/valgrind/checkUdf.sim
+,,y,script,./test.sh -f tsim/vnode/replica3_basic.sim
+,,y,script,./test.sh -f tsim/vnode/replica3_vgroup.sim
+,,y,script,./test.sh -f tsim/vnode/replica3_import.sim
+,,y,script,./test.sh -f tsim/vnode/stable_balance_replica1.sim
+,,y,script,./test.sh -f tsim/vnode/stable_dnode2_stop.sim
+,,y,script,./test.sh -f tsim/vnode/stable_dnode2.sim
+,,y,script,./test.sh -f tsim/vnode/stable_dnode3.sim
+,,y,script,./test.sh -f tsim/vnode/stable_replica3_dnode6.sim
+,,y,script,./test.sh -f tsim/vnode/stable_replica3_vnode3.sim
+,,y,script,./test.sh -f tsim/sync/oneReplica1VgElect.sim
+,,y,script,./test.sh -f tsim/sync/oneReplica5VgElect.sim
+,,y,script,./test.sh -f tsim/catalog/alterInCurrent.sim
+,,y,script,./test.sh -f tsim/scalar/in.sim
+,,y,script,./test.sh -f tsim/scalar/scalar.sim
+,,y,script,./test.sh -f tsim/scalar/filter.sim
+,,y,script,./test.sh -f tsim/scalar/caseWhen.sim
+,,y,script,./test.sh -f tsim/scalar/tsConvert.sim
+,,y,script,./test.sh -f tsim/alter/cached_schema_after_alter.sim
+,,y,script,./test.sh -f tsim/alter/dnode.sim
+,,y,script,./test.sh -f tsim/alter/table.sim
+,,y,script,./test.sh -f tsim/cache/new_metrics.sim
+,,y,script,./test.sh -f tsim/cache/restart_table.sim
+,,y,script,./test.sh -f tsim/cache/restart_metrics.sim
+,,y,script,./test.sh -f tsim/column/commit.sim
+,,y,script,./test.sh -f tsim/column/metrics.sim
+,,y,script,./test.sh -f tsim/column/table.sim
+,,y,script,./test.sh -f tsim/compress/commitlog.sim
+,,y,script,./test.sh -f tsim/compress/compress2.sim
+,,y,script,./test.sh -f tsim/compress/compress.sim
+,,y,script,./test.sh -f tsim/compress/compress_col.sim
+,,y,script,./test.sh -f tsim/compress/uncompress.sim
+,,y,script,./test.sh -f tsim/compute/avg.sim
+,,y,script,./test.sh -f tsim/compute/block_dist.sim
+,,y,script,./test.sh -f tsim/compute/bottom.sim
+,,y,script,./test.sh -f tsim/compute/count.sim
+,,y,script,./test.sh -f tsim/compute/diff.sim
+,,y,script,./test.sh -f tsim/compute/diff2.sim
+,,y,script,./test.sh -f tsim/compute/first.sim
+,,y,script,./test.sh -f tsim/compute/interval.sim
+,,y,script,./test.sh -f tsim/compute/interval1.sim
+,,y,script,./test.sh -f tsim/compute/last_row.sim
+,,y,script,./test.sh -f tsim/compute/last.sim
+,,y,script,./test.sh -f tsim/compute/leastsquare.sim
+,,y,script,./test.sh -f tsim/compute/max.sim
+,,y,script,./test.sh -f tsim/compute/min.sim
+,,y,script,./test.sh -f tsim/compute/null.sim
+,,y,script,./test.sh -f tsim/compute/percentile.sim
+,,y,script,./test.sh -f tsim/compute/stddev.sim
+,,y,script,./test.sh -f tsim/compute/sum.sim
+,,y,script,./test.sh -f tsim/compute/top.sim
+,,y,script,./test.sh -f tsim/compute/disk_usage.sim
+,,y,script,./test.sh -f tsim/field/2.sim
+,,y,script,./test.sh -f tsim/field/3.sim
+,,y,script,./test.sh -f tsim/field/4.sim
+,,y,script,./test.sh -f tsim/field/5.sim
+,,y,script,./test.sh -f tsim/field/6.sim
+,,y,script,./test.sh -f tsim/field/binary.sim
+,,y,script,./test.sh -f tsim/field/bigint.sim
+,,y,script,./test.sh -f tsim/field/bool.sim
+,,y,script,./test.sh -f tsim/field/double.sim
+,,y,script,./test.sh -f tsim/field/float.sim
+,,y,script,./test.sh -f tsim/field/int.sim
+,,y,script,./test.sh -f tsim/field/single.sim
+,,y,script,./test.sh -f tsim/field/smallint.sim
+,,y,script,./test.sh -f tsim/field/tinyint.sim
+,,y,script,./test.sh -f tsim/field/unsigined_bigint.sim
+,,y,script,./test.sh -f tsim/vector/metrics_field.sim
+,,y,script,./test.sh -f tsim/vector/metrics_mix.sim
+,,y,script,./test.sh -f tsim/vector/metrics_query.sim
+,,y,script,./test.sh -f tsim/vector/metrics_tag.sim
+,,y,script,./test.sh -f tsim/vector/metrics_time.sim
+,,y,script,./test.sh -f tsim/vector/multi.sim
+,,y,script,./test.sh -f tsim/vector/single.sim
+,,y,script,./test.sh -f tsim/vector/table_field.sim
+,,y,script,./test.sh -f tsim/vector/table_mix.sim
+,,y,script,./test.sh -f tsim/vector/table_query.sim
+,,y,script,./test.sh -f tsim/vector/table_time.sim
+,,y,script,./test.sh -f tsim/wal/kill.sim
+,,y,script,./test.sh -f tsim/tag/3.sim
+,,y,script,./test.sh -f tsim/tag/4.sim
+,,y,script,./test.sh -f tsim/tag/5.sim
+,,y,script,./test.sh -f tsim/tag/6.sim
+,,y,script,./test.sh -f tsim/tag/add.sim
+,,y,script,./test.sh -f tsim/tag/bigint.sim
+,,y,script,./test.sh -f tsim/tag/binary_binary.sim
+,,y,script,./test.sh -f tsim/tag/binary.sim
+,,y,script,./test.sh -f tsim/tag/bool_binary.sim
+,,y,script,./test.sh -f tsim/tag/bool_int.sim
+,,y,script,./test.sh -f tsim/tag/bool.sim
+,,y,script,./test.sh -f tsim/tag/change.sim
+,,y,script,./test.sh -f tsim/tag/column.sim
+,,y,script,./test.sh -f tsim/tag/commit.sim
+,,y,script,./test.sh -f tsim/tag/create.sim
+,,y,script,./test.sh -f tsim/tag/delete.sim
+,,y,script,./test.sh -f tsim/tag/double.sim
+,,y,script,./test.sh -f tsim/tag/filter.sim
+,,y,script,./test.sh -f tsim/tag/float.sim
+,,y,script,./test.sh -f tsim/tag/int_binary.sim
+,,y,script,./test.sh -f tsim/tag/int_float.sim
+,,y,script,./test.sh -f tsim/tag/int.sim
+,,y,script,./test.sh -f tsim/tag/set.sim
+,,y,script,./test.sh -f tsim/tag/smallint.sim
+,,y,script,./test.sh -f tsim/tag/tinyint.sim
+,,y,script,./test.sh -f tsim/tag/drop_tag.sim
+,,y,script,./test.sh -f tsim/tag/tbNameIn.sim
+,,y,script,./test.sh -f tsim/tag/change_multi_tag.sim
+,,y,script,./test.sh -f tmp/monitor.sim
+,,y,script,./test.sh -f tsim/tagindex/add_index.sim
+,,n,script,./test.sh -f tsim/tagindex/sma_and_tag_index.sim
+,,y,script,./test.sh -f tsim/tagindex/indexOverflow.sim
+,,y,script,./test.sh -f tsim/view/view.sim
+,,y,script,./test.sh -f tsim/query/cache_last.sim
+,,y,script,./test.sh -f tsim/query/const.sim
+,,y,script,./test.sh -f tsim/query/nestedJoinView.sim
+
+
+
+#develop test
+,,n,develop-test,python3 ./test.py -f 2-query/table_count_scan.py
+,,n,develop-test,python3 ./test.py -f 2-query/pseudo_column.py
+,,n,develop-test,python3 ./test.py -f 2-query/ts-range.py
+,,n,develop-test,python3 ./test.py -f 2-query/tag_scan.py
+,,n,develop-test,python3 ./test.py -f 2-query/show_create_db.py
From e6e1d3ff1c58c2cdd5d4667f36d2cd831b41d9d9 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 17 Feb 2025 16:35:08 +0800
Subject: [PATCH 028/105] feat/TS-5927-long-password-fix-case
---
source/libs/parser/src/parAstCreater.c | 9 ++++++++-
tests/army/cluster/strongPassword.py | 13 -------------
tests/script/tsim/user/password.sim | 13 ++++++-------
3 files changed, 14 insertions(+), 21 deletions(-)
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index fe6b87f79f..d6b976c077 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -3062,7 +3062,14 @@ static int32_t fillIpRangesFromWhiteList(SAstCreateContext* pCxt, SNodeList* pIp
}
SNode* addCreateUserStmtWhiteList(SAstCreateContext* pCxt, SNode* pCreateUserStmt, SNodeList* pIpRangesNodeList) {
- if (NULL == pCreateUserStmt || NULL == pIpRangesNodeList) {
+ if (NULL == pCreateUserStmt) {
+ if (pIpRangesNodeList != NULL) {
+ nodesDestroyList(pIpRangesNodeList);
+ }
+ return NULL;
+ }
+
+ if (NULL == pIpRangesNodeList) {
return pCreateUserStmt;
}
diff --git a/tests/army/cluster/strongPassword.py b/tests/army/cluster/strongPassword.py
index 59922954d1..fb661b5f24 100644
--- a/tests/army/cluster/strongPassword.py
+++ b/tests/army/cluster/strongPassword.py
@@ -35,19 +35,6 @@ class TDTestCase(TBase):
tdSql.execute("alter user test pass '23456789@Abc';")
- #move from password.sim
- tdSql.execute("CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';")
-
- tdSql.error("CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
-
- tdSql.error("CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Password too short or empty")
-
- tdSql.error("CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Invalid password")
-
- tdSql.error("CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';", expectErrInfo="Password too short or empty")
-
- tdSql.error("CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';", expectErrInfo="Password too short or empty")
-
# change setting
tdSql.execute("ALTER ALL DNODES 'enableStrongPassword' '0'")
diff --git a/tests/script/tsim/user/password.sim b/tests/script/tsim/user/password.sim
index cd6c124413..0efd1a0158 100644
--- a/tests/script/tsim/user/password.sim
+++ b/tests/script/tsim/user/password.sim
@@ -271,13 +271,12 @@ sql create user u25 pass 'taosdata1~'
sql create user u26 pass 'taosdata1,'
sql create user u27 pass 'taosdata1.'
-#move case with host to strongPassword.py becase tsim have a memory leak error when using Host
-sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1;
-sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0;
-sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1;
-sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0;
-sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1';
-sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0;
+sql CREATE USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest2` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest3` PASS '2729c41' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest4` PASS '2729c417' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest5` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 1 HOST '127.0.0.1';
+sql_error CREATE USER `_xTest6` PASS '2xF' SYSINFO 1 CREATEDB 0 IS_IMPORT 0 HOST '127.0.0.1';
sql_error alter USER `_xTest1` PASS '2729c41a99b2c5222aa7dd9fc1ce3de7';
From 71e38429d6f108b596c6d0b2fa1e71b193e3a829 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Tue, 18 Feb 2025 04:01:38 +0000
Subject: [PATCH 029/105] feat/TS-5927-long-password-docs
---
docs/en/08-operation/14-user.md | 5 ++++-
docs/en/14-reference/03-taos-sql/19-limit.md | 2 +-
docs/zh/08-operation/14-user.md | 5 ++++-
docs/zh/14-reference/03-taos-sql/19-limit.md | 2 +-
4 files changed, 10 insertions(+), 4 deletions(-)
diff --git a/docs/en/08-operation/14-user.md b/docs/en/08-operation/14-user.md
index 5aa8b2e211..8e5fd6b59e 100644
--- a/docs/en/08-operation/14-user.md
+++ b/docs/en/08-operation/14-user.md
@@ -18,7 +18,10 @@ create user user_name pass'password' [sysinfo {1|0}] [createdb {1|0}]
The parameters are explained as follows.
- user_name: Up to 23 B long.
-- password: The password must be between 8 and 16 characters long and include at least three types of characters from the following: uppercase letters, lowercase letters, numbers, and special characters. Special characters include `! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? | ~ , .`.
+- password: The password must be between 8 and 255 characters long and must include at least three of the following character types: uppercase letters, lowercase letters, numbers, and special characters (`! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? | ~ , .`). This requirement can be disabled by adding enableStrongPassword 0 to taos.cfg, or with the following SQL (a usage sketch follows this parameter list):
+```sql
+alter all dnodes 'EnableStrongPassword' '0'
+```
- sysinfo: Whether the user can view system information. 1 means they can view it, 0 means they cannot. System information includes server configuration information, various node information such as dnode, query node (qnode), etc., as well as storage-related information, etc. The default is to view system information.
- createdb: Whether the user can create databases. 1 means they can create databases, 0 means they cannot. The default value is 0. // Supported starting from TDengine Enterprise version 3.3.2.0
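For reference, a minimal usage sketch of the relaxed password length through the Python connector. This is not part of the patch; it assumes the `taos` connector package, a dnode built from this series, and made-up user and password values.

```python
# Minimal sketch: create a user with a password longer than the old 16-character cap.
# Assumes the `taos` Python connector and a locally running dnode from this patch series.
import taos

conn = taos.connect(host="localhost", user="root", password="taosdata")
cursor = conn.cursor()

# 32-character password (allowed range is now 8..255); it still satisfies the
# default three-character-type rule, so no configuration change is required.
cursor.execute("CREATE USER demo_user PASS 'Abcd1234@Abcd1234@Abcd1234@Abcd1'")

# Optionally relax the complexity rule cluster-wide (same statement as above).
cursor.execute("ALTER ALL DNODES 'enableStrongPassword' '0'")

cursor.close()
conn.close()
```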
diff --git a/docs/en/14-reference/03-taos-sql/19-limit.md b/docs/en/14-reference/03-taos-sql/19-limit.md
index ceed6e828a..23a1448a55 100644
--- a/docs/en/14-reference/03-taos-sql/19-limit.md
+++ b/docs/en/14-reference/03-taos-sql/19-limit.md
@@ -37,6 +37,6 @@ Removed `` ‘“`\ `` (single and double quotes, apostrophe, backslash, space)
- Number of databases, supertables, and tables are not limited by the system, only by system resources
- Number of replicas for a database can only be set to 1 or 3
- Maximum length of username is 23 bytes
-- Maximum length of user password is 31 bytes
+- Maximum length of user password is 255 bytes
- Total number of data rows depends on available resources
- Maximum number of virtual nodes for a single database is 1024
diff --git a/docs/zh/08-operation/14-user.md b/docs/zh/08-operation/14-user.md
index 3a080619aa..bcea85bd71 100644
--- a/docs/zh/08-operation/14-user.md
+++ b/docs/zh/08-operation/14-user.md
@@ -17,7 +17,10 @@ create user user_name pass'password' [sysinfo {1|0}] [createdb {1|0}]
相关参数说明如下。
- user_name:用户名最长不超过 23 个字节。
-- password:密码长度必须为 8 到 16 位,并且至少包含大写字母、小写字母、数字、特殊字符中的三类。特殊字符包括 `! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? | ~ , .`。(始自 3.3.5.0 版本)
+- password:密码长度必须为 8 到 255 。密码要符合一个要求:至少包含大写字母、小写字母、数字、特殊字符中的三类。特殊字符包括 `! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? | ~ , .`(始自 3.3.5.0 版本),可以通过在taos.cfg中添加参数enableStrongPassword 0关闭这个强制要求,或者通过如下SQL关闭这个强制要求(始自 3.3.6.0 版本)。
+```sql
+alter all dnodes 'EnableStrongPassword' '0'
+```
- sysinfo :用户是否可以查看系统信息。1 表示可以查看,0 表示不可以查看。系统信息包括服务端配置信息、服务端各种节点信息,如 dnode、查询节点(qnode)等,以及与存储相关的信息等。默认为可以查看系统信息。
- createdb:用户是否可以创建数据库。1 表示可以创建,0 表示不可以创建。缺省值为 0。// 从 TDengine 企业版 3.3.2.0 开始支持
diff --git a/docs/zh/14-reference/03-taos-sql/19-limit.md b/docs/zh/14-reference/03-taos-sql/19-limit.md
index e5c03db2fd..27143ddb53 100644
--- a/docs/zh/14-reference/03-taos-sql/19-limit.md
+++ b/docs/zh/14-reference/03-taos-sql/19-limit.md
@@ -37,6 +37,6 @@ description: 合法字符集和命名中的限制规则
- 库的数目,超级表的数目、表的数目,系统不做限制,仅受系统资源限制
- 数据库的副本数只能设置为 1 或 3
- 用户名的最大长度是 23 字节
-- 用户密码的长度范围是 8-16 字节
+- 用户密码的长度范围是 8-255 字节
- 总数据行数取决于可用资源
- 单个数据库的虚拟结点数上限为 1024
From 4fe7ed9a752897738aaa0f90e36296c6a7300e08 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 20 Feb 2025 18:02:04 +0800
Subject: [PATCH 030/105] enh: add csv-related parameters
---
tools/taos-tools/inc/bench.h | 6 +++-
tools/taos-tools/src/benchJsonOpt.c | 44 ++++++++++++++++++-----------
2 files changed, 33 insertions(+), 17 deletions(-)
diff --git a/tools/taos-tools/inc/bench.h b/tools/taos-tools/inc/bench.h
index d47bafbaf0..968f91d493 100644
--- a/tools/taos-tools/inc/bench.h
+++ b/tools/taos-tools/inc/bench.h
@@ -780,7 +780,11 @@ typedef struct SArguments_S {
bool mistMode;
bool escape_character;
bool pre_load_tb_meta;
- char csvPath[MAX_FILE_NAME_LEN];
+
+ char* csv_output_dir;
+ char* csv_file_prefix;
+ char* csv_ts_format;
+ char* csv_ts_interval;
bool bind_vgroup;
} SArguments;
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index 3e41908668..d77306682e 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1586,25 +1586,37 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
}
}
- g_arguments->csvPath[0] = 0;
- tools_cJSON *csv = tools_cJSON_GetObjectItem(json, "csvPath");
- if (csv && (csv->type == tools_cJSON_String)
- && (csv->valuestring != NULL)) {
- tstrncpy(g_arguments->csvPath, csv->valuestring, MAX_FILE_NAME_LEN);
+ // csv output dir
+ tools_cJSON* csv_od = tools_cJSON_GetObjectItem(json, "csv_output_dir");
+ if (csv_od && csv_od->type == tools_cJSON_String && csv_od->valuestring != NULL) {
+ g_arguments->csv_output_dir = csv_od->valuestring;
+ } else {
+ g_arguments->csv_output_dir = "./output/";
+ }
+ (void)mkdir(g_arguments->csv_output_dir, 0775);
+
+ // csv file prefix
+ tools_cJSON* csv_fp = tools_cJSON_GetObjectItem(json, "csv_file_prefix");
+ if (csv_fp && csv_fp->type == tools_cJSON_String && csv_fp->valuestring != NULL) {
+ g_arguments->csv_file_prefix = csv_fp->valuestring;
+ } else {
+ g_arguments->csv_file_prefix = "data";
}
- size_t len = strlen(g_arguments->csvPath);
-
- if(len == 0) {
- // set default with current path
- strcpy(g_arguments->csvPath, "./output/");
- mkdir(g_arguments->csvPath, 0775);
+ // csv timestamp format
+ tools_cJSON* csv_tf = tools_cJSON_GetObjectItem(json, "csv_ts_format");
+ if (csv_tf && csv_tf->type == tools_cJSON_String && csv_tf->valuestring != NULL) {
+ g_arguments->csv_ts_format = csv_tf->valuestring;
} else {
- // append end
- if (g_arguments->csvPath[len-1] != '/' ) {
- strcat(g_arguments->csvPath, "/");
- }
- mkdir(g_arguments->csvPath, 0775);
+ g_arguments->csv_ts_format = "YYYYMMDDHHmmSS";
+ }
+
+ // csv timestamp format
+ tools_cJSON* csv_ti = tools_cJSON_GetObjectItem(json, "csv_ts_interval");
+ if (csv_ti && csv_ti->type == tools_cJSON_String && csv_ti->valuestring != NULL) {
+ g_arguments->csv_ts_interval = csv_ti->valuestring;
+ } else {
+ g_arguments->csv_ts_interval = "1d";
}
code = 0;
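For reference, the four keys parsed in the hunk above could appear in a taosBenchmark JSON configuration as shown below. This is only an illustrative fragment, not a complete configuration; the values shown are simply the defaults this patch falls back to when a key is missing.

```json
{
    "csv_output_dir": "./output/",
    "csv_file_prefix": "data",
    "csv_ts_format": "YYYYMMDDHHmmSS",
    "csv_ts_interval": "1d"
}
```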
From b93428432c56043c7321089cce11ddd597ebffb0 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Fri, 21 Feb 2025 17:04:41 +0800
Subject: [PATCH 031/105] enh: csv-related parameters validity check
---
tools/taos-tools/inc/bench.h | 4 +-
tools/taos-tools/src/benchCsv.c | 264 ++++++++++++++++++++--------
tools/taos-tools/src/benchJsonOpt.c | 12 +-
tools/taos-tools/src/benchMain.c | 2 +-
4 files changed, 205 insertions(+), 77 deletions(-)
diff --git a/tools/taos-tools/inc/bench.h b/tools/taos-tools/inc/bench.h
index 968f91d493..caabd39d3b 100644
--- a/tools/taos-tools/inc/bench.h
+++ b/tools/taos-tools/inc/bench.h
@@ -781,10 +781,12 @@ typedef struct SArguments_S {
bool escape_character;
bool pre_load_tb_meta;
- char* csv_output_dir;
+ char* csv_output_path;
+ char csv_output_path_buf[MAX_PATH_LEN];
char* csv_file_prefix;
char* csv_ts_format;
char* csv_ts_interval;
+ long csv_ts_intv_secs;
bool bind_vgroup;
} SArguments;
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 8186438643..6c08f1281b 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -10,6 +10,11 @@
* FITNESS FOR A PARTICULAR PURPOSE.
*/
+#include
+#include
+#include
+#include
+
#include
#include "benchLog.h"
#include
@@ -22,73 +27,6 @@
#define SHOW_CNT 100000
-static void *csvWriteThread(void *param) {
- // write thread
- for (int i = 0; i < g_arguments->databases->size; i++) {
- // database
- SDataBase * db = benchArrayGet(g_arguments->databases, i);
- for (int j=0; j < db->superTbls->size; j++) {
- // stb
- SSuperTable* stb = benchArrayGet(db->superTbls, j);
- // gen csv
- int ret = genWithSTable(db, stb, g_arguments->csvPath);
- if(ret != 0) {
- errorPrint("failed generate to csv. db=%s stb=%s error code=%d \n", db->dbName, stb->stbName, ret);
- return NULL;
- }
- }
- }
- return NULL;
-}
-
-int csvTestProcess() {
- pthread_t handle;
- int ret = pthread_create(&handle, NULL, csvWriteThread, NULL);
- if (ret != 0) {
- errorPrint("pthread_create failed. error code =%d \n", ret);
- return -1;
- }
-
- infoPrint("start output to csv %s ...\n", g_arguments->csvPath);
- int64_t start = toolsGetTimestampMs();
- pthread_join(handle, NULL);
- int64_t delay = toolsGetTimestampMs() - start;
- infoPrint("output to csv %s finished. delay:%"PRId64"s \n", g_arguments->csvPath, delay/1000);
-
- return 0;
-}
-
-int genWithSTable(SDataBase* db, SSuperTable* stb, char* outDir) {
- // filename
- int ret = 0;
- char outFile[MAX_FILE_NAME_LEN] = {0};
- obtainCsvFile(outFile, db, stb, outDir);
- FILE * fs = fopen(outFile, "w");
- if(fs == NULL) {
- errorPrint("failed create csv file. file=%s, last errno=%d strerror=%s \n", outFile, errno, strerror(errno));
- return -1;
- }
-
- int rowLen = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->lenOfCols + stb->tags->size + stb->cols->size;
- int bufLen = rowLen * g_arguments->reqPerReq;
- char* buf = benchCalloc(1, bufLen, true);
-
- infoPrint("start write csv file: %s \n", outFile);
-
- if (stb->interlaceRows > 0) {
- // interlace mode
- ret = interlaceWriteCsv(db, stb, fs, buf, bufLen, rowLen * 2);
- } else {
- // batch mode
- ret = batchWriteCsv(db, stb, fs, buf, bufLen, rowLen * 2);
- }
-
- tmfree(buf);
- fclose(fs);
-
- succPrint("end write csv file: %s \n", outFile);
- return ret;
-}
void obtainCsvFile(char * outFile, SDataBase* db, SSuperTable* stb, char* outDir) {
@@ -125,7 +63,7 @@ int batchWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int bufL
for(int64_t j = 0; j < stb->insertRows; j++) {
genColumnData(colData, stb, ts, db->precision, &ck);
// combine
- pos += sprintf(buf + pos, "%s,%s\n", tagData, colData);
+ pos += sprintf(buf + pos, "%s,%s.\n", tagData, colData);
if (bufLen - pos < minRemain) {
// submit
ret = writeCsvFile(fs, buf, pos);
@@ -197,7 +135,7 @@ int interlaceWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int
for (int64_t j = 0; j < needInserts; j++) {
genColumnData(colData, stb, ts, db->precision, &ck);
// combine tags,cols
- pos += sprintf(buf + pos, "%s,%s\n", tagDatas[i], colData);
+ pos += sprintf(buf + pos, "%s,%s.\n", tagDatas[i], colData);
if (bufLen - pos < minRemain) {
// submit
ret = writeCsvFile(fs, buf, pos);
@@ -300,3 +238,191 @@ int32_t genRowByField(char* buf, BArray* fields, int16_t fieldCnt, char* binanry
return pos1;
}
+
+
+int genWithSTable(SDataBase* db, SSuperTable* stb) {
+
+
+
+
+ int ret = 0;
+ char outFile[MAX_FILE_NAME_LEN] = {0};
+ obtainCsvFile(outFile, db, stb, outDir);
+ FILE * fs = fopen(outFile, "w");
+ if(fs == NULL) {
+ errorPrint("failed create csv file. file=%s, last errno=%d strerror=%s \n", outFile, errno, strerror(errno));
+ return -1;
+ }
+
+ int rowLen = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->lenOfCols + stb->tags->size + stb->cols->size;
+ int bufLen = rowLen * g_arguments->reqPerReq;
+ char* buf = benchCalloc(1, bufLen, true);
+
+ infoPrint("start write csv file: %s \n", outFile);
+
+ if (stb->interlaceRows > 0) {
+ // interlace mode
+ ret = interlaceWriteCsv(db, stb, fs, buf, bufLen, rowLen * 2);
+ } else {
+ // batch mode
+ ret = batchWriteCsv(db, stb, fs, buf, bufLen, rowLen * 2);
+ }
+
+ tmfree(buf);
+ fclose(fs);
+
+ succPrint("end write csv file: %s \n", outFile);
+ return ret;
+}
+
+
+static int is_valid_csv_ts_format(const char* csv_ts_format) {
+ if (!csv_ts_format) return 0;
+
+ struct tm test_tm = {
+ .tm_year = 70,
+ .tm_mon = 0,
+ .tm_mday = 1,
+ .tm_hour = 0,
+ .tm_min = 0,
+ .tm_sec = 0,
+ .tm_isdst = -1
+ };
+ mktime(&test_tm);
+
+ char buffer[1024];
+ size_t len = strftime(buffer, sizeof(buffer), csv_ts_format, &test_tm);
+ if (len == 0) {
+ return -1;
+ }
+
+ const char* invalid_chars = "/\\:*?\"<>|";
+ if (strpbrk(buffer, invalid_chars) != NULL) {
+ return -1;
+ }
+
+ return 0;
+}
+
+
+static long validate_csv_ts_interval(const char* csv_ts_interval) {
+ if (!csv_ts_interval || *csv_ts_interval == '\0') return -1;
+
+ char* endptr;
+ errno = 0;
+ const long num = strtol(csv_ts_interval, &endptr, 10);
+
+ if (errno == ERANGE ||
+ endptr == csv_ts_interval ||
+ num <= 0) {
+ return -1;
+ }
+
+ if (*endptr == '\0' ||
+ *(endptr + 1) != '\0') {
+ return -1;
+ }
+
+ switch (tolower(*endptr)) {
+ case 's': return num;
+ case 'm': return num * 60;
+ case 'h': return num * 60 * 60;
+ case 'd': return num * 60 * 60 * 24;
+ default : return -1;
+ }
+}
+
+
+static int csvParseParameter() {
+ // csv_output_path
+ {
+ size_t len = strlen(g_arguments->csv_output_path);
+ if (len == 0) {
+ errorPrint("Failed to generate CSV, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
+ db->dbName, stb->stbName);
+ return -1;
+ }
+ if (g_arguments->csv_output_path[len - 1] != '/') {
+ int n = snprintf(g_arguments->csv_output_path_buf, sizeof(g_arguments->csv_output_path_buf), "%s/", g_arguments->csv_output_path);
+ if (n < 0 || n >= sizeof(g_arguments->csv_output_path_buf)) {
+ errorPrint("Failed to generate CSV, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
+ g_arguments->csv_output_path, db->dbName, stb->stbName);
+ return -1;
+ }
+ g_arguments->csv_output_path = g_arguments->csv_output_path_buf;
+ }
+ }
+
+ // csv_ts_format
+ {
+ if (g_arguments->csv_ts_format) {
+ if (is_valid_csv_ts_format(g_arguments->csv_ts_format) != 0) {
+ errorPrint("Failed to generate CSV, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
+ g_arguments->csv_ts_format, db->dbName, stb->stbName);
+ return -1;
+ }
+ }
+ }
+
+ // csv_ts_interval
+ {
+ long csv_ts_intv_secs = validate_csv_ts_interval(g_arguments->csv_ts_interval);
+ if (csv_ts_intv_secs <= 0) {
+ errorPrint("Failed to generate CSV, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
+ g_arguments->csv_ts_interval, db->dbName, stb->stbName);
+ return -1;
+ }
+ g_arguments->csv_ts_intv_secs = csv_ts_intv_secs;
+ }
+
+ return 0;
+}
+
+
+static void csvWriteThread() {
+ for (size_t i = 0; i < g_arguments->databases->size; ++i) {
+ // database
+ SDataBase* db = benchArrayGet(g_arguments->databases, i);
+ if (database->superTbls) {
+ for (size_t j = 0; j < db->superTbls->size; ++j) {
+ // stb
+ SSuperTable* stb = benchArrayGet(db->superTbls, j);
+ if (stb->insertRows == 0) {
+ continue;
+ }
+
+ // gen csv
+ int ret = genWithSTable(db, stb);
+ if(ret != 0) {
+ errorPrint("Failed to generate CSV files. database: %s, super table: %s, error code: %d.\n",
+ db->dbName, stb->stbName, ret);
+ return;
+ }
+ }
+ }
+ }
+
+ return;
+}
+
+
+
+int csvTestProcess() {
+ // parse parameter
+ if (csvParseParameter() != 0) {
+ errorPrint("Failed to generate CSV files. database: %s, super table: %s, error code: %d.\n",
+ db->dbName, stb->stbName, ret);
+ return -1;
+ }
+
+
+
+
+ infoPrint("Starting to output data to CSV files in directory: %s ...\n", g_arguments->csv_output_path);
+ int64_t start = toolsGetTimestampMs();
+ csvWriteThread();
+ int64_t delay = toolsGetTimestampMs() - start;
+ infoPrint("Data export to CSV files in directory: %s has been completed. Time elapsed: %.3f seconds\n",
+ g_arguments->csv_output_path, delay / 1000.0);
+ return 0;
+}
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index d77306682e..a2bf4f07d8 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1587,13 +1587,13 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
}
// csv output dir
- tools_cJSON* csv_od = tools_cJSON_GetObjectItem(json, "csv_output_dir");
- if (csv_od && csv_od->type == tools_cJSON_String && csv_od->valuestring != NULL) {
- g_arguments->csv_output_dir = csv_od->valuestring;
+ tools_cJSON* csv_op = tools_cJSON_GetObjectItem(json, "csv_output_path");
+ if (csv_op && csv_op->type == tools_cJSON_String && csv_op->valuestring != NULL) {
+ g_arguments->csv_output_path = csv_op->valuestring;
} else {
- g_arguments->csv_output_dir = "./output/";
+ g_arguments->csv_output_path = "./output/";
}
- (void)mkdir(g_arguments->csv_output_dir, 0775);
+ (void)mkdir(g_arguments->csv_output_path, 0775);
// csv file prefix
tools_cJSON* csv_fp = tools_cJSON_GetObjectItem(json, "csv_file_prefix");
@@ -1608,7 +1608,7 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
if (csv_tf && csv_tf->type == tools_cJSON_String && csv_tf->valuestring != NULL) {
g_arguments->csv_ts_format = csv_tf->valuestring;
} else {
- g_arguments->csv_ts_format = "YYYYMMDDHHmmSS";
+ g_arguments->csv_ts_format = NULL;
}
// csv timestamp format
diff --git a/tools/taos-tools/src/benchMain.c b/tools/taos-tools/src/benchMain.c
index 86ad795d05..e82da29468 100644
--- a/tools/taos-tools/src/benchMain.c
+++ b/tools/taos-tools/src/benchMain.c
@@ -153,7 +153,7 @@ int main(int argc, char* argv[]) {
}
} else if (g_arguments->test_mode == CSVFILE_TEST) {
if (csvTestProcess()) {
- errorPrint("%s", "query test process failed\n");
+ errorPrint("%s", "generate csv process failed\n");
ret = -1;
}
} else if (g_arguments->test_mode == QUERY_TEST) {
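The `csv_ts_interval` validator added in this patch (`validate_csv_ts_interval`) accepts a positive integer followed by a single unit suffix (s, m, h, d) and normalizes it to seconds. The stand-alone sketch below re-implements the same parsing idea with a couple of example inputs; it is for illustration only and is not the benchCsv.c code itself.

```c
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <ctype.h>

/* Parse "<positive integer><s|m|h|d>" into seconds; return -1 on any error. */
static long parseTsInterval(const char *s) {
    if (s == NULL || *s == '\0') return -1;
    char *end = NULL;
    errno = 0;
    long num = strtol(s, &end, 10);
    if (errno == ERANGE || end == s || num <= 0) return -1;
    if (*end == '\0' || *(end + 1) != '\0') return -1;  /* exactly one unit char */
    switch (tolower((unsigned char)*end)) {
        case 's': return num;
        case 'm': return num * 60;
        case 'h': return num * 3600;
        case 'd': return num * 86400;
        default:  return -1;
    }
}

int main(void) {
    printf("1d  -> %ld\n", parseTsInterval("1d"));   /* 86400 */
    printf("30m -> %ld\n", parseTsInterval("30m"));  /* 1800  */
    printf("10x -> %ld\n", parseTsInterval("10x"));  /* -1    */
    return 0;
}
```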
From a1b7986cbdb3aba332097f12548d2a1adcd4b84a Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Fri, 21 Feb 2025 17:08:33 +0800
Subject: [PATCH 032/105] enh: csv-related parameters code indent adjustment
---
tools/taos-tools/src/benchCsv.c | 52 +++++++++++++++------------------
1 file changed, 23 insertions(+), 29 deletions(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 6c08f1281b..6f88d2864d 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -335,45 +335,39 @@ static long validate_csv_ts_interval(const char* csv_ts_interval) {
static int csvParseParameter() {
// csv_output_path
- {
- size_t len = strlen(g_arguments->csv_output_path);
- if (len == 0) {
- errorPrint("Failed to generate CSV, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
- db->dbName, stb->stbName);
+ size_t len = strlen(g_arguments->csv_output_path);
+ if (len == 0) {
+ errorPrint("Failed to generate CSV, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
+ db->dbName, stb->stbName);
+ return -1;
+ }
+ if (g_arguments->csv_output_path[len - 1] != '/') {
+ int n = snprintf(g_arguments->csv_output_path_buf, sizeof(g_arguments->csv_output_path_buf), "%s/", g_arguments->csv_output_path);
+ if (n < 0 || n >= sizeof(g_arguments->csv_output_path_buf)) {
+ errorPrint("Failed to generate CSV, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
+ g_arguments->csv_output_path, db->dbName, stb->stbName);
return -1;
}
- if (g_arguments->csv_output_path[len - 1] != '/') {
- int n = snprintf(g_arguments->csv_output_path_buf, sizeof(g_arguments->csv_output_path_buf), "%s/", g_arguments->csv_output_path);
- if (n < 0 || n >= sizeof(g_arguments->csv_output_path_buf)) {
- errorPrint("Failed to generate CSV, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
- g_arguments->csv_output_path, db->dbName, stb->stbName);
- return -1;
- }
- g_arguments->csv_output_path = g_arguments->csv_output_path_buf;
- }
+ g_arguments->csv_output_path = g_arguments->csv_output_path_buf;
}
// csv_ts_format
- {
- if (g_arguments->csv_ts_format) {
- if (is_valid_csv_ts_format(g_arguments->csv_ts_format) != 0) {
- errorPrint("Failed to generate CSV, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
- g_arguments->csv_ts_format, db->dbName, stb->stbName);
- return -1;
- }
+ if (g_arguments->csv_ts_format) {
+ if (is_valid_csv_ts_format(g_arguments->csv_ts_format) != 0) {
+ errorPrint("Failed to generate CSV, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
+ g_arguments->csv_ts_format, db->dbName, stb->stbName);
+ return -1;
}
}
// csv_ts_interval
- {
- long csv_ts_intv_secs = validate_csv_ts_interval(g_arguments->csv_ts_interval);
- if (csv_ts_intv_secs <= 0) {
- errorPrint("Failed to generate CSV, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
- g_arguments->csv_ts_interval, db->dbName, stb->stbName);
- return -1;
- }
- g_arguments->csv_ts_intv_secs = csv_ts_intv_secs;
+ long csv_ts_intv_secs = validate_csv_ts_interval(g_arguments->csv_ts_interval);
+ if (csv_ts_intv_secs <= 0) {
+ errorPrint("Failed to generate CSV, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
+ g_arguments->csv_ts_interval, db->dbName, stb->stbName);
+ return -1;
}
+ g_arguments->csv_ts_intv_secs = csv_ts_intv_secs;
return 0;
}
From 5cb31be1e68fde702d1665b2ff52337af3ec2962 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 24 Feb 2025 16:17:20 +0800
Subject: [PATCH 033/105] enh: add csv writing meta
---
tools/taos-tools/inc/bench.h | 4 +-
tools/taos-tools/inc/benchCsv.h | 33 +-
tools/taos-tools/src/benchCsv.c | 506 +++++++++++++++++++++-------
tools/taos-tools/src/benchJsonOpt.c | 12 +-
4 files changed, 407 insertions(+), 148 deletions(-)
diff --git a/tools/taos-tools/inc/bench.h b/tools/taos-tools/inc/bench.h
index caabd39d3b..e8c94016f8 100644
--- a/tools/taos-tools/inc/bench.h
+++ b/tools/taos-tools/inc/bench.h
@@ -781,8 +781,8 @@ typedef struct SArguments_S {
bool escape_character;
bool pre_load_tb_meta;
- char* csv_output_path;
- char csv_output_path_buf[MAX_PATH_LEN];
+ char* output_path;
+ char output_path_buf[MAX_PATH_LEN];
char* csv_file_prefix;
char* csv_ts_format;
char* csv_ts_interval;
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index 25d0c55eba..a65d5d1c9c 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -18,19 +18,26 @@
#include
+
+typedef enum {
+ CSV_NAMING_SINGLE,
+ CSV_NAMING_TIME_SLICE,
+ CSV_NAMING_THREAD,
+ CSV_NAMING_THREAD_TIME_SLICE
+} CsvNamingType;
+
+typedef struct {
+ CsvNamingType naming_type;
+ time_t start_secs;
+ time_t end_secs;
+ time_t end_ts;
+ size_t thread_id;
+ size_t total_threads;
+ char thread_formatter[TINY_BUFF_LEN];
+} CsvWriteMeta;
+
+
+
int csvTestProcess();
-int genWithSTable(SDataBase* db, SSuperTable* stb, char* outDir);
-
-char * genTagData(char* buf, SSuperTable* stb, int64_t i, int64_t *k);
-
-char * genColumnData(char* colData, SSuperTable* stb, int64_t ts, int32_t precision, int64_t *k);
-
-int32_t genRowByField(char* buf, BArray* fields, int16_t fieldCnt, char* binanryPrefix, char* ncharPrefix, int64_t *k);
-
-void obtainCsvFile(char * outFile, SDataBase* db, SSuperTable* stb, char* outDir);
-
-int interlaceWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int bufLen, int minRemain);
-int batchWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int bufLen, int minRemain);
-
#endif // INC_BENCHCSV_H_
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 6f88d2864d..c7d455c66a 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -15,11 +15,10 @@
#include
#include
-#include
#include "benchLog.h"
-#include
-#include
-
+#include "benchData.h"
+#include "benchDataMix.h"
+#include "benchCsv.h"
//
// main etry
@@ -29,9 +28,7 @@
-void obtainCsvFile(char * outFile, SDataBase* db, SSuperTable* stb, char* outDir) {
- sprintf(outFile, "%s%s-%s.csv", outDir, db->dbName, stb->stbName);
-}
+
int32_t writeCsvFile(FILE* f, char * buf, int32_t len) {
size_t size = fwrite(buf, 1, len, f);
@@ -42,29 +39,36 @@ int32_t writeCsvFile(FILE* f, char * buf, int32_t len) {
return 0;
}
-int batchWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int bufLen, int minRemain) {
+int batchWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int rows_buf_len, int minRemain) {
int ret = 0;
int pos = 0;
int64_t tk = 0;
int64_t show = 0;
- int tagDataLen = stb->lenOfTags + stb->tags->size + 256;
- char * tagData = (char *) benchCalloc(1, tagDataLen, true);
- int colDataLen = stb->lenOfCols + stb->cols->size + 256;
- char * colData = (char *) benchCalloc(1, colDataLen, true);
+
+ uint32_t tags_length = accumulateRowLen(stbInfo->tags, stbInfo->iface);
+ uint32_t cols_length = accumulateRowLen(stbInfo->cols, stbInfo->iface);
+
+ size_t tags_csv_length = tags_length + stb->tags->size;
+ size_t cols_csv_length = cols_length + stb->cols->size;
+ char* tags_csv_buf = (char*)benchCalloc(1, tags_csv_length, true);
+ char* cols_csv_buf = (char*)benchCalloc(1, cols_csv_length, true);
// gen child name
- for (int64_t i = 0; i < stb->childTblCount; i++) {
+ for (int64_t i = 0; i < stb->childTblCount; ++i) {
int64_t ts = stb->startTimestamp;
int64_t ck = 0;
+
+ // child table
+
// tags
- genTagData(tagData, stb, i, &tk);
+ csvGenRowTagData(tags_csv_buf, stb, i, &tk);
// insert child column data
for(int64_t j = 0; j < stb->insertRows; j++) {
- genColumnData(colData, stb, ts, db->precision, &ck);
+ genColumnData(cols_csv_buf, stb, ts, db->precision, &ck);
// combine
- pos += sprintf(buf + pos, "%s,%s.\n", tagData, colData);
- if (bufLen - pos < minRemain) {
+ pos += sprintf(buf + pos, "%s,%s.\n", tags_csv_buf, cols_csv_buf);
+ if (rows_buf_len - pos < minRemain) {
// submit
ret = writeCsvFile(fs, buf, pos);
if (ret != 0) {
@@ -99,48 +103,277 @@ int batchWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int bufL
END:
// free
- tmfree(tagData);
- tmfree(colData);
+ tmfree(tags_csv_buf);
+ tmfree(cols_csv_buf);
return ret;
}
-int interlaceWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int bufLen, int minRemain) {
+
+static time_t csvGetStartSeconds(SDataBase* db, SSuperTable* stb) {
+ time_t start_seconds = 0;
+
+ if (db->precision == TSDB_TIME_PRECISION_MICRO) {
+ start_seconds = stb->startTimestamp / 1000000L;
+ } else if (db->precision == TSDB_TIME_PRECISION_NANO) {
+ start_seconds = stb->startTimestamp / 1000000000L;
+ } else {
+ start_seconds = stb->startTimestamp / 1000L;
+ }
+ return start_seconds;
+}
+
+
+void csvConvertTime2String(time_t time_value, char* time_buf, size_t buf_size) {
+ struct tm tm_result;
+ char *old_locale = setlocale(LC_TIME, "C");
+#ifdef _WIN32
+ gmtime_s(&tm_result, &time_value);
+#else
+ gmtime_r(&time_value, &tm_result);
+#endif
+ strftime(time_buf, buf_size, g_arguments->csv_ts_format, &tm_result);
+ if (old_locale) {
+ (LC_TIME, old_locale);
+ }
+}
+
+
+static CsvNamingType csvGetFileNamingType(SSuperTable* stb) {
+ if (stb->interlaceRows > 0) {
+ if (g_arguments->csv_ts_format) {
+ return CSV_NAMING_TIME_SLICE;
+ } else {
+ return CSV_NAMING_SINGLE;
+ }
+ } else {
+ if (g_arguments->csv_ts_format) {
+ return CSV_NAMING_THREAD_TIME_SLICE;
+ } else {
+ return CSV_NAMING_THREAD;
+ }
+ }
+}
+
+
+static void csvGenEndTimestamp(CsvWriteMeta* meta, SDataBase* db) {
+ time_t end_ts = 0;
+
+ if (db->precision == TSDB_TIME_PRECISION_MICRO) {
+ end_ts = meta->end_secs * 1000000L;
+ } else if (db->precision == TSDB_TIME_PRECISION_NANO) {
+ end_ts = meta->end_secs * 1000000000L;
+ } else {
+ end_ts = meta->end_secs * 1000L;
+ }
+ meta->end_ts = end_ts;
+ return;
+}
+
+
+static void csvGenThreadFormatter(CsvWriteMeta* meta) {
+ int digits = 0;
+ if (meta->total_threads == 0) {
+ digits = 1;
+ } else {
+ for (int n = meta->total_threads; n > 0; n /= 10) {
+ digits++;
+ }
+ }
+
+ if (digits <= 1) {
+ (void)sprintf(meta->thread_formatter, "%%d");
+ } else {
+ (void)snprintf(meta->thread_formatter, sizeof(meta->thread_formatter), "%%0%dd", digits);
+ }
+}
+
+
+static CsvWriteMeta csvInitFileNamingMeta(SDataBase* db, SSuperTable* stb) {
+ CsvWriteMeta meta = {
+ .naming_type = CSV_NAMING_SINGLE,
+ .start_secs = 0,
+ .end_secs = 0,
+ .thread_id = 0,
+ .total_threads = 1,
+ .thread_formatter = {}
+ };
+
+ meta.naming_type = csvGetFileNamingType(stb);
+
+ switch (meta.naming_type) {
+ case CSV_NAMING_SINGLE: {
+ break;
+ }
+ case CSV_NAMING_TIME_SLICE: {
+ meta.start_secs = csvGetStartSeconds(db, stb);
+ meta.end_secs = meta.start_secs + g_arguments->csv_ts_intv_secs;
+ csvGenEndTimestamp(&meta, db);
+ break;
+ }
+ case CSV_NAMING_THREAD: {
+ meta.thread_id = 1;
+ meta.total_threads = g_arguments->nthreads;
+ csvGenThreadFormatter(&meta);
+ break;
+ }
+ case CSV_NAMING_THREAD_TIME_SLICE: {
+ meta.thread_id = 1;
+ meta.total_threads = g_arguments->nthreads;
+ csvGenThreadFormatter(&meta);
+ meta.start_secs = csvGetStartSeconds(db, stb);
+ meta.end_secs = meta.start_secs + g_arguments->csv_ts_intv_secs;
+ csvGenEndTimestamp(&meta, db);
+ break;
+ }
+ default: {
+ meta.naming_type = CSV_NAMING_SINGLE;
+ break;
+ }
+ }
+
+ return meta;
+}
+
+
+int csvGetFileFullname(CsvWriteMeta* meta, char* fullname, size_t size) {
+ char thread_buf[SMALL_BUFF_LEN];
+ char start_time_buf[MIDDLE_BUFF_LEN];
+ char end_time_buf[MIDDLE_BUFF_LEN];
+ int ret = -1;
+ const char* base_path = g_arguments->output_path;
+ const char* file_prefix = g_arguments->csv_file_prefix;
+
+ switch (meta->naming_type) {
+ case CSV_NAMING_SINGLE: {
+ ret = snprintf(fullname, size, "%s%s.csv", base_path, file_prefix);
+ break;
+ }
+ case CSV_NAMING_TIME_SLICE: {
+ csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
+ csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
+ ret = snprintf(fullname, size, "%s%s_%s_%s.csv", base_path, file_prefix, start_time_buf, end_time_buf);
+ break;
+ }
+ case CSV_NAMING_THREAD: {
+ (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
+ ret = snprintf(fullname, size, "%s%s_%s.csv", base_path, file_prefix, thread_buf);
+ break;
+ }
+ case CSV_NAMING_THREAD_TIME_SLICE: {
+ (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
+ csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
+ csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
+ ret = snprintf(fullname, size, "%s%s_%s_%s_%s.csv", base_path, file_prefix, thread_buf, start_time_buf, end_time_buf);
+ break;
+ }
+ default: {
+ ret = -1;
+ break;
+ }
+ }
+
+ return (ret > 0 && (size_t)ret < size) ? 0 : -1;
+}
+
+
+uint32_t csvCalcInterlaceRows(CsvWriteMeta* meta, SSuperTable* stb, int64_t ts) {
+ uint32_t need_rows = 0;
+
+
+ switch (meta->naming_type) {
+ case CSV_NAMING_SINGLE: {
+ need_rows = stb->interlaceRows;
+ break;
+ }
+ case CSV_NAMING_TIME_SLICE: {
+ (meta->end_ts - ts) / stb->timestamp_step
+ need_rows = stb->interlaceRows;
+
+ break;
+ }
+ case CSV_NAMING_THREAD: {
+ (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
+ ret = snprintf(fullname, size, "%s%s_%s.csv", base_path, file_prefix, thread_buf);
+ break;
+ }
+ case CSV_NAMING_THREAD_TIME_SLICE: {
+ (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
+ csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
+ csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
+ ret = snprintf(fullname, size, "%s%s_%s_%s_%s.csv", base_path, file_prefix, thread_buf, start_time_buf, end_time_buf);
+ break;
+ }
+ default: {
+ ret = -1;
+ break;
+ }
+ }
+}
+
+
+
+
+static int interlaceWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fp, char* rows_buf, int rows_buf_len) {
+ char fullname[MAX_PATH_LEN] = {};
+ CsvWriteMeta meta = csvInitFileNamingMeta();
+
+ int ret = csvGetFileFullname(&meta, fullname, sizeof(fullname));
+ if (ret < 0) {
+ errorPrint("Failed to generate csv filename. database: %s, super table: %s, naming type: %d.\n",
+ db->dbName, stb->stbName, meta.naming_type);
+ return -1;
+ }
+
int ret = 0;
int pos = 0;
int64_t n = 0; // already inserted rows for one child table
int64_t tk = 0;
int64_t show = 0;
+ int64_t ts = 0;
+ int64_t last_ts = stb->startTimestamp;
+
+ // init buffer
+ char** tags_buf_bucket = (char **)benchCalloc(stb->childTblCount, sizeof(char *), true);
+ int cols_buf_length = stb->lenOfCols + stb->cols->size;
+ char* cols_buf = (char *)benchCalloc(1, cols_buf_length, true);
+
+ for (int64_t i = 0; i < stb->childTblCount; ++i) {
+ int tags_buf_length = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->tags->size;
+ tags_buf_bucket[i] = benchCalloc(1, tags_buf_length, true);
+ if (!tags_buf_bucket[i]) {
+ ret = -1;
+ goto end;
+ }
+
+ ret = csvGenRowTagData(tags_buf_bucket[i], tags_buf_length, stb, i, &tk);
+ if (!ret) {
+ goto end;
+ }
+ }
- char **tagDatas = (char **)benchCalloc(stb->childTblCount, sizeof(char *), true);
- int colDataLen = stb->lenOfCols + stb->cols->size + 256;
- char * colData = (char *) benchCalloc(1, colDataLen, true);
- int64_t last_ts = stb->startTimestamp;
-
while (n < stb->insertRows ) {
- for (int64_t i = 0; i < stb->childTblCount; i++) {
- // start one table
- int64_t ts = last_ts;
+ for (int64_t i = 0; i < stb->childTblCount; ++i) {
+ ts = last_ts;
int64_t ck = 0;
- // tags
- if (tagDatas[i] == NULL) {
- tagDatas[i] = genTagData(NULL, stb, i, &tk);
- }
+
// calc need insert rows
+ uint32_t need_rows = csvCalcInterlaceRows(&meta, stb, ts)
+
int64_t needInserts = stb->interlaceRows;
if(needInserts > stb->insertRows - n) {
needInserts = stb->insertRows - n;
- }
+ }
for (int64_t j = 0; j < needInserts; j++) {
- genColumnData(colData, stb, ts, db->precision, &ck);
+ genColumnData(cols_buf, stb, ts, db->precision, &ck);
// combine tags,cols
- pos += sprintf(buf + pos, "%s,%s.\n", tagDatas[i], colData);
- if (bufLen - pos < minRemain) {
+ pos += sprintf(buf + pos, "%s,%s\n", tags_buf_bucket[i], cols_buf);
+ if (rows_buf_len - pos < minRemain) {
// submit
- ret = writeCsvFile(fs, buf, pos);
+ ret = writeCsvFile(fp, buf, pos);
if (ret != 0) {
- goto END;
+ goto end;
}
pos = 0;
}
@@ -152,7 +385,7 @@ int interlaceWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int
if(g_arguments->terminate) {
infoPrint("%s", "You are cancel, exiting ... \n");
ret = -1;
- goto END;
+ goto end;
}
// print show
@@ -170,113 +403,131 @@ int interlaceWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int
}
if (pos > 0) {
- ret = writeCsvFile(fs, buf, pos);
+ ret = writeCsvFile(fp, buf, pos);
pos = 0;
}
-END:
+end:
// free
for (int64_t m = 0 ; m < stb->childTblCount; m ++) {
- tmfree(tagDatas[m]);
+ tmfree(tags_buf_bucket[m]);
}
- tmfree(tagDatas);
- tmfree(colData);
+ tmfree(tags_buf_bucket);
+ tmfree(cols_buf);
return ret;
}
+
// gen tag data
-char * genTagData(char* buf, SSuperTable* stb, int64_t i, int64_t *k) {
- // malloc
- char* tagData;
- if (buf == NULL) {
- int tagDataLen = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->tags->size + 32;
- tagData = benchCalloc(1, tagDataLen, true);
- } else {
- tagData = buf;
- }
-
- int pos = 0;
+int csvGenRowTagData(char* buf, size_t size, SSuperTable* stb, int64_t index, int64_t* k) {
// tbname
- pos += sprintf(tagData, "\'%s%"PRId64"\'", stb->childTblPrefix, i);
+ int pos = snprintf(buf, size, "\'%s%"PRId64"\'", stb->childTblPrefix, index);
// tags
- pos += genRowByField(tagData + pos, stb->tags, stb->tags->size, stb->binaryPrefex, stb->ncharPrefex, k);
+ pos += csvGenRowFields(buf + pos, stb->tags, stb->tags->size, stb->binaryPrefex, stb->ncharPrefex, k);
- return tagData;
+ return (pos > 0 && (size_t)pos < size) ? 0 : -1;
}
// gen column data
-char * genColumnData(char* colData, SSuperTable* stb, int64_t ts, int32_t precision, int64_t *k) {
+char * genColumnData(char* cols_csv_buf, SSuperTable* stb, int64_t ts, int32_t precision, int64_t *k) {
char szTime[128] = {0};
toolsFormatTimestamp(szTime, ts, precision);
- int pos = sprintf(colData, "\'%s\'", szTime);
+ int pos = sprintf(cols_csv_buf, "\'%s\'", szTime);
// columns
- genRowByField(colData + pos, stb->cols, stb->cols->size, stb->binaryPrefex, stb->ncharPrefex, k);
- return colData;
+ csvGenRowFields(cols_csv_buf + pos, stb->cols, stb->cols->size, stb->binaryPrefex, stb->ncharPrefex, k);
+ return cols_csv_buf;
}
-int32_t genRowByField(char* buf, BArray* fields, int16_t fieldCnt, char* binanryPrefix, char* ncharPrefix, int64_t *k) {
+int32_t csvGenRowFields(char* buf, BArray* fields, int16_t field_count, char* binanry_prefix, char* nchar_prefix, int64_t* k) {
+ int32_t pos = 0;
- // other cols data
- int32_t pos1 = 0;
- for(uint16_t i = 0; i < fieldCnt; i++) {
- Field* fd = benchArrayGet(fields, i);
- char* prefix = "";
- if(fd->type == TSDB_DATA_TYPE_BINARY || fd->type == TSDB_DATA_TYPE_VARBINARY) {
- if(binanryPrefix) {
- prefix = binanryPrefix;
- }
- } else if(fd->type == TSDB_DATA_TYPE_NCHAR) {
- if(ncharPrefix) {
- prefix = ncharPrefix;
- }
+ for (uint16_t i = 0; i < field_count; ++i) {
+ Field* field = benchArrayGet(fields, i);
+ char* prefix = "";
+ if(field->type == TSDB_DATA_TYPE_BINARY || field->type == TSDB_DATA_TYPE_VARBINARY) {
+ if (binanry_prefix) {
+ prefix = binanry_prefix;
+ }
+ } else if(field->type == TSDB_DATA_TYPE_NCHAR) {
+ if (nchar_prefix) {
+ prefix = nchar_prefix;
+ }
+ }
+ pos += dataGenByField(field, buf, pos, prefix, k, "");
}
- pos1 += dataGenByField(fd, buf, pos1, prefix, k, "");
- }
-
- return pos1;
+ return pos;
}
-int genWithSTable(SDataBase* db, SSuperTable* stb) {
-
+int csvGenStbInterlace(SDataBase* db, SSuperTable* stb) {
int ret = 0;
char outFile[MAX_FILE_NAME_LEN] = {0};
obtainCsvFile(outFile, db, stb, outDir);
- FILE * fs = fopen(outFile, "w");
- if(fs == NULL) {
+ FILE* fp = fopen(outFile, "w");
+ if(fp == NULL) {
errorPrint("failed create csv file. file=%s, last errno=%d strerror=%s \n", outFile, errno, strerror(errno));
return -1;
}
- int rowLen = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->lenOfCols + stb->tags->size + stb->cols->size;
- int bufLen = rowLen * g_arguments->reqPerReq;
- char* buf = benchCalloc(1, bufLen, true);
+ int row_buf_len = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->lenOfCols + stb->tags->size + stb->cols->size;
+ int rows_buf_len = row_buf_len * g_arguments->interlaceRows;
+ char* rows_buf = benchCalloc(1, rows_buf_len, true);
infoPrint("start write csv file: %s \n", outFile);
- if (stb->interlaceRows > 0) {
- // interlace mode
- ret = interlaceWriteCsv(db, stb, fs, buf, bufLen, rowLen * 2);
- } else {
- // batch mode
- ret = batchWriteCsv(db, stb, fs, buf, bufLen, rowLen * 2);
- }
+ // interlace mode
+ ret = interlaceWriteCsv(db, stb, fp, rows_buf, rows_buf_len);
- tmfree(buf);
- fclose(fs);
+
+ tmfree(rows_buf);
+ fclose(fp);
succPrint("end write csv file: %s \n", outFile);
+
+
+ // wait threads
+ for (int i = 0; i < threadCnt; i++) {
+ infoPrint("pthread_join %d ...\n", i);
+ pthread_join(pids[i], NULL);
+ }
+
+
return ret;
}
-static int is_valid_csv_ts_format(const char* csv_ts_format) {
+void csvGenPrepare(SDataBase* db, SSuperTable* stb) {
+ stbInfo->lenOfTags = accumulateRowLen(stbInfo->tags, stbInfo->iface);
+ stbInfo->lenOfCols = accumulateRowLen(stbInfo->cols, stbInfo->iface);
+ return;
+}
+
+
+int csvGenStb(SDataBase* db, SSuperTable* stb) {
+ // prepare
+ csvGenPrepare(db, stb);
+
+
+ int ret = 0;
+ if (stb->interlaceRows > 0) {
+ // interlace mode
+ ret = csvGenStbInterlace(db, stb);
+ } else {
+ // batch mode
+ ret = csvGenStbBatch(db, stb);
+ }
+
+ return ret;
+}
+
+
+static int csvValidateParamTsFormat(const char* csv_ts_format) {
if (!csv_ts_format) return 0;
struct tm test_tm = {
@@ -296,7 +547,11 @@ static int is_valid_csv_ts_format(const char* csv_ts_format) {
return -1;
}
+#ifdef _WIN32
const char* invalid_chars = "/\\:*?\"<>|";
+#else
+ const char* invalid_chars = "/\\?\"<>|";
+#endif
if (strpbrk(buffer, invalid_chars) != NULL) {
return -1;
}
@@ -305,7 +560,7 @@ static int is_valid_csv_ts_format(const char* csv_ts_format) {
}
-static long validate_csv_ts_interval(const char* csv_ts_interval) {
+static long csvValidateParamTsInterval(const char* csv_ts_interval) {
if (!csv_ts_interval || *csv_ts_interval == '\0') return -1;
char* endptr;
@@ -335,35 +590,35 @@ static long validate_csv_ts_interval(const char* csv_ts_interval) {
static int csvParseParameter() {
// csv_output_path
- size_t len = strlen(g_arguments->csv_output_path);
+ size_t len = strlen(g_arguments->output_path);
if (len == 0) {
- errorPrint("Failed to generate CSV, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
+ errorPrint("Failed to generate CSV files, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
db->dbName, stb->stbName);
return -1;
}
- if (g_arguments->csv_output_path[len - 1] != '/') {
- int n = snprintf(g_arguments->csv_output_path_buf, sizeof(g_arguments->csv_output_path_buf), "%s/", g_arguments->csv_output_path);
- if (n < 0 || n >= sizeof(g_arguments->csv_output_path_buf)) {
- errorPrint("Failed to generate CSV, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
- g_arguments->csv_output_path, db->dbName, stb->stbName);
+ if (g_arguments->output_path[len - 1] != '/') {
+ int n = snprintf(g_arguments->output_path_buf, sizeof(g_arguments->output_path_buf), "%s/", g_arguments->output_path);
+ if (n < 0 || n >= sizeof(g_arguments->output_path_buf)) {
+ errorPrint("Failed to generate CSV files, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
+ g_arguments->csv_output_path, db->dbName, stb->stbName);
return -1;
}
- g_arguments->csv_output_path = g_arguments->csv_output_path_buf;
+ g_arguments->output_path = g_arguments->output_path_buf;
}
// csv_ts_format
if (g_arguments->csv_ts_format) {
- if (is_valid_csv_ts_format(g_arguments->csv_ts_format) != 0) {
- errorPrint("Failed to generate CSV, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
+ if (csvValidateParamTsFormat(g_arguments->csv_ts_format) != 0) {
+ errorPrint("Failed to generate CSV files, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
g_arguments->csv_ts_format, db->dbName, stb->stbName);
return -1;
}
}
// csv_ts_interval
- long csv_ts_intv_secs = validate_csv_ts_interval(g_arguments->csv_ts_interval);
+ long csv_ts_intv_secs = csvValidateParamTsInterval(g_arguments->csv_ts_interval);
if (csv_ts_intv_secs <= 0) {
- errorPrint("Failed to generate CSV, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
+ errorPrint("Failed to generate CSV files, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
g_arguments->csv_ts_interval, db->dbName, stb->stbName);
return -1;
}
@@ -373,12 +628,12 @@ static int csvParseParameter() {
}
-static void csvWriteThread() {
- for (size_t i = 0; i < g_arguments->databases->size; ++i) {
+static int csvWriteThread() {
+ for (size_t i = 0; i < g_arguments->databases->size && !g_arguments->terminate; ++i) {
// database
SDataBase* db = benchArrayGet(g_arguments->databases, i);
if (database->superTbls) {
- for (size_t j = 0; j < db->superTbls->size; ++j) {
+ for (size_t j = 0; j < db->superTbls->size && !g_arguments->terminate; ++j) {
// stb
SSuperTable* stb = benchArrayGet(db->superTbls, j);
if (stb->insertRows == 0) {
@@ -386,37 +641,34 @@ static void csvWriteThread() {
}
// gen csv
- int ret = genWithSTable(db, stb);
+ int ret = csvGenStb(db, stb);
if(ret != 0) {
errorPrint("Failed to generate CSV files. database: %s, super table: %s, error code: %d.\n",
db->dbName, stb->stbName, ret);
- return;
+ return -1;
}
}
}
}
- return;
+ return 0;
}
-
int csvTestProcess() {
- // parse parameter
+ // parsing parameters
if (csvParseParameter() != 0) {
- errorPrint("Failed to generate CSV files. database: %s, super table: %s, error code: %d.\n",
- db->dbName, stb->stbName, ret);
return -1;
}
-
-
-
- infoPrint("Starting to output data to CSV files in directory: %s ...\n", g_arguments->csv_output_path);
+ infoPrint("Starting to output data to CSV files in directory: %s ...\n", g_arguments->output_path);
int64_t start = toolsGetTimestampMs();
- csvWriteThread();
+ int ret = csvWriteThread();
+ if (ret != 0) {
+ return -1;
+ }
int64_t delay = toolsGetTimestampMs() - start;
- infoPrint("Data export to CSV files in directory: %s has been completed. Time elapsed: %.3f seconds\n",
- g_arguments->csv_output_path, delay / 1000.0);
+ infoPrint("Generating CSV files in directory: %s has been completed. Time elapsed: %.3f seconds\n",
+ g_arguments->output_path, delay / 1000.0);
return 0;
}
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index a2bf4f07d8..a88526c278 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1586,14 +1586,14 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
}
}
- // csv output dir
- tools_cJSON* csv_op = tools_cJSON_GetObjectItem(json, "csv_output_path");
- if (csv_op && csv_op->type == tools_cJSON_String && csv_op->valuestring != NULL) {
- g_arguments->csv_output_path = csv_op->valuestring;
+ // output dir
+ tools_cJSON* opp = tools_cJSON_GetObjectItem(json, "output_path");
+ if (opp && opp->type == tools_cJSON_String && opp->valuestring != NULL) {
+ g_arguments->output_path = opp->valuestring;
} else {
- g_arguments->csv_output_path = "./output/";
+ g_arguments->output_path = "./output/";
}
- (void)mkdir(g_arguments->csv_output_path, 0775);
+ (void)mkdir(g_arguments->output_path, 0775);
// csv file prefix
tools_cJSON* csv_fp = tools_cJSON_GetObjectItem(json, "csv_file_prefix");
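To make the file-naming scheme introduced above concrete, here is a small stand-alone sketch of how the four naming types combine the output path, file prefix, zero-padded thread id, and time-slice strings into a file name. The literal inputs ("./output/", "data", thread 3 of 12, and the two time strings) are made up for illustration; the formatter logic mirrors csvGenThreadFormatter and csvGetFileFullname from this patch.

```c
#include <stdio.h>

int main(void) {
    const char *base   = "./output/";   /* output_path       */
    const char *prefix = "data";        /* csv_file_prefix   */
    int total_threads  = 12;            /* two digits -> %02d */
    int thread_id      = 3;

    /* Build the thread formatter the same way csvGenThreadFormatter does:
     * count the digits of total_threads, then zero-pad the thread id. */
    int digits = 0;
    for (int n = total_threads; n > 0; n /= 10) digits++;
    char thread_fmt[16];
    snprintf(thread_fmt, sizeof(thread_fmt), "%%0%dd", digits);

    char thread_buf[32];
    snprintf(thread_buf, sizeof(thread_buf), thread_fmt, thread_id);

    /* Example time-slice strings as csvConvertTime2String might render them. */
    const char *start = "20250101090000", *end = "20250102090000";

    char name[256];
    snprintf(name, sizeof(name), "%s%s.csv", base, prefix);
    printf("single        : %s\n", name);   /* ./output/data.csv */
    snprintf(name, sizeof(name), "%s%s_%s_%s.csv", base, prefix, start, end);
    printf("time slice    : %s\n", name);
    snprintf(name, sizeof(name), "%s%s_%s.csv", base, prefix, thread_buf);
    printf("thread        : %s\n", name);   /* ./output/data_03.csv */
    snprintf(name, sizeof(name), "%s%s_%s_%s_%s.csv", base, prefix, thread_buf, start, end);
    printf("thread + time : %s\n", name);
    return 0;
}
```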
From 80ecd4feb42fe475ef08616fffb66a1a30bcd370 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 27 Feb 2025 14:00:00 +0800
Subject: [PATCH 034/105] enh: framework that supports concurrent writing to
csv
---
tools/taos-tools/inc/bench.h | 5 +
tools/taos-tools/inc/benchCsv.h | 43 +-
tools/taos-tools/src/benchCsv.c | 761 ++++++++++++++++------------
tools/taos-tools/src/benchJsonOpt.c | 17 +
4 files changed, 491 insertions(+), 335 deletions(-)
diff --git a/tools/taos-tools/inc/bench.h b/tools/taos-tools/inc/bench.h
index e8c94016f8..4dd19d83b9 100644
--- a/tools/taos-tools/inc/bench.h
+++ b/tools/taos-tools/inc/bench.h
@@ -20,6 +20,9 @@
#define CURL_STATICLIB
#define ALLOW_FORBID_FUNC
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+#define MIN(a, b) ((a) < (b) ? (a) : (b))
+
#ifdef LINUX
#ifndef _ALPINE
@@ -787,6 +790,8 @@ typedef struct SArguments_S {
char* csv_ts_format;
char* csv_ts_interval;
long csv_ts_intv_secs;
+ bool csv_output_header;
+ bool csv_tbname_alias;
bool bind_vgroup;
} SArguments;
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index a65d5d1c9c..19331b8976 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -20,22 +20,49 @@
typedef enum {
- CSV_NAMING_SINGLE,
- CSV_NAMING_TIME_SLICE,
- CSV_NAMING_THREAD,
- CSV_NAMING_THREAD_TIME_SLICE
+ CSV_NAMING_I_SINGLE,
+ CSV_NAMING_I_TIME_SLICE,
+ CSV_NAMING_B_THREAD,
+ CSV_NAMING_B_THREAD_TIME_SLICE
} CsvNamingType;
+typedef struct {
+ char* buf;
+ int buf_size;
+ int length;
+} CsvRowFieldsBuf;
+
typedef struct {
CsvNamingType naming_type;
- time_t start_secs;
- time_t end_secs;
- time_t end_ts;
- size_t thread_id;
size_t total_threads;
char thread_formatter[TINY_BUFF_LEN];
+ SDataBase* db;
+ SSuperTable* stb;
+ int64_t start_ts;
+ int64_t end_ts;
+ int64_t ts_step;
+ int64_t interlace_step;
} CsvWriteMeta;
+typedef struct {
+ uint64_t ctb_start_idx;
+ uint64_t ctb_end_idx;
+ uint64_t ctb_count;
+ time_t start_secs;
+ time_t end_secs;
+ size_t thread_id;
+ bool output_header;
+ CsvRowFieldsBuf* tags_buf_bucket;
+ CsvRowFieldsBuf* cols_buf;
+} CsvThreadMeta;
+
+typedef struct {
+ CsvWriteMeta* write_meta;
+ CsvThreadMeta thread_meta;
+} CsvThreadArgs;
+
+
+
int csvTestProcess();
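The CsvThreadMeta fields ctb_start_idx and ctb_end_idx declared above are filled by csvCalcCtbRange later in this patch, which splits the child tables across the worker threads so that the first `remainder` threads each take one extra table. The minimal stand-alone sketch below reproduces that base/remainder split with made-up counts; it is an illustration of the scheme, not the benchCsv.c function itself.

```c
#include <stdio.h>
#include <stdint.h>

/* Split [offset, offset + count) among total_threads; thread ids are 1-based,
 * mirroring the base/remainder scheme used by csvCalcCtbRange. */
static void calcRange(uint64_t offset, uint64_t count, size_t total_threads,
                      size_t thread_id, uint64_t *start, uint64_t *end) {
    size_t idx = thread_id - 1;
    uint64_t base = count / total_threads;
    uint64_t rem  = count % total_threads;
    if (idx < rem) {
        *start = offset + idx * (base + 1);
        *end   = *start + (base + 1);
    } else {
        *start = offset + rem * (base + 1) + (idx - rem) * base;
        *end   = *start + base;
    }
}

int main(void) {
    /* e.g. 10 child tables split across 3 threads -> 4 / 3 / 3 */
    for (size_t tid = 1; tid <= 3; tid++) {
        uint64_t s, e;
        calcRange(0, 10, 3, tid, &s, &e);
        printf("thread %zu: child tables [%llu, %llu)\n",
               tid, (unsigned long long)s, (unsigned long long)e);
    }
    return 0;
}
```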
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index c7d455c66a..cec38628ad 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -25,105 +25,26 @@
//
#define SHOW_CNT 100000
+#define GEN_ROW_FIELDS_TAG 0
+#define GEN_ROW_FIELDS_COL 1
-
-
-int32_t writeCsvFile(FILE* f, char * buf, int32_t len) {
- size_t size = fwrite(buf, 1, len, f);
- if(size != len) {
- errorPrint("failed to write csv file. expect write length:%d real write length:%d \n", len, (int32_t)size);
- return -1;
- }
- return 0;
-}
-
-int batchWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fs, char* buf, int rows_buf_len, int minRemain) {
- int ret = 0;
- int pos = 0;
- int64_t tk = 0;
- int64_t show = 0;
-
-
- uint32_t tags_length = accumulateRowLen(stbInfo->tags, stbInfo->iface);
- uint32_t cols_length = accumulateRowLen(stbInfo->cols, stbInfo->iface);
-
- size_t tags_csv_length = tags_length + stb->tags->size;
- size_t cols_csv_length = cols_length + stb->cols->size;
- char* tags_csv_buf = (char*)benchCalloc(1, tags_csv_length, true);
- char* cols_csv_buf = (char*)benchCalloc(1, cols_csv_length, true);
-
- // gen child name
- for (int64_t i = 0; i < stb->childTblCount; ++i) {
- int64_t ts = stb->startTimestamp;
- int64_t ck = 0;
-
- // child table
-
- // tags
- csvGenRowTagData(tags_csv_buf, stb, i, &tk);
- // insert child column data
- for(int64_t j = 0; j < stb->insertRows; j++) {
- genColumnData(cols_csv_buf, stb, ts, db->precision, &ck);
- // combine
- pos += sprintf(buf + pos, "%s,%s.\n", tags_csv_buf, cols_csv_buf);
- if (rows_buf_len - pos < minRemain) {
- // submit
- ret = writeCsvFile(fs, buf, pos);
- if (ret != 0) {
- goto END;
- }
-
- pos = 0;
- }
-
- // ts move next
- ts += stb->timestamp_step;
-
- // check cancel
- if(g_arguments->terminate) {
- infoPrint("%s", "You are cancel, exiting ...\n");
- ret = -1;
- goto END;
- }
-
- // print show
- if (++show % SHOW_CNT == 0) {
- infoPrint("batch write child table cnt = %"PRId64 " all rows = %" PRId64 "\n", i+1, show);
- }
-
- }
- }
-
- if (pos > 0) {
- ret = writeCsvFile(fs, buf, pos);
- pos = 0;
- }
-
-END:
- // free
- tmfree(tags_csv_buf);
- tmfree(cols_csv_buf);
- return ret;
-}
-
-
-static time_t csvGetStartSeconds(SDataBase* db, SSuperTable* stb) {
+static time_t csvGetStartSeconds(int precision, int64_t start_ts) {
time_t start_seconds = 0;
- if (db->precision == TSDB_TIME_PRECISION_MICRO) {
- start_seconds = stb->startTimestamp / 1000000L;
- } else if (db->precision == TSDB_TIME_PRECISION_NANO) {
- start_seconds = stb->startTimestamp / 1000000000L;
+ if (precision == TSDB_TIME_PRECISION_MICRO) {
+ start_seconds = start_ts / 1000000L;
+ } else if (precision == TSDB_TIME_PRECISION_NANO) {
+ start_seconds = start_ts / 1000000000L;
} else {
- start_seconds = stb->startTimestamp / 1000L;
+ start_seconds = start_ts / 1000L;
}
return start_seconds;
}
-void csvConvertTime2String(time_t time_value, char* time_buf, size_t buf_size) {
+static void csvConvertTime2String(time_t time_value, char* time_buf, size_t buf_size) {
struct tm tm_result;
char *old_locale = setlocale(LC_TIME, "C");
#ifdef _WIN32
@@ -133,45 +54,73 @@ void csvConvertTime2String(time_t time_value, char* time_buf, size_t buf_size) {
#endif
strftime(time_buf, buf_size, g_arguments->csv_ts_format, &tm_result);
if (old_locale) {
- (LC_TIME, old_locale);
+ setlocale(LC_TIME, old_locale);
}
+ return;
}
static CsvNamingType csvGetFileNamingType(SSuperTable* stb) {
if (stb->interlaceRows > 0) {
if (g_arguments->csv_ts_format) {
- return CSV_NAMING_TIME_SLICE;
+ return CSV_NAMING_I_TIME_SLICE;
} else {
- return CSV_NAMING_SINGLE;
+ return CSV_NAMING_I_SINGLE;
}
} else {
if (g_arguments->csv_ts_format) {
- return CSV_NAMING_THREAD_TIME_SLICE;
+ return CSV_NAMING_B_THREAD_TIME_SLICE;
} else {
- return CSV_NAMING_THREAD;
+ return CSV_NAMING_B_THREAD;
}
}
}
-static void csvGenEndTimestamp(CsvWriteMeta* meta, SDataBase* db) {
- time_t end_ts = 0;
+static void csvCalcTimestampStep(CsvWriteMeta* meta) {
+ time_t ts_step = 0;
- if (db->precision == TSDB_TIME_PRECISION_MICRO) {
- end_ts = meta->end_secs * 1000000L;
+ if (meta->db->precision == TSDB_TIME_PRECISION_MICRO) {
+ ts_step = g_arguments->csv_ts_intv_secs * 1000000L;
} else if (db->precision == TSDB_TIME_PRECISION_NANO) {
- end_ts = meta->end_secs * 1000000000L;
+ ts_step = g_arguments->csv_ts_intv_secs * 1000000000L;
} else {
- end_ts = meta->end_secs * 1000L;
+ ts_step = g_arguments->csv_ts_intv_secs * 1000L;
}
- meta->end_ts = end_ts;
+ meta->ts_step = ts_step;
+ return;
+}
+
+
+static void csvCalcCtbRange(CsvThreadMeta* meta, size_t total_threads, int64_t ctb_offset, int64_t ctb_count) {
+ uint64_t ctb_start_idx = 0;
+ uint64_t ctb_end_idx = 0;
+ size_t tid_idx = meta->thread_id - 1;
+ size_t base = ctb_count / total_threads;
+ size_t remainder = ctb_count % total_threads;
+
+ if (tid_idx < remainder) {
+ ctb_start_idx = ctb_offset + tid_idx * (base + 1);
+ ctb_end_idx = ctb_start_idx + (base + 1);
+ } else {
+ ctb_start_idx = ctb_offset + remainder * (base + 1) + (tid_idx - remainder) * base;
+ ctb_end_idx = ctb_start_idx + base;
+ }
+
+ if (ctb_end_idx > ctb_offset + ctb_count) {
+ ctb_end_idx = ctb_offset + ctb_count;
+ }
+
+ meta->ctb_start_idx = ctb_start_idx;
+ meta->ctb_end_idx = ctb_end_idx;
+ meta->ctb_count = ctb_count;
return;
}
static void csvGenThreadFormatter(CsvWriteMeta* meta) {
int digits = 0;
+
if (meta->total_threads == 0) {
digits = 1;
} else {
@@ -181,52 +130,50 @@ static void csvGenThreadFormatter(CsvWriteMeta* meta) {
}
if (digits <= 1) {
- (void)sprintf(meta->thread_formatter, "%%d");
+ (void)snprintf(meta->thread_formatter, sizeof(meta->thread_formatter), "%%d");
} else {
(void)snprintf(meta->thread_formatter, sizeof(meta->thread_formatter), "%%0%dd", digits);
}
+ return;
}
-static CsvWriteMeta csvInitFileNamingMeta(SDataBase* db, SSuperTable* stb) {
+static CsvWriteMeta csvInitWriteMeta(SDataBase* db, SSuperTable* stb) {
CsvWriteMeta meta = {
- .naming_type = CSV_NAMING_SINGLE,
- .start_secs = 0,
- .end_secs = 0,
- .thread_id = 0,
+ .naming_type = CSV_NAMING_I_SINGLE,
.total_threads = 1,
- .thread_formatter = {}
+ .thread_formatter = {},
+ .db = db,
+ .stb = stb,
+ .start_ts = stb->startTimestamp,
+ .end_ts = stb->startTimestamp + stb->timestamp_step * stb->insertRows,
+ .ts_step = stb->timestamp_step * stb->insertRows,
+ .interlace_step = stb->timestamp_step * stb->interlaceRows
};
meta.naming_type = csvGetFileNamingType(stb);
switch (meta.naming_type) {
- case CSV_NAMING_SINGLE: {
+ case CSV_NAMING_I_SINGLE: {
break;
}
- case CSV_NAMING_TIME_SLICE: {
- meta.start_secs = csvGetStartSeconds(db, stb);
- meta.end_secs = meta.start_secs + g_arguments->csv_ts_intv_secs;
- csvGenEndTimestamp(&meta, db);
+ case CSV_NAMING_I_TIME_SLICE: {
+ csvCalcTimestampStep(&meta);
break;
}
- case CSV_NAMING_THREAD: {
- meta.thread_id = 1;
+ case CSV_NAMING_B_THREAD: {
meta.total_threads = g_arguments->nthreads;
csvGenThreadFormatter(&meta);
break;
}
- case CSV_NAMING_THREAD_TIME_SLICE: {
- meta.thread_id = 1;
+ case CSV_NAMING_B_THREAD_TIME_SLICE: {
meta.total_threads = g_arguments->nthreads;
csvGenThreadFormatter(&meta);
- meta.start_secs = csvGetStartSeconds(db, stb);
- meta.end_secs = meta.start_secs + g_arguments->csv_ts_intv_secs;
- csvGenEndTimestamp(&meta, db);
+ csvCalcTimestampStep(&meta);
break;
}
default: {
- meta.naming_type = CSV_NAMING_SINGLE;
+ meta.naming_type = CSV_NAMING_I_SINGLE;
break;
}
}
@@ -235,7 +182,67 @@ static CsvWriteMeta csvInitFileNamingMeta(SDataBase* db, SSuperTable* stb) {
}
-int csvGetFileFullname(CsvWriteMeta* meta, char* fullname, size_t size) {
+static CsvThreadMeta csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id) {
+ SDataBase* db = write_meta->db;
+ SSuperTable* stb = write_meta->stb;
+ CsvThreadMeta meta = {
+ .ctb_start_idx = 0,
+ .ctb_end_idx = 0,
+ .ctb_count = 0,
+ .start_secs = 0,
+ .end_secs = 0,
+ .thread_id = thread_id,
+ .tags_buf_bucket = NULL,
+ .cols_buf = NULL
+ };
+
+ csvCalcCtbRange(&meta, write_meta->total_threads, stb->childTblFrom, stb->childTblCount);
+
+ switch (write_meta->naming_type) {
+ case CSV_NAMING_I_SINGLE:
+ case CSV_NAMING_B_THREAD: {
+ break;
+ }
+ case CSV_NAMING_I_TIME_SLICE:
+ case CSV_NAMING_B_THREAD_TIME_SLICE: {
+ meta.start_secs = csvGetStartSeconds(db->precision, stb->startTimestamp);
+ meta.end_secs = meta.start_secs + g_arguments->csv_ts_intv_secs;
+ break;
+ }
+ default: {
+ meta.naming_type = CSV_NAMING_I_SINGLE;
+ break;
+ }
+ }
+
+ return meta;
+}
+
+
+static void csvUpdateSliceRange(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta, int64_t last_end_ts) {
+ SDataBase* db = write_meta->db;
+
+ switch (write_meta->naming_type) {
+ case CSV_NAMING_I_SINGLE:
+ case CSV_NAMING_B_THREAD: {
+ break;
+ }
+ case CSV_NAMING_I_TIME_SLICE:
+ case CSV_NAMING_B_THREAD_TIME_SLICE: {
+ thread_meta->start_secs = csvGetStartSeconds(db->precision, last_end_ts);
+ thread_meta->end_secs = thread_meta.start_secs + g_arguments->csv_ts_intv_secs;
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+
+ return;
+}
+
+
+static int csvGetFileFullname(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta, char* fullname, size_t size) {
char thread_buf[SMALL_BUFF_LEN];
char start_time_buf[MIDDLE_BUFF_LEN];
char end_time_buf[MIDDLE_BUFF_LEN];
@@ -244,22 +251,22 @@ int csvGetFileFullname(CsvWriteMeta* meta, char* fullname, size_t size) {
const char* file_prefix = g_arguments->csv_file_prefix;
switch (meta->naming_type) {
- case CSV_NAMING_SINGLE: {
+ case CSV_NAMING_I_SINGLE: {
ret = snprintf(fullname, size, "%s%s.csv", base_path, file_prefix);
break;
}
- case CSV_NAMING_TIME_SLICE: {
+ case CSV_NAMING_I_TIME_SLICE: {
csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
ret = snprintf(fullname, size, "%s%s_%s_%s.csv", base_path, file_prefix, start_time_buf, end_time_buf);
break;
}
- case CSV_NAMING_THREAD: {
+ case CSV_NAMING_B_THREAD: {
(void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
ret = snprintf(fullname, size, "%s%s_%s.csv", base_path, file_prefix, thread_buf);
break;
}
- case CSV_NAMING_THREAD_TIME_SLICE: {
+ case CSV_NAMING_B_THREAD_TIME_SLICE: {
(void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
@@ -276,184 +283,55 @@ int csvGetFileFullname(CsvWriteMeta* meta, char* fullname, size_t size) {
}
-uint32_t csvCalcInterlaceRows(CsvWriteMeta* meta, SSuperTable* stb, int64_t ts) {
- uint32_t need_rows = 0;
+static int64_t csvCalcSliceBatchTimestamp(CsvWriteMeta* write_meta, int64_t slice_cur_ts, int64_t slice_end_ts) {
+ int64_t slice_batch_ts = 0;
-
- switch (meta->naming_type) {
- case CSV_NAMING_SINGLE: {
- need_rows = stb->interlaceRows;
+ switch (write_meta->naming_type) {
+ case CSV_NAMING_I_SINGLE:
+ case CSV_NAMING_I_TIME_SLICE: {
+ slice_batch_ts = MIN(slice_cur_ts + write_meta->interlace_step, slice_end_ts);
break;
}
- case CSV_NAMING_TIME_SLICE: {
- (meta->end_ts - ts) / stb->timestamp_step
- need_rows = stb->interlaceRows;
-
- break;
- }
- case CSV_NAMING_THREAD: {
- (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
- ret = snprintf(fullname, size, "%s%s_%s.csv", base_path, file_prefix, thread_buf);
- break;
- }
- case CSV_NAMING_THREAD_TIME_SLICE: {
- (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
- csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
- csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
- ret = snprintf(fullname, size, "%s%s_%s_%s_%s.csv", base_path, file_prefix, thread_buf, start_time_buf, end_time_buf);
+ case CSV_NAMING_B_THREAD:
+ case CSV_NAMING_B_THREAD_TIME_SLICE: {
+ slice_batch_ts = slice_end_ts;
break;
}
default: {
- ret = -1;
break;
}
}
+
+ return slice_batch_ts;
}
+static int csvGenRowFields(char* buf, int size, SSuperTable* stb, int fields_cate, int64_t* k) {
+ int pos = 0;
+ BArray* fields = NULL;
+ int16_t field_count = 0;
+ char* binanry_prefix = stb->binaryPrefex ? stb->binaryPrefex : "";
+ char* nchar_prefix = stb->ncharPrefex ? stb->ncharPrefex : "";
-
-static int interlaceWriteCsv(SDataBase* db, SSuperTable* stb, FILE* fp, char* rows_buf, int rows_buf_len) {
- char fullname[MAX_PATH_LEN] = {};
- CsvWriteMeta meta = csvInitFileNamingMeta();
-
- int ret = csvGetFileFullname(&meta, fullname, sizeof(fullname));
- if (ret < 0) {
- errorPrint("Failed to generate csv filename. database: %s, super table: %s, naming type: %d.\n",
- db->dbName, stb->stbName, meta.naming_type);
+ if (!buf || !stb || !k || size <= 0) {
return -1;
}
- int ret = 0;
- int pos = 0;
- int64_t n = 0; // already inserted rows for one child table
- int64_t tk = 0;
- int64_t show = 0;
- int64_t ts = 0;
- int64_t last_ts = stb->startTimestamp;
-
- // init buffer
- char** tags_buf_bucket = (char **)benchCalloc(stb->childTblCount, sizeof(char *), true);
- int cols_buf_length = stb->lenOfCols + stb->cols->size;
- char* cols_buf = (char *)benchCalloc(1, cols_buf_length, true);
-
- for (int64_t i = 0; i < stb->childTblCount; ++i) {
- int tags_buf_length = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->tags->size;
- tags_buf_bucket[i] = benchCalloc(1, tags_buf_length, true);
- if (!tags_buf_bucket[i]) {
- ret = -1;
- goto end;
- }
-
- ret = csvGenRowTagData(tags_buf_bucket[i], tags_buf_length, stb, i, &tk);
- if (!ret) {
- goto end;
- }
+ if (fields_cate == GEN_ROW_FIELDS_TAG) {
+ fields = stb->tags;
+ field_count = stb->tags->size;
+ } else {
+ fields = stb->cols;
+ field_count = stb->cols->size;
}
- while (n < stb->insertRows ) {
- for (int64_t i = 0; i < stb->childTblCount; ++i) {
- ts = last_ts;
- int64_t ck = 0;
-
-
- // calc need insert rows
- uint32_t need_rows = csvCalcInterlaceRows(&meta, stb, ts)
-
- int64_t needInserts = stb->interlaceRows;
- if(needInserts > stb->insertRows - n) {
- needInserts = stb->insertRows - n;
- }
-
- for (int64_t j = 0; j < needInserts; j++) {
- genColumnData(cols_buf, stb, ts, db->precision, &ck);
- // combine tags,cols
- pos += sprintf(buf + pos, "%s,%s\n", tags_buf_bucket[i], cols_buf);
- if (rows_buf_len - pos < minRemain) {
- // submit
- ret = writeCsvFile(fp, buf, pos);
- if (ret != 0) {
- goto end;
- }
- pos = 0;
- }
-
- // ts move next
- ts += stb->timestamp_step;
-
- // check cancel
- if(g_arguments->terminate) {
- infoPrint("%s", "You are cancel, exiting ... \n");
- ret = -1;
- goto end;
- }
-
- // print show
- if (++show % SHOW_CNT == 0) {
- infoPrint("interlace write child table index = %"PRId64 " all rows = %"PRId64 "\n", i+1, show);
- }
- }
-
- // if last child table
- if (i + 1 == stb->childTblCount ) {
- n += needInserts;
- last_ts = ts;
- }
- }
- }
-
- if (pos > 0) {
- ret = writeCsvFile(fp, buf, pos);
- pos = 0;
- }
-
-end:
- // free
- for (int64_t m = 0 ; m < stb->childTblCount; m ++) {
- tmfree(tags_buf_bucket[m]);
- }
- tmfree(tags_buf_bucket);
- tmfree(cols_buf);
- return ret;
-}
-
-
-// gen tag data
-int csvGenRowTagData(char* buf, size_t size, SSuperTable* stb, int64_t index, int64_t* k) {
- // tbname
- int pos = snprintf(buf, size, "\'%s%"PRId64"\'", stb->childTblPrefix, index);
- // tags
- pos += csvGenRowFields(buf + pos, stb->tags, stb->tags->size, stb->binaryPrefex, stb->ncharPrefex, k);
-
- return (pos > 0 && (size_t)pos < size) ? 0 : -1;
-}
-
-// gen column data
-char * genColumnData(char* cols_csv_buf, SSuperTable* stb, int64_t ts, int32_t precision, int64_t *k) {
- char szTime[128] = {0};
- toolsFormatTimestamp(szTime, ts, precision);
- int pos = sprintf(cols_csv_buf, "\'%s\'", szTime);
-
- // columns
- csvGenRowFields(cols_csv_buf + pos, stb->cols, stb->cols->size, stb->binaryPrefex, stb->ncharPrefex, k);
- return cols_csv_buf;
-}
-
-
-int32_t csvGenRowFields(char* buf, BArray* fields, int16_t field_count, char* binanry_prefix, char* nchar_prefix, int64_t* k) {
- int32_t pos = 0;
-
for (uint16_t i = 0; i < field_count; ++i) {
Field* field = benchArrayGet(fields, i);
char* prefix = "";
if(field->type == TSDB_DATA_TYPE_BINARY || field->type == TSDB_DATA_TYPE_VARBINARY) {
- if (binanry_prefix) {
- prefix = binanry_prefix;
- }
+ prefix = binanry_prefix;
} else if(field->type == TSDB_DATA_TYPE_NCHAR) {
- if (nchar_prefix) {
- prefix = nchar_prefix;
- }
+ prefix = nchar_prefix;
}
pos += dataGenByField(field, buf, pos, prefix, k, "");
}
@@ -462,68 +340,297 @@ int32_t csvGenRowFields(char* buf, BArray* fields, int16_t field_count, char* bi
}
-
-int csvGenStbInterlace(SDataBase* db, SSuperTable* stb) {
-
-
- int ret = 0;
- char outFile[MAX_FILE_NAME_LEN] = {0};
- obtainCsvFile(outFile, db, stb, outDir);
- FILE* fp = fopen(outFile, "w");
- if(fp == NULL) {
- errorPrint("failed create csv file. file=%s, last errno=%d strerror=%s \n", outFile, errno, strerror(errno));
+static int csvGenRowTagData(char* buf, int size, SSuperTable* stb, int64_t index, int64_t* k) {
+ if (!buf || !stb || !k || size <= 0) {
return -1;
}
- int row_buf_len = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->lenOfCols + stb->tags->size + stb->cols->size;
- int rows_buf_len = row_buf_len * g_arguments->interlaceRows;
- char* rows_buf = benchCalloc(1, rows_buf_len, true);
+ // tbname
+ int pos = snprintf(buf, size, "\'%s%"PRId64"\'", stb->childTblPrefix, index);
- infoPrint("start write csv file: %s \n", outFile);
+ // tags
+ pos += csvGenRowFields(buf + pos, size - pos, stb, GEN_ROW_FIELDS_TAG, k);
- // interlace mode
- ret = interlaceWriteCsv(db, stb, fp, rows_buf, rows_buf_len);
-
-
- tmfree(rows_buf);
- fclose(fp);
-
- succPrint("end write csv file: %s \n", outFile);
-
-
- // wait threads
- for (int i = 0; i < threadCnt; i++) {
- infoPrint("pthread_join %d ...\n", i);
- pthread_join(pids[i], NULL);
- }
-
-
- return ret;
+ return (pos > 0 && pos < size) ? pos : -1;
}
-void csvGenPrepare(SDataBase* db, SSuperTable* stb) {
- stbInfo->lenOfTags = accumulateRowLen(stbInfo->tags, stbInfo->iface);
- stbInfo->lenOfCols = accumulateRowLen(stbInfo->cols, stbInfo->iface);
+static int csvGenRowColData(char* buf, int size, SSuperTable* stb, int64_t ts, int32_t precision, int64_t *k) {
+ char ts_fmt[128] = {0};
+ toolsFormatTimestamp(ts_fmt, ts, precision);
+ int pos = snprintf(buf, size, "\'%s\'", ts_fmt);
+
+ // columns
+ pos += csvGenRowFields(buf + pos, size - pos, stb, GEN_ROW_FIELDS_COL, k);
+ return (pos > 0 && pos < size) ? pos : -1;
+}
+
+
+static CsvRowFieldsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
+ SSuperTable* stb = write_meta->stb;
+ int ret = 0;
+ int64_t tk = 0;
+
+ if (!write_meta || !thread_meta) {
+ return NULL;
+ }
+
+ CsvRowFieldsBuf* tags_buf_bucket = (CsvRowFieldsBuf*)benchCalloc(thread_meta->ctb_count, sizeof(CsvRowFieldsBuf), true);
+ if (!tags_buf_bucket) {
+ return NULL;
+ }
+
+ char* tags_buf = NULL;
+ int tags_buf_size = TSDB_TABLE_NAME_LEN + stb->lenOfTags + stb->tags->size;
+ for (uint64_t i = 0; i < thread_meta->ctb_count; ++i) {
+ tags_buf = benchCalloc(1, tags_buf_size, true);
+ if (!tags_buf) {
+ goto error;
+ }
+
+ tags_buf_bucket[i].buf = tags_buf;
+ tags_buf_bucket[i].buf_size = tags_buf_size;
+
+ ret = csvGenRowTagData(tags_buf, tags_buf_size, stb, thread_meta->ctb_start_idx + i, &tk);
+ if (ret <= 0) {
+ goto error;
+ }
+
+ tags_buf_bucket[i].length = ret;
+ }
+
+ return tags_buf_bucket;
+
+error:
+ csvFreeCtbTagData(thread_meta, tags_buf_bucket);
+ return NULL;
+}
+
+
+static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowFieldsBuf* tags_buf_bucket) {
+ if (!thread_meta || !tags_buf_bucket) {
+ return;
+ }
+
+ for (uint64_t i = 0 ; i < thread_meta->ctb_count; ++i) {
+ char* tags_buf = tags_buf_bucket[i].buf;
+ if (tags_buf) {
+ tmfree(tags_buf);
+ } else {
+ break;
+ }
+ }
+ tmfree(tags_buf_bucket);
return;
}
-int csvGenStb(SDataBase* db, SSuperTable* stb) {
+static int csvWriteFile(FILE* fp, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck, CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
+ SDataBase* db = write_meta->db;
+ SSuperTable* stb = write_meta->stb;
+ CsvRowFieldsBuf* tags_buf_bucket = thread_meta->tags_buf_bucket;
+ CsvRowFieldsBuf* tags_buf = &tags_buf_bucket[ctb_idx];
+ CsvRowFieldsBuf* cols_buf = thread_meta->cols_buf;
+ int ret = 0;
+
+
+ ret = csvGenRowColData(cols_buf->buf, cols_buf->buf_size, stb, cur_ts, db->precision, ck);
+ if (ret <= 0) {
+ errorPrint("Failed to generate csv column data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
+ return -1;
+ }
+
+ cols_buf->length = ret;
+
+
+ // write header
+ if (thread_meta->output_header) {
+ // TODO
+
+ thread_meta->output_header = false;
+ }
+
+
+ // write columns
+ size_t written = fwrite(cols_buf->buf, 1, cols_buf->length, fp);
+ if (written != cols_buf->length) {
+ errorPrint("Failed to write csv column data, expected written %d but got %zu. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ cols_buf->length, written, db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
+ return -1;
+ }
+
+
+ // write tags
+ size_t written = fwrite(tags_buf->buf, 1, tags_buf->length, fp);
+ if (written != tags_buf->length) {
+ errorPrint("Failed to write csv tag data, expected written %d but got %zu. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ tags_buf->length, written, db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
+ return -1;
+ }
+
+ return 0;
+}
+
+
+static void* csvGenStbThread(void* arg) {
+ CsvThreadArgs* thread_arg = (CsvThreadArgs*)arg;
+ CsvWriteMeta* write_meta = thread_arg->write_meta;
+ CsvThreadMeta* thread_meta = &thread_arg->thread_meta;
+ SDataBase* db = write_meta->db;
+ SSuperTable* stb = write_meta->stb;
+
+ int64_t cur_ts = 0;
+ int64_t slice_cur_ts = 0;
+ int64_t slice_end_ts = 0;
+ int64_t slice_batch_ts = 0;
+ int64_t slice_ctb_cur_ts = 0;
+ int64_t ck = 0;
+ uint64_t ctb_idx = 0;
+ int ret = 0;
+ FILE* fp = NULL;
+ char fullname[MAX_PATH_LEN] = {};
+
+
+ // tags buffer
+ CsvRowFieldsBuf* tags_buf_bucket = csvGenCtbTagData(write_meta, thread_meta);
+ if (!tags_buf_bucket) {
+ errorPrint("Failed to generate csv tag data. database: %s, super table: %s, naming type: %d, thread index: %d.\n",
+ db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id);
+ return NULL;
+ }
+
+ // column buffer
+ int buf_size = stb->lenOfCols + stb->cols->size;
+ char* buf = (char*)benchCalloc(1, buf_size, true);
+ if (!buf) {
+ errorPrint("Failed to malloc csv column buffer. database: %s, super table: %s, naming type: %d, thread index: %d.\n",
+ db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id);
+ goto end;
+ }
+
+ CsvRowFieldsBuf cols_buf = {
+ .buf = buf,
+ .buf_size = buf_size,
+ .length = 0
+ };
+
+ thread_meta->tags_buf_bucket = tags_buf_bucket;
+ thread_meta->cols_buf = &cols_buf;
+
+
+ for (cur_ts = write_meta->start_ts; cur_ts < write_meta->end_ts; cur_ts += write_meta->ts_step) {
+ // get filename
+ memset(fullname, 0, sizeof(fullname));
+ ret = csvGetFileFullname(write_meta, thread_meta, fullname, sizeof(fullname));
+ if (ret < 0) {
+ errorPrint("Failed to generate csv filename. database: %s, super table: %s, naming type: %d, thread index: %d.\n",
+ db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id);
+ goto end;
+ }
+
+ // create fd
+ fp = fopen(fullname, "w");
+ if (fp == NULL) {
+ errorPrint("Failed to create csv file. thread index: %d, file: %s, errno: %d, strerror: %s.\n",
+ thread_meta->thread_id, fullname, errno, strerror(errno));
+ goto end;
+ }
+
+
+ thread_meta->output_header = g_arguments->csv_output_header;
+ slice_cur_ts = cur_ts;
+ slice_end_ts = MIN(cur_ts + write_meta->ts_step, write_meta->end_ts);
+
+ // write data
+ while (slice_cur_ts < slice_end_ts) {
+ slice_batch_ts = csvCalcSliceBatchTimestamp(write_meta, slice_cur_ts, slice_end_ts);
+
+ for (ctb_idx = 0; ctb_idx < thread_meta->ctb_count; ++ctb_idx) {
+ for (slice_ctb_cur_ts = slice_cur_ts; slice_ctb_cur_ts < slice_batch_ts; slice_ctb_cur_ts += write_meta->stb->timestamp_step) {
+ ret = csvWriteFile(fp, ctb_idx, slice_ctb_cur_ts, &ck, write_meta, thread_meta);
+ if (ret != 0) {
+ errorPrint("Failed to write csv file. thread index: %d, file: %s, errno: %d, strerror: %s.\n",
+ thread_meta->thread_id, fullname, errno, strerror(errno));
+ fclose(fp);
+ goto end;
+ }
+
+ ck += 1;
+ }
+ }
+
+ slice_cur_ts = slice_batch_ts;
+ }
+
+ fclose(fp);
+ csvUpdateSliceRange(write_meta, thread_meta, last_end_ts);
+ }
+
+end:
+ csvFreeCtbTagData(thread_meta, tags_buf_bucket);
+ tmfree(cols_buf);
+ return NULL;
+}
+
+
+static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
+ int ret = 0;
+ CsvWriteMeta write_meta = csvInitWriteMeta(db, stb);
+
+ pthread_t* pids = benchCalloc(write_meta.total_threads, sizeof(pthread_t), false);
+ if (!pids) {
+ ret = -1;
+ goto end;
+ }
+ CsvThreadArgs* args = benchCalloc(write_meta.total_threads, sizeof(CsvThreadArgs), false);
+ if (!args) {
+ ret = -1;
+ goto end;
+ }
+
+ for (uint32_t i = 0; (i < write_meta.total_threads && !g_arguments->terminate); ++i) {
+ CsvThreadArgs* arg = &args[i];
+ arg->write_meta = &write_meta;
+ arg->thread_meta = csvInitThreadMeta(&write_meta, i + 1);
+
+ ret = pthread_create(&pids[i], NULL, csvGenStbThread, arg);
+ if (ret != 0) {
+ perror("Failed to create thread");
+ goto end;
+ }
+ }
+
+ // wait threads
+ for (uint32_t i = 0; i < write_meta.total_threads; ++i) {
+ infoPrint("pthread_join %d ...\n", i);
+ pthread_join(pids[i], NULL);
+ }
+
+end:
+ tmfree(pids);
+ tmfree(args);
+ return ret;
+}
+
+
+static void csvGenPrepare(SDataBase* db, SSuperTable* stb) {
+ stb->lenOfTags = accumulateRowLen(stb->tags, stb->iface);
+ stb->lenOfCols = accumulateRowLen(stb->cols, stb->iface);
+
+ if (stb->childTblTo) {
+ stb->childTblCount = stb->childTblTo - stb->childTblFrom;
+ }
+
+ return;
+}
+
+
+static int csvGenStb(SDataBase* db, SSuperTable* stb) {
// prepare
csvGenPrepare(db, stb);
-
- int ret = 0;
- if (stb->interlaceRows > 0) {
- // interlace mode
- ret = csvGenStbInterlace(db, stb);
- } else {
- // batch mode
- ret = csvGenStbBatch(db, stb);
- }
-
- return ret;
+ return csvGenStbProcess(db, stb);
}
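For reference, a minimal standalone sketch (hypothetical numbers, not part of the patch) of how csvCalcSliceBatchTimestamp above advances through one file slice in the interlace naming modes: with timestamp_step = 1000 and interlaceRows = 3 the interlace_step is 3000, so a slice [0, 10000) is consumed in batches ending at 3000, 6000, 9000 and 10000, and every child table writes its rows for one batch before the next batch starts (the CSV_NAMING_B_* modes simply take the whole slice as one batch).

    #include <stdio.h>

    #define MIN(a, b) ((a) < (b) ? (a) : (b))

    int main(void) {
        long long interlace_step = 1000 * 3;        /* timestamp_step * interlaceRows (hypothetical) */
        long long slice_cur = 0, slice_end = 10000; /* one file slice */
        while (slice_cur < slice_end) {
            long long batch = MIN(slice_cur + interlace_step, slice_end);
            printf("batch [%lld, %lld)\n", slice_cur, batch); /* every child table is filled up to `batch` here */
            slice_cur = batch;
        }
        return 0;
    }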
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index a88526c278..4cf690204e 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1619,6 +1619,23 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
g_arguments->csv_ts_interval = "1d";
}
+ // csv output header
+ g_arguments->csv_output_header = false;
+ tools_cJSON* oph = tools_cJSON_GetObjectItem(json, "csv_output_header");
+ if (oph && oph->type == tools_cJSON_String && oph->valuestring != NULL) {
+ if (0 == strcasecmp(oph->valuestring, "yes")) {
+ g_arguments->csv_output_header = true;
+ }
+ }
+
+ // csv tbname alias
+ tools_cJSON* tba = tools_cJSON_GetObjectItem(json, "csv_tbname_alias");
+ if (tba && tba->type == tools_cJSON_String && tba->valuestring != NULL) {
+ g_arguments->csv_tbname_alias = tba->valuestring;
+ } else {
+ g_arguments->csv_tbname_alias = "device_id";
+ }
+
code = 0;
return code;
}
From 3627a54c133b4ec9191c365ae51baebb69f2d586 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 27 Feb 2025 14:17:23 +0800
Subject: [PATCH 035/105] enh: extract buffer size of all child tables
---
tools/taos-tools/inc/benchCsv.h | 12 +++++++++---
tools/taos-tools/src/benchCsv.c | 20 ++++++++++----------
2 files changed, 19 insertions(+), 13 deletions(-)
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index 19331b8976..717dfd8a71 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -26,11 +26,16 @@ typedef enum {
CSV_NAMING_B_THREAD_TIME_SLICE
} CsvNamingType;
+typedef struct {
+ char* buf;
+ int length;
+} CsvRowTagsBuf;
+
typedef struct {
char* buf;
int buf_size;
int length;
-} CsvRowFieldsBuf;
+} CsvRowColsBuf;
typedef struct {
CsvNamingType naming_type;
@@ -52,8 +57,9 @@ typedef struct {
time_t end_secs;
size_t thread_id;
bool output_header;
- CsvRowFieldsBuf* tags_buf_bucket;
- CsvRowFieldsBuf* cols_buf;
+ int tags_buf_size;
+ CsvRowTagsBuf* tags_buf_bucket;
+ CsvRowColsBuf* cols_buf;
} CsvThreadMeta;
typedef struct {
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index cec38628ad..95c94dc57d 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -366,7 +366,7 @@ static int csvGenRowColData(char* buf, int size, SSuperTable* stb, int64_t ts, i
}
-static CsvRowFieldsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
+static CsvRowTagsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
SSuperTable* stb = write_meta->stb;
int ret = 0;
int64_t tk = 0;
@@ -375,7 +375,7 @@ static CsvRowFieldsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta
return NULL;
}
- CsvRowFieldsBuf* tags_buf_bucket = (CsvRowFieldsBuf*)benchCalloc(thread_meta->ctb_count, sizeof(CsvRowFieldsBuf), true);
+ CsvRowTagsBuf* tags_buf_bucket = (CsvRowTagsBuf*)benchCalloc(thread_meta->ctb_count, sizeof(CsvRowTagsBuf), true);
if (!tags_buf_bucket) {
return NULL;
}
@@ -388,8 +388,8 @@ static CsvRowFieldsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta
goto error;
}
- tags_buf_bucket[i].buf = tags_buf;
- tags_buf_bucket[i].buf_size = tags_buf_size;
+ tags_buf_bucket[i].buf = tags_buf;
+ thread_meta->tags_buf_size = tags_buf_size;
ret = csvGenRowTagData(tags_buf, tags_buf_size, stb, thread_meta->ctb_start_idx + i, &tk);
if (ret <= 0) {
@@ -407,7 +407,7 @@ error:
}
-static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowFieldsBuf* tags_buf_bucket) {
+static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowTagsBuf* tags_buf_bucket) {
if (!thread_meta || !tags_buf_bucket) {
return;
}
@@ -428,9 +428,9 @@ static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowFieldsBuf* tags_
static int csvWriteFile(FILE* fp, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck, CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
SDataBase* db = write_meta->db;
SSuperTable* stb = write_meta->stb;
- CsvRowFieldsBuf* tags_buf_bucket = thread_meta->tags_buf_bucket;
- CsvRowFieldsBuf* tags_buf = &tags_buf_bucket[ctb_idx];
- CsvRowFieldsBuf* cols_buf = thread_meta->cols_buf;
+ CsvRowTagsBuf* tags_buf_bucket = thread_meta->tags_buf_bucket;
+ CsvRowColsBuf* tags_buf = &tags_buf_bucket[ctb_idx];
+ CsvRowColsBuf* cols_buf = thread_meta->cols_buf;
int ret = 0;
@@ -493,7 +493,7 @@ static void* csvGenStbThread(void* arg) {
// tags buffer
- CsvRowFieldsBuf* tags_buf_bucket = csvGenCtbTagData(write_meta, thread_meta);
+ CsvRowTagsBuf* tags_buf_bucket = csvGenCtbTagData(write_meta, thread_meta);
if (!tags_buf_bucket) {
errorPrint("Failed to generate csv tag data. database: %s, super table: %s, naming type: %d, thread index: %d.\n",
db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id);
@@ -509,7 +509,7 @@ static void* csvGenStbThread(void* arg) {
goto end;
}
- CsvRowFieldsBuf cols_buf = {
+ CsvRowColsBuf cols_buf = {
.buf = buf,
.buf_size = buf_size,
.length = 0
From 5ce4bd2465fd2e58f1d2b0da91c4cb85221aa0a6 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 27 Feb 2025 15:13:58 +0800
Subject: [PATCH 036/105] refactor: csv init write & thread meta
---
tools/taos-tools/src/benchCsv.c | 81 +++++++++++++++++----------------
1 file changed, 42 insertions(+), 39 deletions(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 95c94dc57d..673e223959 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -138,38 +138,34 @@ static void csvGenThreadFormatter(CsvWriteMeta* meta) {
}
-static CsvWriteMeta csvInitWriteMeta(SDataBase* db, SSuperTable* stb) {
- CsvWriteMeta meta = {
- .naming_type = CSV_NAMING_I_SINGLE,
- .total_threads = 1,
- .thread_formatter = {},
- .db = db,
- .stb = stb,
- .start_ts = stb->startTimestamp,
- .end_ts = stb->startTimestamp + stb->timestamp_step * stb->insertRows,
- .ts_step = stb->timestamp_step * stb->insertRows,
- .interlace_step = stb->timestamp_step * stb->interlaceRows
- };
+static void csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write_meta) {
+ write_meta->naming_type = csvGetFileNamingType(stb);
+ write_meta->total_threads = 1;
+ write_meta->db = db;
+ write_meta->stb = stb;
+ write_meta->start_ts = stb->startTimestamp;
+ write_meta->end_ts = stb->startTimestamp + stb->timestamp_step * stb->insertRows;
+ write_meta->ts_step = stb->timestamp_step * stb->insertRows;
+ write_meta->interlace_step = stb->timestamp_step * stb->interlaceRows;
- meta.naming_type = csvGetFileNamingType(stb);
switch (meta.naming_type) {
case CSV_NAMING_I_SINGLE: {
break;
}
case CSV_NAMING_I_TIME_SLICE: {
- csvCalcTimestampStep(&meta);
+ csvCalcTimestampStep(write_meta);
break;
}
case CSV_NAMING_B_THREAD: {
meta.total_threads = g_arguments->nthreads;
- csvGenThreadFormatter(&meta);
+ csvGenThreadFormatter(write_meta);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
meta.total_threads = g_arguments->nthreads;
- csvGenThreadFormatter(&meta);
- csvCalcTimestampStep(&meta);
+ csvGenThreadFormatter(write_meta);
+ csvCalcTimestampStep(write_meta);
break;
}
default: {
@@ -178,25 +174,26 @@ static CsvWriteMeta csvInitWriteMeta(SDataBase* db, SSuperTable* stb) {
}
}
- return meta;
+ return;
}
-static CsvThreadMeta csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id) {
+static void csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id, CsvThreadMeta* thread_meta) {
SDataBase* db = write_meta->db;
SSuperTable* stb = write_meta->stb;
- CsvThreadMeta meta = {
- .ctb_start_idx = 0,
- .ctb_end_idx = 0,
- .ctb_count = 0,
- .start_secs = 0,
- .end_secs = 0,
- .thread_id = thread_id,
- .tags_buf_bucket = NULL,
- .cols_buf = NULL
- };
- csvCalcCtbRange(&meta, write_meta->total_threads, stb->childTblFrom, stb->childTblCount);
+ thread_meta->ctb_start_idx = 0;
+ thread_meta->ctb_end_idx = 0;
+ thread_meta->ctb_count = 0;
+ thread_meta->start_secs = 0;
+ thread_meta->end_secs = 0;
+ thread_meta->thread_id = thread_id;
+ thread_meta->output_header = false;
+ thread_meta->tags_buf_size = 0;
+ thread_meta->tags_buf_bucket = NULL;
+ thread_meta->cols_buf = NULL;
+
+ csvCalcCtbRange(write_meta, write_meta->total_threads, stb->childTblFrom, stb->childTblCount);
switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE:
@@ -205,17 +202,17 @@ static CsvThreadMeta csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread
}
case CSV_NAMING_I_TIME_SLICE:
case CSV_NAMING_B_THREAD_TIME_SLICE: {
- meta.start_secs = csvGetStartSeconds(db->precision, stb->startTimestamp);
- meta.end_secs = meta.start_secs + g_arguments->csv_ts_intv_secs;
+ thread_meta->start_secs = csvGetStartSeconds(db->precision, stb->startTimestamp);
+ thread_meta->end_secs = thread_meta->start_secs + g_arguments->csv_ts_intv_secs;
break;
}
default: {
- meta.naming_type = CSV_NAMING_I_SINGLE;
+ thread_meta->naming_type = CSV_NAMING_I_SINGLE;
break;
}
}
- return meta;
+ return;
}
@@ -576,23 +573,28 @@ end:
static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
int ret = 0;
- CsvWriteMeta write_meta = csvInitWriteMeta(db, stb);
pthread_t* pids = benchCalloc(write_meta.total_threads, sizeof(pthread_t), false);
if (!pids) {
ret = -1;
goto end;
}
+ CsvWriteMeta* write_meta = benchCalloc(1, sizeof(CsvWriteMeta), false);
+ if (!args) {
+ ret = -1;
+ goto end;
+ }
CsvThreadArgs* args = benchCalloc(write_meta.total_threads, sizeof(CsvThreadArgs), false);
if (!args) {
ret = -1;
goto end;
}
- for (uint32_t i = 0; (i < write_meta.total_threads && !g_arguments->terminate); ++i) {
+ csvInitWriteMeta(db, stb, write_meta);
+ for (uint32_t i = 0; (i < write_meta->total_threads && !g_arguments->terminate); ++i) {
CsvThreadArgs* arg = &args[i];
- arg->write_meta = &write_meta;
- arg->thread_meta = csvInitThreadMeta(&write_meta, i + 1);
+ arg->write_meta = write_meta;
+ csvInitThreadMeta(write_meta, i + 1, &arg->thread_meta);
ret = pthread_create(&pids[i], NULL, csvGenStbThread, arg);
if (ret != 0) {
@@ -602,13 +604,14 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
}
// wait threads
- for (uint32_t i = 0; i < write_meta.total_threads; ++i) {
+ for (uint32_t i = 0; i < write_meta->total_threads; ++i) {
infoPrint("pthread_join %d ...\n", i);
pthread_join(pids[i], NULL);
}
end:
tmfree(pids);
+ tmfree(write_meta);
tmfree(args);
return ret;
}
From 8203389adab54cb76fb9c4e180645a431000964e Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 27 Feb 2025 16:18:12 +0800
Subject: [PATCH 037/105] feat: csv supports optional table header
---
tools/taos-tools/inc/benchCsv.h | 2 +
tools/taos-tools/src/benchCsv.c | 83 +++++++++++++++++++++++++--------
2 files changed, 66 insertions(+), 19 deletions(-)
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index 717dfd8a71..c522a12c50 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -41,6 +41,8 @@ typedef struct {
CsvNamingType naming_type;
size_t total_threads;
char thread_formatter[TINY_BUFF_LEN];
+ char csv_header[LARGE_BUFF_LEN];
+ int csv_header_length;
SDataBase* db;
SSuperTable* stb;
int64_t start_ts;
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 673e223959..f1aacaec66 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -138,9 +138,44 @@ static void csvGenThreadFormatter(CsvWriteMeta* meta) {
}
-static void csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write_meta) {
+static int csvGenCsvHeader(CsvWriteMeta* write_meta) {
+ SDataBase* db = write_meta->db;
+ SSuperTable* stb = write_meta->stb;
+ char* buf = write_meta->csv_header;
+ int pos = 0;
+ int size = sizeof(write_meta->csv_header);
+
+ if (!g_arguments->csv_output_header) {
+ return 0;
+ }
+
+ // ts
+ pos += snprintf(buf + pos, size - pos, "ts");
+
+ // columns
+ for (size_t i = 0; i < stb->cols->size; ++i) {
+ Field* col = benchArrayGet(stb->cols, i);
+ pos += snprintf(buf + pos, size - pos, ",%s", col->name);
+ }
+
+ // tbname
+ pos += snprintf(buf + pos, size - pos, ",%s", g_arguments->csv_tbname_alias);
+
+ // tags
+ for (size_t i = 0; i < stb->tags->size; ++i) {
+ Field* tag = benchArrayGet(stb->tags, i);
+ pos += snprintf(buf + pos, size - pos, ",%s", tag->name);
+ }
+
+ write_meta->csv_header_length = (pos > 0 && pos < size) ? pos : 0;
+ return (pos > 0 && pos < size) ? 0 : -1;
+}
+
+
+static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write_meta) {
write_meta->naming_type = csvGetFileNamingType(stb);
write_meta->total_threads = 1;
+ write_meta->csv_header_length = 0;
write_meta->db = db;
write_meta->stb = stb;
write_meta->start_ts = stb->startTimestamp;
@@ -148,6 +183,12 @@ static void csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* writ
write_meta->ts_step = stb->timestamp_step * stb->insertRows;
write_meta->interlace_step = stb->timestamp_step * stb->interlaceRows;
+ int ret = csvGenCsvHeader(write_meta);
+ if (ret < 0) {
+ errorPrint("Failed to generate csv header data. database: %s, super table: %s, naming type: %d.\n",
+ db->dbName, stb->stbName, write_meta->naming_type);
+ return -1;
+ }
switch (meta.naming_type) {
case CSV_NAMING_I_SINGLE: {
@@ -174,7 +215,7 @@ static void csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* writ
}
}
- return;
+ return 0;
}
@@ -428,7 +469,8 @@ static int csvWriteFile(FILE* fp, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck,
CsvRowTagsBuf* tags_buf_bucket = thread_meta->tags_buf_bucket;
CsvRowColsBuf* tags_buf = &tags_buf_bucket[ctb_idx];
CsvRowColsBuf* cols_buf = thread_meta->cols_buf;
- int ret = 0;
+ int ret = 0;
+ size_t written = 0;
ret = csvGenRowColData(cols_buf->buf, cols_buf->buf_size, stb, cur_ts, db->precision, ck);
@@ -440,26 +482,22 @@ static int csvWriteFile(FILE* fp, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck,
cols_buf->length = ret;
-
// write header
if (thread_meta->output_header) {
- // TODO
-
+ written = fwrite(write_meta->csv_header, 1, write_meta->csv_header_length, fp);
thread_meta->output_header = false;
}
-
// write columns
- size_t written = fwrite(cols_buf->buf, 1, cols_buf->length, fp);
+ written = fwrite(cols_buf->buf, 1, cols_buf->length, fp);
if (written != cols_buf->length) {
errorPrint("Failed to write csv column data, expected written %d but got %zu. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
cols_buf->length, written, db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
-
// write tags
- size_t written = fwrite(tags_buf->buf, 1, tags_buf->length, fp);
+ written = fwrite(tags_buf->buf, 1, tags_buf->length, fp);
if (written != tags_buf->length) {
errorPrint("Failed to write csv tag data, expected written %d but got %zu. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
tags_buf->length, written, db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
@@ -574,23 +612,30 @@ end:
static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
int ret = 0;
- pthread_t* pids = benchCalloc(write_meta.total_threads, sizeof(pthread_t), false);
- if (!pids) {
- ret = -1;
- goto end;
- }
CsvWriteMeta* write_meta = benchCalloc(1, sizeof(CsvWriteMeta), false);
- if (!args) {
+ if (!write_meta) {
ret = -1;
goto end;
}
- CsvThreadArgs* args = benchCalloc(write_meta.total_threads, sizeof(CsvThreadArgs), false);
+
+ ret = csvInitWriteMeta(db, stb, write_meta);
+ if (ret < 0) {
+ ret = -1;
+ goto end;
+ }
+
+ CsvThreadArgs* args = benchCalloc(write_meta->total_threads, sizeof(CsvThreadArgs), false);
if (!args) {
ret = -1;
goto end;
}
- csvInitWriteMeta(db, stb, write_meta);
+ pthread_t* pids = benchCalloc(write_meta->total_threads, sizeof(pthread_t), false);
+ if (!pids) {
+ ret = -1;
+ goto end;
+ }
+
for (uint32_t i = 0; (i < write_meta->total_threads && !g_arguments->terminate); ++i) {
CsvThreadArgs* arg = &args[i];
arg->write_meta = write_meta;
@@ -611,8 +656,8 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
end:
tmfree(pids);
- tmfree(write_meta);
tmfree(args);
+ tmfree(write_meta);
return ret;
}
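As an illustration of the header layout produced by csvGenCsvHeader above (a sketch with a hypothetical schema, not taken from the patch): for a super table with columns current and voltage, tags groupid and location, and the default csv_tbname_alias "device_id", the header row comes out as ts,current,voltage,device_id,groupid,location, matching the row order written by csvWriteFile (timestamp and columns first, then table name and tags).

    #include <stdio.h>

    int main(void) {
        const char* cols[] = {"current", "voltage"};   /* hypothetical column names */
        const char* tags[] = {"groupid", "location"};  /* hypothetical tag names */
        char header[256];
        int pos = snprintf(header, sizeof(header), "ts");
        for (int i = 0; i < 2; ++i) pos += snprintf(header + pos, sizeof(header) - pos, ",%s", cols[i]);
        pos += snprintf(header + pos, sizeof(header) - pos, ",%s", "device_id"); /* csv_tbname_alias */
        for (int i = 0; i < 2; ++i) pos += snprintf(header + pos, sizeof(header) - pos, ",%s", tags[i]);
        printf("%s\n", header); /* ts,current,voltage,device_id,groupid,location */
        return 0;
    }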
From 32b575f73a31cbff9f2cd530ef97541cc2b302aa Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Fri, 28 Feb 2025 15:26:20 +0800
Subject: [PATCH 038/105] feat: csv supports gzip
---
tools/taos-tools/inc/bench.h | 3 +-
tools/taos-tools/inc/benchCsv.h | 26 ++++-
tools/taos-tools/src/benchCsv.c | 143 ++++++++++++++++++++++++----
tools/taos-tools/src/benchJsonOpt.c | 17 ++++
4 files changed, 166 insertions(+), 23 deletions(-)
diff --git a/tools/taos-tools/inc/bench.h b/tools/taos-tools/inc/bench.h
index 4dd19d83b9..4f41abb903 100644
--- a/tools/taos-tools/inc/bench.h
+++ b/tools/taos-tools/inc/bench.h
@@ -783,6 +783,7 @@ typedef struct SArguments_S {
bool mistMode;
bool escape_character;
bool pre_load_tb_meta;
+ bool bind_vgroup;
char* output_path;
char output_path_buf[MAX_PATH_LEN];
@@ -792,8 +793,8 @@ typedef struct SArguments_S {
long csv_ts_intv_secs;
bool csv_output_header;
bool csv_tbname_alias;
+ CsvCompressionLevel csv_compress_level;
- bool bind_vgroup;
} SArguments;
typedef struct SBenchConn {
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index c522a12c50..2db2ec324e 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -16,7 +16,8 @@
#ifndef INC_BENCHCSV_H_
#define INC_BENCHCSV_H_
-#include
+#include <zlib.h>
+#include "bench.h"
typedef enum {
@@ -26,6 +27,29 @@ typedef enum {
CSV_NAMING_B_THREAD_TIME_SLICE
} CsvNamingType;
+typedef enum {
+ CSV_COMPRESS_NONE = 0,
+ CSV_COMPRESS_FAST = 1,
+ CSV_COMPRESS_BALANCE = 6,
+ CSV_COMPRESS_BEST = 9
+} CsvCompressionLevel;
+
+typedef enum {
+ CSV_ERR_OK = 0,
+ CSV_ERR_OPEN_FAILED,
+ CSV_ERR_WRITE_FAILED
+} CsvIoError;
+
+typedef struct {
+ const char* filename;
+ CsvCompressionLevel compress_level;
+ CsvIoError result;
+ union {
+ gzFile gf;
+ FILE* fp;
+ } handle;
+} CsvFileHandle;
+
typedef struct {
char* buf;
int length;
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index f1aacaec66..97a1a74c0f 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -280,6 +280,15 @@ static void csvUpdateSliceRange(CsvWriteMeta* write_meta, CsvThreadMeta* thread_
}
+static const char* csvGetGzipFilePrefix() {
+ if (g_arguments->csv_compress_level == CSV_COMPRESS_NONE) {
+ return "";
+ } else {
+ return ".gz"
+ }
+}
+
+
static int csvGetFileFullname(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta, char* fullname, size_t size) {
char thread_buf[SMALL_BUFF_LEN];
char start_time_buf[MIDDLE_BUFF_LEN];
@@ -287,28 +296,29 @@ static int csvGetFileFullname(CsvWriteMeta* write_meta, CsvThreadMeta* thread_me
int ret = -1;
const char* base_path = g_arguments->output_path;
const char* file_prefix = g_arguments->csv_file_prefix;
+ const char* gzip_suffix = csvGetGzipFilePrefix();
switch (meta->naming_type) {
case CSV_NAMING_I_SINGLE: {
- ret = snprintf(fullname, size, "%s%s.csv", base_path, file_prefix);
+ ret = snprintf(fullname, size, "%s%s.csv%s", base_path, file_prefix, g_arguments->csv_compress_level, gzip_suffix);
break;
}
case CSV_NAMING_I_TIME_SLICE: {
csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
- ret = snprintf(fullname, size, "%s%s_%s_%s.csv", base_path, file_prefix, start_time_buf, end_time_buf);
+ ret = snprintf(fullname, size, "%s%s_%s_%s.csv%s", base_path, file_prefix, start_time_buf, end_time_buf, gzip_suffix);
break;
}
case CSV_NAMING_B_THREAD: {
(void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
- ret = snprintf(fullname, size, "%s%s_%s.csv", base_path, file_prefix, thread_buf);
+ ret = snprintf(fullname, size, "%s%s_%s.csv%s", base_path, file_prefix, thread_buf, gzip_suffix);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
(void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
- ret = snprintf(fullname, size, "%s%s_%s_%s_%s.csv", base_path, file_prefix, thread_buf, start_time_buf, end_time_buf);
+ ret = snprintf(fullname, size, "%s%s_%s_%s_%s.csv%s", base_path, file_prefix, thread_buf, start_time_buf, end_time_buf, gzip_suffix);
break;
}
default: {
@@ -463,14 +473,91 @@ static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowTagsBuf* tags_bu
}
-static int csvWriteFile(FILE* fp, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck, CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
+static CsvFileHandle* csvOpen(const char* filename, CsvCompressionLevel compress_level) {
+ CsvFileHandle* fhdl = NULL;
+ bool failed = false;
+
+ fhdl = (CsvFileHandle*)benchCalloc(1, sizeof(CsvFileHandle), false);
+ if (!fhdl) {
+ errorPrint("Failed to malloc csv file handle. filename: %s, compress level: %d.\n",
+ filename, compress_level);
+ return NULL;
+ }
+
+ if (compress_level == CSV_COMPRESS_NONE) {
+ fhdl->handle.fp = fopen(filename, "w");
+ failed = (!fhdl->handle.fp);
+ } else {
+ char mode[TINY_BUFF_LEN];
+ (void)snprintf(mode, sizeof(mode), "wb%d", compress_level);
+ fhdl->handle.gf = gzopen(filename, mode);
+ failed = (!fhdl->handle.gf);
+ }
+
+ if (failed) {
+ tmfree(fhdl);
+ errorPrint("Failed to open csv file handle. filename: %s, compress level: %d.\n",
+ filename, compress_level);
+ return NULL;
+ } else {
+ fhdl->filename = filename;
+ fhdl->compress_level = compress_level;
+ fhdl->result = CSV_ERR_OK;
+ return fhdl;
+ }
+}
+
+
+static CsvIoError csvWrite(CsvFileHandle* fhdl, const char* buf, size_t size) {
+ if (fhdl->compress_level == CSV_COMPRESS_NONE) {
+ size_t ret = fwrite(buf, 1, size, fhdl->handle.fp);
+ if (ret != size) {
+ errorPrint("Failed to write csv file: %s. expected written %zu but %zu.\n",
+ fhdl->filename, size, ret);
+ if (ferror(fhdl->handle.fp)) {
+ perror("error");
+ }
+ fhdl->result = CSV_ERR_WRITE_FAILED;
+ return CSV_ERR_WRITE_FAILED;
+ }
+ } else {
+ unsigned int ret = gzwrite(fhdl->handle.gf, buf, size);
+ if (ret != size) {
+ errorPrint("Failed to write csv file: %s. expected written %zu but %zu.\n",
+ fhdl->filename, size, ret);
+ int errnum;
+ const char* errmsg = gzerror(fhdl->handle.gf, &errnum);
+ errorPrint("gzwrite error: %s\n", errmsg);
+ fhdl->result = CSV_ERR_WRITE_FAILED;
+ return CSV_ERR_WRITE_FAILED;
+ }
+ }
+ return CSV_ERR_OK;
+}
+
+
+static void csvClose(CsvFileHandle* fhdl) {
+ if (fhdl->compress_level == CSV_COMPRESS_NONE) {
+ if (fhdl->handle.fp) {
+ fclose(fhdl->handle.fp);
+ fhdl->handle.fp = NULL;
+ }
+ } else {
+ if (fhdl->handle.gf) {
+ gzclose(fhdl->handle.gf);
+ fhdl->handle.gf = NULL;
+ }
+ }
+}
+
+
+static int csvWriteFile(CsvFileHandle* fhdl, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck, CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
SDataBase* db = write_meta->db;
SSuperTable* stb = write_meta->stb;
CsvRowTagsBuf* tags_buf_bucket = thread_meta->tags_buf_bucket;
CsvRowColsBuf* tags_buf = &tags_buf_bucket[ctb_idx];
CsvRowColsBuf* cols_buf = thread_meta->cols_buf;
- int ret = 0;
- size_t written = 0;
+ int ret = 0;
ret = csvGenRowColData(cols_buf->buf, cols_buf->buf_size, stb, cur_ts, db->precision, ck);
@@ -484,23 +571,37 @@ static int csvWriteFile(FILE* fp, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck,
// write header
if (thread_meta->output_header) {
- written = fwrite(write_meta->csv_header, 1, write_meta->csv_header_length, fp);
+ ret = csvWrite(fhdl, write_meta->csv_header, write_meta->csv_header_length);
+ if (ret != CSV_ERR_OK) {
+ errorPrint("Failed to write csv header data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
+ return -1;
+ }
+
thread_meta->output_header = false;
}
// write columns
- written = fwrite(cols_buf->buf, 1, cols_buf->length, fp);
- if (written != cols_buf->length) {
+ ret = csvWrite(fhdl, cols_buf->buf, cols_buf->length);
+ if (ret != CSV_ERR_OK) {
errorPrint("Failed to write csv column data, expected written %d but got %zu. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
- cols_buf->length, written, db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
// write tags
- written = fwrite(tags_buf->buf, 1, tags_buf->length, fp);
- if (written != tags_buf->length) {
- errorPrint("Failed to write csv tag data, expected written %d but got %zu. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
- tags_buf->length, written, db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
+ ret = csvWrite(fhdl, tags_buf->buf, tags_buf->length);
+ if (ret != CSV_ERR_OK) {
+ errorPrint("Failed to write csv tag data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
+ return -1;
+ }
+
+ // write line break
+ ret = csvWrite(fhdl, "\n", 1);
+ if (ret != CSV_ERR_OK) {
+ errorPrint("Failed to write csv line break data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
@@ -523,7 +624,7 @@ static void* csvGenStbThread(void* arg) {
int64_t ck = 0;
uint64_t ctb_idx = 0;
int ret = 0;
- FILE* fp = NULL;
+ CsvFileHandle* fhdl = NULL;
char fullname[MAX_PATH_LEN] = {};
@@ -565,8 +666,8 @@ static void* csvGenStbThread(void* arg) {
}
// create fd
- fp = fopen(fullname, "w");
- if (fp == NULL) {
+ fhdl = csvOpen(fullname, g_arguments->csv_compress_level);
+ if (fhdl == NULL) {
errorPrint("Failed to create csv file. thread index: %d, file: %s, errno: %d, strerror: %s.\n",
thread_meta->thread_id, fullname, errno, strerror(errno));
goto end;
@@ -583,11 +684,11 @@ static void* csvGenStbThread(void* arg) {
for (ctb_idx = 0; ctb_idx < thread_meta->ctb_count; ++ctb_idx) {
for (slice_ctb_cur_ts = slice_cur_ts; slice_ctb_cur_ts < slice_batch_ts; slice_ctb_cur_ts += write_meta->stb->timestamp_step) {
- ret = csvWriteFile(fp, ctb_idx, slice_ctb_cur_ts, &ck, write_meta, thread_meta);
+ ret = csvWriteFile(fhdl, ctb_idx, slice_ctb_cur_ts, &ck, write_meta, thread_meta);
if (ret != 0) {
errorPrint("Failed to write csv file. thread index: %d, file: %s, errno: %d, strerror: %s.\n",
thread_meta->thread_id, fullname, errno, strerror(errno));
- fclose(fp);
+ csvClose(fhdl);
goto end;
}
@@ -598,7 +699,7 @@ static void* csvGenStbThread(void* arg) {
slice_cur_ts = slice_batch_ts;
}
- fclose(fp);
+ csvClose(fhdl);
csvUpdateSliceRange(write_meta, thread_meta, last_end_ts);
}
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index 4cf690204e..26c6200157 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -14,6 +14,7 @@
#include
#include
#include "benchLog.h"
+#include "benchCsv.h"
extern char g_configDir[MAX_PATH_LEN];
@@ -1636,6 +1637,22 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
g_arguments->csv_tbname_alias = "device_id";
}
+ // csv compression level
+ tools_cJSON* cl = tools_cJSON_GetObjectItem(json, "csv_compress_level");
+ if (cl && cl->type == tools_cJSON_String && cl->valuestring != NULL) {
+ if (0 == strcasecmp(cl->valuestring, "none")) {
+ g_arguments->csv_compress_level = CSV_COMPRESS_NONE;
+ } else if (0 == strcasecmp(cl->valuestring, "fast")) {
+ g_arguments->csv_compress_level = CSV_COMPRESS_FAST;
+ } else if (0 == strcasecmp(cl->valuestring, "balance")) {
+ g_arguments->csv_compress_level = CSV_COMPRESS_BALANCE;
+ } else if (0 == strcasecmp(cl->valuestring, "best")) {
+ g_arguments->csv_compress_level = CSV_COMPRESS_BEST;
+ }
+ } else {
+ g_arguments->csv_compress_level = CSV_COMPRESS_NONE;
+ }
+
code = 0;
return code;
}
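For orientation, a minimal usage sketch of the CsvFileHandle abstraction introduced above. It assumes the code sits inside benchCsv.c where the static helpers are visible; the file path and row text are made up for the example.

    static int csvWriteExample(void) {
        CsvFileHandle* fhdl = csvOpen("/tmp/example.csv.gz", CSV_COMPRESS_FAST);
        if (fhdl == NULL) {
            return -1; /* csvOpen has already logged the failure */
        }
        const char row[] = "'2025-01-01 00:00:00.000',1.23,'d0',7,'beijing'\n";
        int ret = (csvWrite(fhdl, row, sizeof(row) - 1) == CSV_ERR_OK) ? 0 : -1;
        csvClose(fhdl); /* closes the underlying stream, but does not free the handle */
        tmfree(fhdl);
        return ret;
    }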
From 13d845935e9eb3168094632434815a839004906b Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Fri, 28 Feb 2025 16:01:28 +0800
Subject: [PATCH 039/105] feat: abort csv writing with an exit message on cancel
---
tools/taos-tools/src/benchCsv.c | 13 ++++++++++++-
1 file changed, 12 insertions(+), 1 deletion(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 97a1a74c0f..e7db3481b1 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -693,6 +693,11 @@ static void* csvGenStbThread(void* arg) {
}
ck += 1;
+
+ if (g_arguments->terminate) {
+ csvClose(fhdl);
+ goto end;
+ }
}
}
@@ -705,13 +710,14 @@ static void* csvGenStbThread(void* arg) {
end:
csvFreeCtbTagData(thread_meta, tags_buf_bucket);
- tmfree(cols_buf);
+ tmfree(buf);
return NULL;
}
static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
int ret = 0;
+ bool prompt = true;
CsvWriteMeta* write_meta = benchCalloc(1, sizeof(CsvWriteMeta), false);
if (!write_meta) {
@@ -751,6 +757,11 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
// wait threads
for (uint32_t i = 0; i < write_meta->total_threads; ++i) {
+ if (g_arguments->terminate && prompt) {
+ infoPrint("Operation cancelled by user, exiting gracefully...\n");
+ prompt = false;
+ }
+
infoPrint("pthread_join %d ...\n", i);
pthread_join(pids[i], NULL);
}
From 695e92110568fad2381d613a1f9469c1b0300333 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Fri, 28 Feb 2025 17:39:11 +0800
Subject: [PATCH 040/105] feat: csv writing statistics
---
tools/taos-tools/inc/benchCsv.h | 1 +
tools/taos-tools/src/benchCsv.c | 78 +++++++++++++++++++++++++++------
2 files changed, 65 insertions(+), 14 deletions(-)
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index 2db2ec324e..11666a0b45 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -79,6 +79,7 @@ typedef struct {
uint64_t ctb_start_idx;
uint64_t ctb_end_idx;
uint64_t ctb_count;
+ uint64_t total_rows;
time_t start_secs;
time_t end_secs;
size_t thread_id;
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index e7db3481b1..92641f8ea8 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -622,11 +622,19 @@ static void* csvGenStbThread(void* arg) {
int64_t slice_batch_ts = 0;
int64_t slice_ctb_cur_ts = 0;
int64_t ck = 0;
- uint64_t ctb_idx = 0;
+ uint64_t ctb_idx = 0;
int ret = 0;
CsvFileHandle* fhdl = NULL;
char fullname[MAX_PATH_LEN] = {};
+ uint64_t total_rows = 0;
+ uint64_t pre_total_rows = 0;
+ uint64_t file_rows = 0;
+ int64_t start_print_ts = 0;
+ int64_t pre_print_ts = 0;
+ int64_t cur_print_ts = 0;
+ int64_t print_ts_elapse = 0;
+
// tags buffer
CsvRowTagsBuf* tags_buf_bucket = csvGenCtbTagData(write_meta, thread_meta);
@@ -653,7 +661,7 @@ static void* csvGenStbThread(void* arg) {
thread_meta->tags_buf_bucket = tags_buf_bucket;
thread_meta->cols_buf = &cols_buf;
-
+ start_print_ts = toolsGetTimestampMs();
for (cur_ts = write_meta->start_ts; cur_ts < write_meta->end_ts; cur_ts += write_meta->ts_step) {
// get filename
@@ -677,6 +685,9 @@ static void* csvGenStbThread(void* arg) {
thread_meta->output_header = g_arguments->csv_output_header;
slice_cur_ts = cur_ts;
slice_end_ts = MIN(cur_ts + write_meta->ts_step, write_meta->end_ts);
+ file_rows = 0;
+
+ infoPrint("thread[%d] begin to write csv file: %s.\n", thread_meta->thread_id, fullname);
// write data
while (slice_cur_ts < slice_end_ts) {
@@ -692,7 +703,20 @@ static void* csvGenStbThread(void* arg) {
goto end;
}
- ck += 1;
+ ck += 1;
+ total_rows += 1;
+ file_rows += 1;
+
+ cur_print_ts = toolsGetTimestampMs();
+ print_ts_elapse = cur_print_ts - pre_print_ts;
+ if (print_ts_elapse > 30000) {
+ infoPrint("thread[%d] has currently inserted rows: %" PRIu64 ", period insert rate: %.2f rows/s.\n",
+ thread_meta->thread_id, total_rows, (total_rows - pre_total_rows) * 1000.0 / print_ts_elapse);
+
+ pre_print_ts = cur_print_ts;
+ pre_total_rows = total_rows;
+ }
+
if (g_arguments->terminate) {
csvClose(fhdl);
@@ -708,7 +732,14 @@ static void* csvGenStbThread(void* arg) {
csvUpdateSliceRange(write_meta, thread_meta, last_end_ts);
}
+ cur_print_ts = toolsGetTimestampMs();
+ print_ts_elapse = cur_print_ts - start_print_ts;
+
+ succPrint("thread [%d] has completed inserting rows: %" PRIu64 ", insert rate %.2f rows/s.\n",
+ thread_meta->thread_id, total_rows, total_rows * 1000.0 / print_ts_elapse);
+
end:
+ thread_meta->total_rows = total_rows;
csvFreeCtbTagData(tags_buf_bucket);
tmfree(buf);
return NULL;
@@ -716,8 +747,12 @@ end:
static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
- int ret = 0;
- bool prompt = true;
+ int ret = 0;
+ bool prompt = true;
+ uint64_t total_rows = 0;
+ int64_t start_ts = 0;
+ int64_t ts_elapse = 0;
+
CsvWriteMeta* write_meta = benchCalloc(1, sizeof(CsvWriteMeta), false);
if (!write_meta) {
@@ -743,6 +778,7 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
goto end;
}
+ start_ts = toolsGetTimestampMs();
for (uint32_t i = 0; (i < write_meta->total_threads && !g_arguments->terminate); ++i) {
CsvThreadArgs* arg = &args[i];
arg->write_meta = write_meta;
@@ -766,6 +802,20 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
pthread_join(pids[i], NULL);
}
+ // statistics
+ total_rows = 0;
+ for (uint32_t i = 0; i < write_meta->total_threads; ++i) {
+ CsvThreadArgs* arg = &args[i];
+ CsvThreadMeta* thread_meta = &arg->thread_meta;
+ total_rows += thread_meta->total_rows;
+ }
+
+ ts_elapse = toolsGetTimestampMs() - start_ts;
+ if (ts_elapse > 0) {
+ succPrint("Spent %.6f seconds to insert rows: %" PRIu64 " with %d thread(s) into %s, at a rate of %.2f rows/s.\n",
+ ts_elapse / 1000.0, total_rows, write_meta->total_threads, g_arguments->output_path, total_rows * 1000.0 / ts_elapse);
+ }
+
end:
tmfree(pids);
tmfree(args);
@@ -859,14 +909,14 @@ static int csvParseParameter() {
// csv_output_path
size_t len = strlen(g_arguments->output_path);
if (len == 0) {
- errorPrint("Failed to generate CSV files, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
+ errorPrint("Failed to generate csv files, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
db->dbName, stb->stbName);
return -1;
}
if (g_arguments->output_path[len - 1] != '/') {
int n = snprintf(g_arguments->output_path_buf, sizeof(g_arguments->output_path_buf), "%s/", g_arguments->output_path);
if (n < 0 || n >= sizeof(g_arguments->output_path_buf)) {
- errorPrint("Failed to generate CSV files, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
+ errorPrint("Failed to generate csv files, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
g_arguments->csv_output_path, db->dbName, stb->stbName);
return -1;
}
@@ -876,7 +926,7 @@ static int csvParseParameter() {
// csv_ts_format
if (g_arguments->csv_ts_format) {
if (csvValidateParamTsFormat(g_arguments->csv_ts_format) != 0) {
- errorPrint("Failed to generate CSV files, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
+ errorPrint("Failed to generate csv files, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
g_arguments->csv_ts_format, db->dbName, stb->stbName);
return -1;
}
@@ -885,7 +935,7 @@ static int csvParseParameter() {
// csv_ts_interval
long csv_ts_intv_secs = csvValidateParamTsInterval(g_arguments->csv_ts_interval);
if (csv_ts_intv_secs <= 0) {
- errorPrint("Failed to generate CSV files, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
+ errorPrint("Failed to generate csv files, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
g_arguments->csv_ts_interval, db->dbName, stb->stbName);
return -1;
}
@@ -910,7 +960,7 @@ static int csvWriteThread() {
// gen csv
int ret = csvGenStb(db, stb);
if(ret != 0) {
- errorPrint("Failed to generate CSV files. database: %s, super table: %s, error code: %d.\n",
+ errorPrint("Failed to generate csv files. database: %s, super table: %s, error code: %d.\n",
db->dbName, stb->stbName, ret);
return -1;
}
@@ -928,14 +978,14 @@ int csvTestProcess() {
return -1;
}
- infoPrint("Starting to output data to CSV files in directory: %s ...\n", g_arguments->output_path);
+ infoPrint("Starting to output data to csv files in directory: %s ...\n", g_arguments->output_path);
int64_t start = toolsGetTimestampMs();
int ret = csvWriteThread();
if (ret != 0) {
return -1;
}
- int64_t delay = toolsGetTimestampMs() - start;
- infoPrint("Generating CSV files in directory: %s has been completed. Time elapsed: %.3f seconds\n",
- g_arguments->output_path, delay / 1000.0);
+ int64_t elapse = toolsGetTimestampMs() - start;
+ infoPrint("Generating csv files in directory: %s has been completed. Time elapsed: %.3f seconds\n",
+ g_arguments->output_path, elapse / 1000.0);
return 0;
}
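To make the progress numbers above concrete, a tiny sketch (hypothetical values, not from the patch) of the periodic rate printed every 30 seconds: the period insert rate is the row delta since the last report, times 1000, divided by the elapsed milliseconds.

    #include <stdio.h>

    int main(void) {
        unsigned long long total_rows = 500000, pre_total_rows = 380000; /* hypothetical counters */
        long long print_ts_elapse = 30500;                               /* ms since the last report */
        double rate = (total_rows - pre_total_rows) * 1000.0 / print_ts_elapse;
        printf("period insert rate: %.2f rows/s\n", rate);               /* 3934.43 */
        return 0;
    }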
From e91014f807b6f2981ee6d9cc43985fdf1912b953 Mon Sep 17 00:00:00 2001
From: factosea <285808407@qq.com>
Date: Tue, 25 Feb 2025 18:21:32 +0800
Subject: [PATCH 041/105] feat: show variables like
---
include/common/tcommon.h | 2 +-
include/common/tmsg.h | 7 ++--
include/util/tcompare.h | 1 +
source/client/src/clientMsgHandler.c | 40 +++++++++++----------
source/common/src/msg/tmsg.c | 32 ++++++++++++++++-
source/common/src/tmisce.c | 6 +++-
source/dnode/mgmt/mgmt_dnode/src/dmHandle.c | 2 +-
source/dnode/mnode/impl/src/mndConfig.c | 21 ++++++++---
source/libs/command/src/command.c | 28 ++++++++++-----
source/libs/parser/inc/parAst.h | 1 +
source/libs/parser/inc/sql.y | 6 ++--
source/libs/parser/src/parAstCreater.c | 16 +++++++++
source/libs/parser/src/parTranslater.c | 7 ++++
source/util/src/tcompare.c | 13 +++++++
14 files changed, 141 insertions(+), 41 deletions(-)
diff --git a/include/common/tcommon.h b/include/common/tcommon.h
index bd5bdb927d..dae9eab31e 100644
--- a/include/common/tcommon.h
+++ b/include/common/tcommon.h
@@ -414,7 +414,7 @@ typedef struct STUidTagInfo {
#define NOTIFY_EVENT_STR_COLUMN_INDEX 0
int32_t taosGenCrashJsonMsg(int signum, char** pMsg, int64_t clusterId, int64_t startTime);
-int32_t dumpConfToDataBlock(SSDataBlock* pBlock, int32_t startCol);
+int32_t dumpConfToDataBlock(SSDataBlock* pBlock, int32_t startCol, char* likePattern);
#define TSMA_RES_STB_POSTFIX "_tsma_res_stb_"
#define MD5_OUTPUT_LEN 32
diff --git a/include/common/tmsg.h b/include/common/tmsg.h
index 6d58748a3b..305e6f02ed 100644
--- a/include/common/tmsg.h
+++ b/include/common/tmsg.h
@@ -2247,11 +2247,14 @@ typedef struct {
} STagData;
typedef struct {
- int32_t useless; // useless
+ int32_t opType;
+ uint32_t valLen;
+ char* val;
} SShowVariablesReq;
int32_t tSerializeSShowVariablesReq(void* buf, int32_t bufLen, SShowVariablesReq* pReq);
-// int32_t tDeserializeSShowVariablesReq(void* buf, int32_t bufLen, SShowVariablesReq* pReq);
+int32_t tDeserializeSShowVariablesReq(void* buf, int32_t bufLen, SShowVariablesReq* pReq);
+void tFreeSShowVariablesReq(SShowVariablesReq* pReq);
typedef struct {
char name[TSDB_CONFIG_OPTION_LEN + 1];
diff --git a/include/util/tcompare.h b/include/util/tcompare.h
index c7a29cad57..09b35bbc8c 100644
--- a/include/util/tcompare.h
+++ b/include/util/tcompare.h
@@ -47,6 +47,7 @@ typedef struct SPatternCompareInfo {
int32_t InitRegexCache();
void DestroyRegexCache();
+int32_t rawStrPatternMatch(const char *pattern, const char *str);
int32_t patternMatch(const char *pattern, size_t psize, const char *str, size_t ssize, const SPatternCompareInfo *pInfo);
int32_t checkRegexPattern(const char *pPattern);
void DestoryThreadLocalRegComp();
diff --git a/source/client/src/clientMsgHandler.c b/source/client/src/clientMsgHandler.c
index 58ba39864d..d3a0aaca18 100644
--- a/source/client/src/clientMsgHandler.c
+++ b/source/client/src/clientMsgHandler.c
@@ -628,26 +628,30 @@ static int32_t buildShowVariablesRsp(SArray* pVars, SRetrieveTableRsp** pRsp) {
(*pRsp)->numOfRows = htobe64((int64_t)pBlock->info.rows);
(*pRsp)->numOfCols = htonl(SHOW_VARIABLES_RESULT_COLS);
- int32_t len = blockEncode(pBlock, (*pRsp)->data + PAYLOAD_PREFIX_LEN, dataEncodeBufSize, SHOW_VARIABLES_RESULT_COLS);
- if (len < 0) {
- uError("buildShowVariablesRsp error, len:%d", len);
- code = terrno;
- goto _exit;
+ int32_t len = 0;
+ if ((*pRsp)->numOfRows > 0) {
+ len = blockEncode(pBlock, (*pRsp)->data + PAYLOAD_PREFIX_LEN, dataEncodeBufSize, SHOW_VARIABLES_RESULT_COLS);
+ if (len < 0) {
+ uError("buildShowVariablesRsp error, len:%d", len);
+ code = terrno;
+ goto _exit;
+ }
+ SET_PAYLOAD_LEN((*pRsp)->data, len, len);
+
+ int32_t payloadLen = len + PAYLOAD_PREFIX_LEN;
+ (*pRsp)->payloadLen = htonl(payloadLen);
+ (*pRsp)->compLen = htonl(payloadLen);
+
+ if (payloadLen != rspSize - sizeof(SRetrieveTableRsp)) {
+ uError("buildShowVariablesRsp error, len:%d != rspSize - sizeof(SRetrieveTableRsp):%" PRIu64, len,
+ (uint64_t)(rspSize - sizeof(SRetrieveTableRsp)));
+ code = TSDB_CODE_TSC_INVALID_INPUT;
+ goto _exit;
+ }
}
+
blockDataDestroy(pBlock);
-
- SET_PAYLOAD_LEN((*pRsp)->data, len, len);
-
- int32_t payloadLen = len + PAYLOAD_PREFIX_LEN;
- (*pRsp)->payloadLen = htonl(payloadLen);
- (*pRsp)->compLen = htonl(payloadLen);
-
- if (payloadLen != rspSize - sizeof(SRetrieveTableRsp)) {
- uError("buildShowVariablesRsp error, len:%d != rspSize - sizeof(SRetrieveTableRsp):%" PRIu64, len,
- (uint64_t)(rspSize - sizeof(SRetrieveTableRsp)));
- code = TSDB_CODE_TSC_INVALID_INPUT;
- goto _exit;
- }
+ pBlock = NULL;
return TSDB_CODE_SUCCESS;
_exit:
diff --git a/source/common/src/msg/tmsg.c b/source/common/src/msg/tmsg.c
index 6a3e1948c8..ea7d332344 100644
--- a/source/common/src/msg/tmsg.c
+++ b/source/common/src/msg/tmsg.c
@@ -5807,7 +5807,11 @@ int32_t tSerializeSShowVariablesReq(void *buf, int32_t bufLen, SShowVariablesReq
tEncoderInit(&encoder, buf, bufLen);
TAOS_CHECK_EXIT(tStartEncode(&encoder));
- TAOS_CHECK_EXIT(tEncodeI32(&encoder, pReq->useless));
+ TAOS_CHECK_EXIT(tEncodeI32(&encoder, pReq->opType));
+ TAOS_CHECK_EXIT(tEncodeI32(&encoder, pReq->valLen));
+ if (pReq->valLen > 0) {
+ TAOS_CHECK_EXIT(tEncodeBinary(&encoder, (const uint8_t *)pReq->val, pReq->valLen));
+ }
tEndEncode(&encoder);
_exit:
@@ -5820,6 +5824,32 @@ _exit:
return tlen;
}
+int32_t tDeserializeSShowVariablesReq(void *buf, int32_t bufLen, SShowVariablesReq *pReq) {
+ SDecoder decoder = {0};
+ int32_t code = 0;
+ int32_t lino;
+ tDecoderInit(&decoder, buf, bufLen);
+
+ TAOS_CHECK_EXIT(tStartDecode(&decoder));
+ TAOS_CHECK_EXIT(tDecodeI32(&decoder, &pReq->opType));
+ TAOS_CHECK_EXIT(tDecodeU32(&decoder, &pReq->valLen));
+ if (pReq->valLen > 0) {
+ TAOS_CHECK_EXIT(tDecodeBinary(&decoder, (uint8_t **)&pReq->val, &pReq->valLen));
+ }
+
+ tEndDecode(&decoder);
+_exit:
+ tDecoderClear(&decoder);
+ return code;
+}
+
+void tFreeSShowVariablesReq(SShowVariablesReq *pReq) {
+ if (NULL != pReq && NULL != pReq->val) {
+ taosMemoryFree(pReq->val);
+ pReq->val = NULL;
+ }
+}
+
int32_t tEncodeSVariablesInfo(SEncoder *pEncoder, SVariablesInfo *pInfo) {
TAOS_CHECK_RETURN(tEncodeCStr(pEncoder, pInfo->name));
TAOS_CHECK_RETURN(tEncodeCStr(pEncoder, pInfo->value));
diff --git a/source/common/src/tmisce.c b/source/common/src/tmisce.c
index a966513629..fbe3cba4c4 100644
--- a/source/common/src/tmisce.c
+++ b/source/common/src/tmisce.c
@@ -18,6 +18,7 @@
#include "tglobal.h"
#include "tjson.h"
#include "tmisce.h"
+#include "tcompare.h"
int32_t taosGetFqdnPortFromEp(const char* ep, SEp* pEp) {
pEp->port = 0;
@@ -257,7 +258,7 @@ _exit:
TAOS_RETURN(code);
}
-int32_t dumpConfToDataBlock(SSDataBlock* pBlock, int32_t startCol) {
+int32_t dumpConfToDataBlock(SSDataBlock* pBlock, int32_t startCol, char* likePattern) {
int32_t code = 0;
SConfig* pConf = taosGetCfg();
if (pConf == NULL) {
@@ -291,6 +292,9 @@ int32_t dumpConfToDataBlock(SSDataBlock* pBlock, int32_t startCol) {
// GRANT_CFG_SKIP;
char name[TSDB_CONFIG_OPTION_LEN + VARSTR_HEADER_SIZE] = {0};
+ if (likePattern && rawStrPatternMatch(pItem->name, likePattern) != TSDB_PATTERN_MATCH) {
+ continue;
+ }
STR_WITH_MAXSIZE_TO_VARSTR(name, pItem->name, TSDB_CONFIG_OPTION_LEN + VARSTR_HEADER_SIZE);
SColumnInfoData* pColInfo = taosArrayGet(pBlock->pDataBlock, col++);
diff --git a/source/dnode/mgmt/mgmt_dnode/src/dmHandle.c b/source/dnode/mgmt/mgmt_dnode/src/dmHandle.c
index 0a3543ac07..0bf18525fa 100644
--- a/source/dnode/mgmt/mgmt_dnode/src/dmHandle.c
+++ b/source/dnode/mgmt/mgmt_dnode/src/dmHandle.c
@@ -660,7 +660,7 @@ _exit:
}
int32_t dmAppendVariablesToBlock(SSDataBlock *pBlock, int32_t dnodeId) {
- int32_t code = dumpConfToDataBlock(pBlock, 1);
+ int32_t code = dumpConfToDataBlock(pBlock, 1, NULL);
if (code != 0) {
return code;
}
diff --git a/source/dnode/mnode/impl/src/mndConfig.c b/source/dnode/mnode/impl/src/mndConfig.c
index 099fff7aee..844cd74b6a 100644
--- a/source/dnode/mnode/impl/src/mndConfig.c
+++ b/source/dnode/mnode/impl/src/mndConfig.c
@@ -23,6 +23,7 @@
#include "mndTrans.h"
#include "mndUser.h"
#include "tutil.h"
+#include "tcompare.h"
#define CFG_VER_NUMBER 1
#define CFG_RESERVE_SIZE 63
@@ -807,7 +808,7 @@ static void cfgObjArrayCleanUp(SArray *array) {
taosArrayDestroy(array);
}
-SArray *initVariablesFromItems(SArray *pItems) {
+static SArray *initVariablesFromItems(SArray *pItems, const char* likePattern) {
if (pItems == NULL) {
return NULL;
}
@@ -823,6 +824,9 @@ SArray *initVariablesFromItems(SArray *pItems) {
SConfigItem *pItem = taosArrayGet(pItems, i);
SVariablesInfo info = {0};
tstrncpy(info.name, pItem->name, sizeof(info.name));
+ if (likePattern != NULL && rawStrPatternMatch(pItem->name, likePattern) != TSDB_PATTERN_MATCH) {
+ continue;
+ }
// init info value
switch (pItem->dtype) {
@@ -889,15 +893,23 @@ SArray *initVariablesFromItems(SArray *pItems) {
static int32_t mndProcessShowVariablesReq(SRpcMsg *pReq) {
SShowVariablesRsp rsp = {0};
- int32_t code = -1;
+ int32_t code = TSDB_CODE_SUCCESS;
+ SShowVariablesReq req = {0};
+ SArray *array = NULL;
+
+ code = tDeserializeSShowVariablesReq(pReq->pCont, pReq->contLen, &req);
+ if (code != 0) {
+ mError("failed to deserialize config req, since %s", terrstr());
+ goto _OVER;
+ }
if ((code = mndCheckOperPrivilege(pReq->info.node, pReq->info.conn.user, MND_OPER_SHOW_VARIABLES)) != 0) {
goto _OVER;
}
SVariablesInfo info = {0};
-
- rsp.variables = initVariablesFromItems(taosGetGlobalCfg(tsCfg));
+ char *likePattern = req.opType == OP_TYPE_LIKE ? req.val : NULL;
+ rsp.variables = initVariablesFromItems(taosGetGlobalCfg(tsCfg), likePattern);
if (rsp.variables == NULL) {
code = terrno;
goto _OVER;
@@ -924,7 +936,6 @@ _OVER:
if (code != 0) {
mError("failed to get show variables info since %s", tstrerror(code));
}
-
tFreeSShowVariablesRsp(&rsp);
TAOS_RETURN(code);
}
diff --git a/source/libs/command/src/command.c b/source/libs/command/src/command.c
index e0a917ace9..243f85ff47 100644
--- a/source/libs/command/src/command.c
+++ b/source/libs/command/src/command.c
@@ -20,6 +20,7 @@
#include "systable.h"
#include "taosdef.h"
#include "tdatablock.h"
+#include "tdataformat.h"
#include "tglobal.h"
#include "tgrant.h"
@@ -53,13 +54,16 @@ static int32_t buildRetrieveTableRsp(SSDataBlock* pBlock, int32_t numOfCols, SRe
(*pRsp)->numOfRows = htobe64((int64_t)pBlock->info.rows);
(*pRsp)->numOfCols = htonl(numOfCols);
- int32_t len = blockEncode(pBlock, (*pRsp)->data + PAYLOAD_PREFIX_LEN, dataEncodeBufSize, numOfCols);
- if (len < 0) {
- taosMemoryFree(*pRsp);
- *pRsp = NULL;
- return terrno;
+ int32_t len = 0;
+ if ((*pRsp)->numOfRows > 0) {
+ len = blockEncode(pBlock, (*pRsp)->data + PAYLOAD_PREFIX_LEN, dataEncodeBufSize, numOfCols);
+ if (len < 0) {
+ taosMemoryFree(*pRsp);
+ *pRsp = NULL;
+ return terrno;
+ }
+ SET_PAYLOAD_LEN((*pRsp)->data, len, len);
}
- SET_PAYLOAD_LEN((*pRsp)->data, len, len);
int32_t payloadLen = len + PAYLOAD_PREFIX_LEN;
(*pRsp)->payloadLen = htonl(payloadLen);
@@ -985,11 +989,17 @@ _exit:
return terrno;
}
-static int32_t execShowLocalVariables(SRetrieveTableRsp** pRsp) {
+static int32_t execShowLocalVariables(SShowStmt* pStmt, SRetrieveTableRsp** pRsp) {
SSDataBlock* pBlock = NULL;
+ char* likePattern = NULL;
int32_t code = buildLocalVariablesResultDataBlock(&pBlock);
if (TSDB_CODE_SUCCESS == code) {
- code = dumpConfToDataBlock(pBlock, 0);
+ if (pStmt->tableCondType == OP_TYPE_LIKE) {
+ likePattern = ((SValueNode*)pStmt->pTbName)->literal;
+ }
+ }
+ if (TSDB_CODE_SUCCESS == code) {
+ code = dumpConfToDataBlock(pBlock, 0, likePattern);
}
if (TSDB_CODE_SUCCESS == code) {
code = buildRetrieveTableRsp(pBlock, SHOW_LOCAL_VARIABLES_RESULT_COLS, pRsp);
@@ -1091,7 +1101,7 @@ int32_t qExecCommand(int64_t* pConnId, bool sysInfoUser, SNode* pStmt, SRetrieve
case QUERY_NODE_ALTER_LOCAL_STMT:
return execAlterLocal((SAlterLocalStmt*)pStmt);
case QUERY_NODE_SHOW_LOCAL_VARIABLES_STMT:
- return execShowLocalVariables(pRsp);
+ return execShowLocalVariables((SShowStmt*)pStmt, pRsp);
case QUERY_NODE_SELECT_STMT:
return execSelectWithoutFrom((SSelectStmt*)pStmt, pRsp);
default:
diff --git a/source/libs/parser/inc/parAst.h b/source/libs/parser/inc/parAst.h
index 65274f85e1..7d102d85fb 100644
--- a/source/libs/parser/inc/parAst.h
+++ b/source/libs/parser/inc/parAst.h
@@ -241,6 +241,7 @@ SNode* createUseDatabaseStmt(SAstCreateContext* pCxt, SToken* pDbName);
SNode* setShowKind(SAstCreateContext* pCxt, SNode* pStmt, EShowKind showKind);
SNode* createShowStmt(SAstCreateContext* pCxt, ENodeType type);
SNode* createShowStmtWithFull(SAstCreateContext* pCxt, ENodeType type);
+SNode* createShowStmtWithLike(SAstCreateContext* pCxt, ENodeType type, SNode* pLikePattern);
SNode* createShowStmtWithCond(SAstCreateContext* pCxt, ENodeType type, SNode* pDbName, SNode* pTbName,
EOperatorType tableCondType);
SNode* createShowTablesStmt(SAstCreateContext* pCxt, SShowTablesOption option, SNode* pTbName,
diff --git a/source/libs/parser/inc/sql.y b/source/libs/parser/inc/sql.y
index 42d2e95d24..ef2d57f1d6 100755
--- a/source/libs/parser/inc/sql.y
+++ b/source/libs/parser/inc/sql.y
@@ -593,9 +593,9 @@ cmd ::= SHOW ENCRYPTIONS.
cmd ::= SHOW QUERIES. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_QUERIES_STMT); }
cmd ::= SHOW SCORES. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_SCORES_STMT); }
cmd ::= SHOW TOPICS. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_TOPICS_STMT); }
-cmd ::= SHOW VARIABLES. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_VARIABLES_STMT); }
-cmd ::= SHOW CLUSTER VARIABLES. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_VARIABLES_STMT); }
-cmd ::= SHOW LOCAL VARIABLES. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_LOCAL_VARIABLES_STMT); }
+cmd ::= SHOW VARIABLES like_pattern_opt(B). { pCxt->pRootNode = createShowStmtWithLike(pCxt, QUERY_NODE_SHOW_VARIABLES_STMT, B); }
+cmd ::= SHOW CLUSTER VARIABLES like_pattern_opt(B). { pCxt->pRootNode = createShowStmtWithLike(pCxt, QUERY_NODE_SHOW_VARIABLES_STMT, B); }
+cmd ::= SHOW LOCAL VARIABLES like_pattern_opt(B). { pCxt->pRootNode = createShowStmtWithLike(pCxt, QUERY_NODE_SHOW_LOCAL_VARIABLES_STMT, B); }
cmd ::= SHOW DNODE NK_INTEGER(A) VARIABLES like_pattern_opt(B). { pCxt->pRootNode = createShowDnodeVariablesStmt(pCxt, createValueNode(pCxt, TSDB_DATA_TYPE_BIGINT, &A), B); }
cmd ::= SHOW BNODES. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_BNODES_STMT); }
cmd ::= SHOW SNODES. { pCxt->pRootNode = createShowStmt(pCxt, QUERY_NODE_SHOW_SNODES_STMT); }
diff --git a/source/libs/parser/src/parAstCreater.c b/source/libs/parser/src/parAstCreater.c
index 341fc7e603..bd93dc2fea 100644
--- a/source/libs/parser/src/parAstCreater.c
+++ b/source/libs/parser/src/parAstCreater.c
@@ -2747,6 +2747,22 @@ static bool needDbShowStmt(ENodeType type) {
QUERY_NODE_SHOW_VIEWS_STMT == type || QUERY_NODE_SHOW_TSMAS_STMT == type || QUERY_NODE_SHOW_USAGE_STMT == type;
}
+SNode* createShowStmtWithLike(SAstCreateContext* pCxt, ENodeType type, SNode* pLikePattern) {
+ CHECK_PARSER_STATUS(pCxt);
+ SShowStmt* pStmt = NULL;
+ pCxt->errCode = nodesMakeNode(type, (SNode**)&pStmt);
+ CHECK_MAKE_NODE(pStmt);
+ pStmt->withFull = false;
+ pStmt->pTbName = pLikePattern;
+ if (pLikePattern) {
+ pStmt->tableCondType = OP_TYPE_LIKE;
+ }
+ return (SNode*)pStmt;
+_err:
+ nodesDestroyNode(pLikePattern);
+ return NULL;
+}
+
SNode* createShowStmt(SAstCreateContext* pCxt, ENodeType type) {
CHECK_PARSER_STATUS(pCxt);
SShowStmt* pStmt = NULL;
diff --git a/source/libs/parser/src/parTranslater.c b/source/libs/parser/src/parTranslater.c
index 2fdba9bad9..0815b443a7 100755
--- a/source/libs/parser/src/parTranslater.c
+++ b/source/libs/parser/src/parTranslater.c
@@ -13244,6 +13244,13 @@ static int32_t translateSplitVgroup(STranslateContext* pCxt, SSplitVgroupStmt* p
static int32_t translateShowVariables(STranslateContext* pCxt, SShowStmt* pStmt) {
SShowVariablesReq req = {0};
+ req.opType = pStmt->tableCondType;
+ if (req.opType == OP_TYPE_LIKE && pStmt->pTbName) {
+ req.valLen = strlen(((SValueNode*)pStmt->pTbName)->literal);
+ if (req.valLen > 0) {
+ req.val = taosStrdupi(((SValueNode*)pStmt->pTbName)->literal);
+ }
+ }
return buildCmdMsg(pCxt, TDMT_MND_SHOW_VARIABLES, (FSerializeFunc)tSerializeSShowVariablesReq, &req);
}
diff --git a/source/util/src/tcompare.c b/source/util/src/tcompare.c
index c95030b06e..a253c58415 100644
--- a/source/util/src/tcompare.c
+++ b/source/util/src/tcompare.c
@@ -1123,6 +1123,19 @@ int32_t patternMatch(const char *pattern, size_t psize, const char *str, size_t
return (j >= ssize || str[j] == 0) ? TSDB_PATTERN_MATCH : TSDB_PATTERN_NOMATCH;
}
+int32_t rawStrPatternMatch(const char *str, const char *pattern) {
+ SPatternCompareInfo pInfo = PATTERN_COMPARE_INFO_INITIALIZER;
+
+ size_t pLen = strlen(pattern);
+ size_t sz = strlen(str);
+ if (pLen > TSDB_MAX_FIELD_LEN) {
+ return 1;
+ }
+
+ int32_t ret = patternMatch(pattern, pLen, str, sz, &pInfo);
+ return (ret == TSDB_PATTERN_MATCH) ? 0 : 1;
+}
+
int32_t wcsPatternMatch(const TdUcs4 *pattern, size_t psize, const TdUcs4 *str, size_t ssize,
const SPatternCompareInfo *pInfo) {
TdUcs4 c, c1;
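
Note: a minimal standalone sketch of how the new rawStrPatternMatch() helper behaves; the LIKE-filtering call sites added above in dumpConfToDataBlock() and initVariablesFromItems() compare its result against TSDB_PATTERN_MATCH (0). The sample names below are illustrative only, and the prototype is assumed to be exported via tcompare.h.

#include <stdint.h>
#include <stdio.h>

int32_t rawStrPatternMatch(const char *str, const char *pattern);  // added in tcompare.c above

int main(void) {
    const char *names[] = {"debugFlag", "s3UploadDelaySec", "ttlUnit"};
    const char *likePattern = "s3%";                 // SQL LIKE syntax: % and _ wildcards
    for (int i = 0; i < 3; ++i) {
        if (rawStrPatternMatch(names[i], likePattern) == 0) {   // 0 means the pattern matched
            printf("matched: %s\n", names[i]);                  // prints: matched: s3UploadDelaySec
        }
    }
    return 0;
}

Patterns longer than TSDB_MAX_FIELD_LEN are rejected up front and reported as non-matching, which keeps the filter cheap for pathological input.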
From 36bec2c9585fc016d95e7bb9b963dbc0db23ebd4 Mon Sep 17 00:00:00 2001
From: factosea <285808407@qq.com>
Date: Tue, 25 Feb 2025 23:18:13 +0800
Subject: [PATCH 042/105] fix: rows
---
source/libs/command/src/command.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/source/libs/command/src/command.c b/source/libs/command/src/command.c
index 243f85ff47..1458164e28 100644
--- a/source/libs/command/src/command.c
+++ b/source/libs/command/src/command.c
@@ -55,7 +55,7 @@ static int32_t buildRetrieveTableRsp(SSDataBlock* pBlock, int32_t numOfCols, SRe
(*pRsp)->numOfCols = htonl(numOfCols);
int32_t len = 0;
- if ((*pRsp)->numOfRows > 0) {
+ if (pBlock->info.rows > 0) {
len = blockEncode(pBlock, (*pRsp)->data + PAYLOAD_PREFIX_LEN, dataEncodeBufSize, numOfCols);
if (len < 0) {
taosMemoryFree(*pRsp);
From 6da2e6f54722a0386d7a60b4c66c43cd5890cec2 Mon Sep 17 00:00:00 2001
From: factosea <285808407@qq.com>
Date: Sun, 2 Mar 2025 22:45:58 +0800
Subject: [PATCH 043/105] fix: test case
---
source/common/src/msg/tmsg.c | 7 ++-
source/dnode/mnode/impl/src/mndConfig.c | 1 +
source/libs/parser/src/parTranslater.c | 4 +-
tests/system-test/2-query/db.py | 60 +++++++++++++++++++++++++
4 files changed, 70 insertions(+), 2 deletions(-)
diff --git a/source/common/src/msg/tmsg.c b/source/common/src/msg/tmsg.c
index ea7d332344..85a5c8dbeb 100644
--- a/source/common/src/msg/tmsg.c
+++ b/source/common/src/msg/tmsg.c
@@ -5833,8 +5833,13 @@ int32_t tDeserializeSShowVariablesReq(void *buf, int32_t bufLen, SShowVariablesR
TAOS_CHECK_EXIT(tStartDecode(&decoder));
TAOS_CHECK_EXIT(tDecodeI32(&decoder, &pReq->opType));
TAOS_CHECK_EXIT(tDecodeU32(&decoder, &pReq->valLen));
+
if (pReq->valLen > 0) {
- TAOS_CHECK_EXIT(tDecodeBinary(&decoder, (uint8_t **)&pReq->val, &pReq->valLen));
+ pReq->val = taosMemoryCalloc(1, pReq->valLen + 1);
+ if (pReq->val == NULL) {
+ TAOS_CHECK_EXIT(terrno);
+ }
+ TAOS_CHECK_EXIT(tDecodeCStrTo(&decoder, pReq->val));
}
tEndDecode(&decoder);
diff --git a/source/dnode/mnode/impl/src/mndConfig.c b/source/dnode/mnode/impl/src/mndConfig.c
index 844cd74b6a..16d1bb5a5f 100644
--- a/source/dnode/mnode/impl/src/mndConfig.c
+++ b/source/dnode/mnode/impl/src/mndConfig.c
@@ -936,6 +936,7 @@ _OVER:
if (code != 0) {
mError("failed to get show variables info since %s", tstrerror(code));
}
+ tFreeSShowVariablesReq(&req);
tFreeSShowVariablesRsp(&rsp);
TAOS_RETURN(code);
}
diff --git a/source/libs/parser/src/parTranslater.c b/source/libs/parser/src/parTranslater.c
index 0815b443a7..edbf724a75 100755
--- a/source/libs/parser/src/parTranslater.c
+++ b/source/libs/parser/src/parTranslater.c
@@ -13251,7 +13251,9 @@ static int32_t translateShowVariables(STranslateContext* pCxt, SShowStmt* pStmt)
req.val = taosStrdupi(((SValueNode*)pStmt->pTbName)->literal);
}
}
- return buildCmdMsg(pCxt, TDMT_MND_SHOW_VARIABLES, (FSerializeFunc)tSerializeSShowVariablesReq, &req);
+ int32_t code = buildCmdMsg(pCxt, TDMT_MND_SHOW_VARIABLES, (FSerializeFunc)tSerializeSShowVariablesReq, &req);
+ tFreeSShowVariablesReq(&req);
+ return code;
}
static int32_t translateShowCreateDatabase(STranslateContext* pCxt, SShowCreateDatabaseStmt* pStmt) {
diff --git a/tests/system-test/2-query/db.py b/tests/system-test/2-query/db.py
index f380fdf00b..3408f02e8b 100644
--- a/tests/system-test/2-query/db.py
+++ b/tests/system-test/2-query/db.py
@@ -85,7 +85,59 @@ class TDTestCase:
tdSql.checkData(0, 0, 1)
tdSql.checkData(0, 1, 's3UploadDelaySec')
tdSql.checkData(0, 2, 60)
+
+ def show_local_variables_like(self):
+ tdSql.query("show local variables")
+ tdSql.checkRows(85)
+ tdSql.query("show local variables like 'debugFlag'")
+ tdSql.checkRows(1)
+ tdSql.checkData(0, 0, 'debugFlag')
+ tdSql.checkData(0, 1, 0)
+
+ tdSql.query("show local variables like '%debugFlag'")
+ tdSql.checkRows(9)
+
+ tdSql.query("show local variables like '____debugFlag'")
+ tdSql.checkRows(0)
+
+ tdSql.query("show local variables like 's3MigrateEnab%'")
+ tdSql.checkRows(0)
+
+ tdSql.query("show local variables like 'mini%'")
+ tdSql.checkRows(3)
+ tdSql.checkData(0, 0, 'minimalTmpDirGB')
+
+ tdSql.query("show local variables like '%info'")
+ tdSql.checkRows(2)
+
+ def show_cluster_variables_like(self):
+ zones = ["", "cluster"]
+ for zone in zones:
+ tdLog.info(f"show {zone} variables")
+ tdSql.query(f"show {zone} variables")
+ tdSql.checkRows(87)
+
+ tdLog.info(f"show {zone} variables like 'debugFlag'")
+ #tdSql.query(f"show {zone} variables like 'debugFlag'")
+ #tdSql.checkRows(0)
+
+ tdSql.query(f"show {zone} variables like 's3%'")
+ tdSql.checkRows(6)
+
+ tdSql.query(f"show {zone} variables like 'Max%'")
+ tdSql.checkRows(3)
+
+ tdSql.query(f"show {zone} variables like 'ttl%'")
+ tdSql.checkRows(5)
+
+ tdSql.query(f"show {zone} variables like 'ttl34343434%'")
+ tdSql.checkRows(0)
+
+ tdSql.query(f"show {zone} variables like 'jdlkfdjdfkdfnldlfdnfkdkfdmfdlfmnnnnnjkjk'")
+ tdSql.checkRows(0)
+
+
def threadTest(self, threadID):
print(f"Thread {threadID} starting...")
tdsqln = tdCom.newTdSql()
@@ -127,6 +179,14 @@ class TDTestCase:
tdLog.printNoPrefix("==========start case3 run ...............")
self.case3()
tdLog.printNoPrefix("==========end case3 run ...............")
+
+ tdLog.printNoPrefix("==========start show_local_variables_like run ...............")
+ self.show_local_variables_like()
+ tdLog.printNoPrefix("==========end show_local_variables_like run ...............")
+
+ tdLog.printNoPrefix("==========start show_cluster_variables_like run ...............")
+ self.show_cluster_variables_like()
+ tdLog.printNoPrefix("==========end show_cluster_variables_like run ...............")
def stop(self):
tdSql.close()
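
Note: the two sides of the SHOW VARIABLES LIKE plumbing can be pictured with a minimal round-trip sketch (illustrative buffer size, error handling trimmed; assumes the tmsg.h declarations above and <string.h>). The translater fills opType/val/valLen and must free its copy after buildCmdMsg(), while the mnode decoder allocates a fresh NUL-terminated copy that is released via tFreeSShowVariablesReq() in the _OVER path.

SShowVariablesReq req = {0};
req.opType = OP_TYPE_LIKE;
req.val    = taosStrdupi("debug%");      // pattern taken from the parsed LIKE literal
req.valLen = strlen(req.val);

char    buf[256] = {0};                  // illustrative size
int32_t len = tSerializeSShowVariablesReq(buf, sizeof(buf), &req);
tFreeSShowVariablesReq(&req);            // sender side, as done in parTranslater.c

SShowVariablesReq decoded = {0};
int32_t code = tDeserializeSShowVariablesReq(buf, len, &decoded);
// decoded.val is a fresh calloc'ed, NUL-terminated copy of the pattern
tFreeSShowVariablesReq(&decoded);        // receiver side, as done in mndConfig.c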
From 4d7c1a40671111a02903917bcc3b0c3099f0c1e4 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 09:06:08 +0800
Subject: [PATCH 044/105] feat: csv supports exporting create sql stmt
---
tools/taos-tools/inc/benchCsv.h | 3 -
tools/taos-tools/src/benchCsv.c | 288 ++++++++++++++++++++++++++++++++
2 files changed, 288 insertions(+), 3 deletions(-)
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index 11666a0b45..62e0dea7d7 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -95,9 +95,6 @@ typedef struct {
} CsvThreadArgs;
-
-
-
int csvTestProcess();
#endif // INC_BENCHCSV_H_
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 92641f8ea8..246ff79287 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -167,11 +167,296 @@ static int csvGenCsvHeader(CsvWriteMeta* write_meta) {
pos += snprintf(buf + pos, size - pos, ",%s", tag->name);
}
+ // line break
+ pos += snprintf(buf + pos, size - pos, "\n");
+
write_meta->csv_header_length = (pos > 0 && pos < size) ? pos : 0;
return (pos > 0 && pos < size) ? 0 : -1;
}
+int csvGenCreateDbSql(SDataBase* db, char* buf, int size) {
+ int pos = 0;
+
+ pos += snprintf(buf + pos, size - pos, "CREATE DATABASE IF NOT EXISTS ");
+ if (pos <= 0 || pos >= size) return -1;
+
+ pos += snprintf(buf + pos, size - pos, g_arguments->escape_character ? "`%s`" : "%s", db->dbName);
+ if (pos <= 0 || pos >= size) return -1;
+
+ if (-1 != g_arguments->inputted_vgroups) {
+ pos += snprintf(buf + pos, size - pos, " VGROUPS %d", g_arguments->inputted_vgroups);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ if (db->cfgs) {
+ for (size i = 0; i < db->cfgs->size; ++i) {
+ SDbCfg* cfg = benchArrayGet(db->cfgs, i);
+ if (cfg->valuestring) {
+ pos += snprintf(buf + pos, size - pos, " %s %s", cfg->name, cfg->valuestring);
+ } else {
+ pos += snprintf(buf + pos, size - pos, " %s %d", cfg->name, cfg->valueint);
+ }
+ if (pos <= 0 || pos >= size) return -1;
+ }
+ }
+
+ switch (db->precision) {
+ case TSDB_TIME_PRECISION_MILLI:
+ pos += snprintf(buf + pos, size - pos, " PRECISION 'ms';\n");
+ break;
+ case TSDB_TIME_PRECISION_MICRO:
+ pos += snprintf(buf + pos, size - pos, " PRECISION 'us';\n");
+ break;
+ case TSDB_TIME_PRECISION_NANO:
+ pos += snprintf(buf + pos, size - pos, " PRECISION 'ns';\n");
+ break;
+ }
+
+ return (pos > 0 && pos < size) ? pos : -1;
+}
+
+
+static int csvExportCreateDbSql(CsvWriteMeta* write_meta, FILE* fp) {
+ char buf[LARGE_BUFF_LEN] = {};
+ int ret = 0;
+ int length = 0;
+
+ length = csvGenCreateDbSql(write_meta->db, buf, sizeof(buf));
+ if (length < 0) {
+ errorPrint("Failed to generate create db sql, maybe buffer[%d] not enough.\n", sizeof(buf));
+ return -1;
+ }
+
+ ret = fwrite(buf, 1, length, fp);
+ if (ret != length) {
+ errorPrint("Failed to write create db sql: %s. expected written %d but %d.\n",
+ buf, length, ret);
+ if (ferror(fp)) {
+ perror("error");
+ }
+ return -1;
+ }
+
+ return 0;
+}
+
+
+int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
+ int pos = 0;
+
+ pos += snprintf(buf + pos, size - pos, "CREATE TABLE IF NOT EXISTS ");
+ if (pos <= 0 || pos >= size) return -1;
+
+ pos += snprintf(buf + pos, size - pos, g_arguments->escape_character ? "`%s`.`%s`" : "%s.%s", db->dbName, stb->stbName);
+ if (pos <= 0 || pos >= size) return -1;
+
+ pos += snprintf(buf + pos, size - pos, " (ts TIMESTAMP");
+ if (pos <= 0 || pos >= size) return -1;
+
+
+ // columns
+ for (sizt_t i = 0; i < stb->cols->size; ++i) {
+ Field* col = benchArrayGet(stb->cols, i);
+
+ if (col->type == TSDB_DATA_TYPE_BINARY
+ || col->type == TSDB_DATA_TYPE_NCHAR
+ || col->type == TSDB_DATA_TYPE_VARBINARY
+ || col->type == TSDB_DATA_TYPE_GEOMETRY) {
+
+ if (col->type == TSDB_DATA_TYPE_GEOMETRY && col->length < 21) {
+ errorPrint("%s() LN%d, geometry filed len must be greater than 21 on %d\n", __func__, __LINE__, i);
+ return -1;
+ }
+
+ pos += snprintf(buf + pos, size - pos, ",%s %s(%d)", col->name, convertDatatypeToString(col->type), col->length);
+ } else {
+ pos += snprintf(buf + pos, size - pos, ",%s %s", col->name, convertDatatypeToString(col->type));
+ }
+ if (pos <= 0 || pos >= size) return -1;
+
+ // primary key
+ if (stb->primary_key && i == 0) {
+ pos += snprintf(buf + pos, size - pos, " %s", PRIMARY_KEY);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ // compress key
+ if (strlen(col->encode) > 0) {
+ pos += snprintf(buf + pos, size - pos, " encode '%s'", col->encode);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+ if (strlen(col->compress) > 0) {
+ pos += snprintf(buf + pos, size - pos, " compress '%s'", col->compress);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+ if (strlen(col->level) > 0) {
+ pos += snprintf(buf + pos, size - pos, " level '%s'", col->level);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+ }
+
+ pos += snprintf(buf + pos, size - pos, ") TAGS (");
+ if (pos <= 0 || pos >= size) return -1;
+
+
+ // tags
+ for (sizt_t i = 0; i < stb->tags->size; ++i) {
+ Field* tag = benchArrayGet(stb->tags, i);
+
+ if (i > 0) {
+ pos += snprintf(buf + pos, size - pos, ",");
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ if (tag->type == TSDB_DATA_TYPE_BINARY
+ || tag->type == TSDB_DATA_TYPE_NCHAR
+ || tag->type == TSDB_DATA_TYPE_VARBINARY
+ || tag->type == TSDB_DATA_TYPE_GEOMETRY) {
+
+ if (tag->type == TSDB_DATA_TYPE_GEOMETRY && tag->length < 21) {
+ errorPrint("%s() LN%d, geometry filed len must be greater than 21 on %d\n", __func__, __LINE__, i);
+ return -1;
+ }
+
+ pos += snprintf(buf + pos, size - pos, "%s %s(%d)", tag->name, convertDatatypeToString(tag->type), tag->length);
+
+ } else {
+ pos += snprintf(buf + pos, size - pos, "%s %s", tag->name, convertDatatypeToString(tag->type));
+ }
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ pos += snprintf(buf + pos, size - pos, ")");
+ if (pos <= 0 || pos >= size) return -1;
+
+
+ // comment
+ if (stb->comment != NULL) {
+ pos += snprintf(buf + pos, size - pos," COMMENT '%s'", stb->comment);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ // delay
+ if (stb->delay >= 0) {
+ pos += snprintf(buf + pos, size - pos, " DELAY %d", stb->delay);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ // file factor
+ if (stb->file_factor >= 0) {
+ pos += snprintf(buf + pos, size - pos, " FILE_FACTOR %f", stb->file_factor / 100.0);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ // rollup
+ if (stb->rollup != NULL) {
+ pos += snprintf(buf + pos, size - pos, " ROLLUP(%s)", stb->rollup);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ // max delay
+ if (stb->max_delay != NULL) {
+ pos += snprintf(buf + pos, size - pos, " MAX_DELAY %s", stb->max_delay);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ // watermark
+ if (stb->watermark != NULL) {
+ pos += snprintf(buf + pos, size - pos, " WATERMARK %s", stb->watermark);
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ bool first_sma = true;
+ for (size_t i = 0; i < stb->cols->size; ++i) {
+ Field* col = benchArrayGet(stb->cols, i);
+ if (col->sma) {
+ if (first_sma) {
+ pos += snprintf(buf + pos, size - pos, " SMA(%s", col->name);
+ first_sma = false;
+ } else {
+ pos += snprintf(buf + pos, size - pos, ",%s", col->name);
+ }
+ if (pos <= 0 || pos >= size) return -1;
+ }
+ }
+ if (!first_sma) {
+ pos += snprintf(buf + pos, size - pos, ")");
+ if (pos <= 0 || pos >= size) return -1;
+ }
+
+ infoPrint("create stable: <%s>\n", buf);
+ return (pos > 0 && pos < size) ? pos : -1;
+}
+
+
+static int csvExportCreateStbSql(CsvWriteMeta* write_meta, FILE* fp) {
+ char buf[4096] = {};
+ int ret = 0;
+ int length = 0;
+
+ length = csvGenCreateStbSql(write_meta->db, write_meta->stb, buf, sizeof(buf));
+ if (length < 0) {
+ errorPrint("Failed to generate create stb sql, maybe buffer[%d] not enough.\n", sizeof(buf));
+ return -1;
+ }
+
+ ret = fwrite(buf, 1, length, fp);
+ if (ret != length) {
+ errorPrint("Failed to write create stb sql: %s. expected written %d but %d.\n",
+ buf, length, ret);
+ if (ferror(fp)) {
+ perror("error");
+ }
+ return -1;
+ }
+
+ return 0;
+}
+
+
+static int csvExportCreateSql(CsvWriteMeta* write_meta) {
+ char fullname[MAX_PATH_LEN] = {};
+ char buf[LARGE_BUFF_LEN] = {};
+ int ret = 0;
+ int length = 0;
+ FILE* fp = NULL;
+
+
+ length = snprintf(fullname, sizeof(fullname), "%s%s.txt", g_arguments->output_path, "create_stmt");
+ if (length <= 0 || length >= sizeof(fullname)) {
+ return -1;
+ }
+
+ FILE* fp = fopen(fullname, "w");
+ if (!fp) {
+ return -1;
+ }
+
+
+ // export db
+ ret = csvExportCreateDbSql(write_meta, fp);
+ if (ret < 0) {
+ goto end;
+ }
+
+ // export stb
+ ret = csvExportCreateStbSql(write_meta, fp);
+ if (ret < 0) {
+ goto end;
+ }
+
+ succPrint("Export create sql to file: %s successfully..\n", fullname);
+
+end:
+ if (fp) {
+ fclose(fp);
+ }
+
+ return ret;
+}
+
+
static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write_meta) {
write_meta->naming_type = csvGetFileNamingType(stb);
write_meta->total_threads = 1;
@@ -816,6 +1101,9 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
ts_elapse / 1000.0, total_rows, write_meta->total_threads, g_arguments->output_path, total_rows * 1000.0 / ts_elapse);
}
+ // export create db/stb sql
+ ret = csvExportCreateSql(write_meta);
+
end:
tmfree(pids);
tmfree(args);
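
Note: the csvGen* builders added above rely on snprintf's return value to detect truncation: it reports the length it wanted to write, so pos >= size means the buffer was too small and the builder returns -1. A tiny standalone illustration of that convention:

#include <stdio.h>

int main(void) {
    char buf[8];
    int  pos = snprintf(buf, sizeof(buf), "CREATE DATABASE IF NOT EXISTS d0");
    // snprintf returns the full untruncated length (32 here), while buf only
    // holds "CREATE " -- exactly the pos >= size case the builders treat as an error.
    printf("%d [%s]\n", pos, buf);       // prints: 32 [CREATE ]
    return 0;
}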
From 8ba478cad0ffd60897601c78ca8c245c1b93a833 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 14:34:15 +0800
Subject: [PATCH 045/105] fix: resolve csv compilation errors
---
tools/taos-tools/inc/bench.h | 11 +-
tools/taos-tools/inc/benchCsv.h | 9 +-
tools/taos-tools/inc/benchLog.h | 32 ++--
tools/taos-tools/src/benchCsv.c | 222 ++++++++++++++--------------
tools/taos-tools/src/benchJsonOpt.c | 1 -
5 files changed, 139 insertions(+), 136 deletions(-)
diff --git a/tools/taos-tools/inc/bench.h b/tools/taos-tools/inc/bench.h
index ac187d2575..30973170a3 100644
--- a/tools/taos-tools/inc/bench.h
+++ b/tools/taos-tools/inc/bench.h
@@ -719,6 +719,14 @@ typedef struct STmqMetaInfo_S {
uint16_t iface;
} STmqMetaInfo;
+
+typedef enum {
+ CSV_COMPRESS_NONE = 0,
+ CSV_COMPRESS_FAST = 1,
+ CSV_COMPRESS_BALANCE = 6,
+ CSV_COMPRESS_BEST = 9
+} CsvCompressionLevel;
+
typedef struct SArguments_S {
uint8_t taosc_version;
char * metaFile;
@@ -786,9 +794,10 @@ typedef struct SArguments_S {
char* csv_file_prefix;
char* csv_ts_format;
char* csv_ts_interval;
+ char* csv_tbname_alias;
long csv_ts_intv_secs;
bool csv_output_header;
- bool csv_tbname_alias;
+
CsvCompressionLevel csv_compress_level;
} SArguments;
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index 62e0dea7d7..f9f87aa341 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -27,13 +27,6 @@ typedef enum {
CSV_NAMING_B_THREAD_TIME_SLICE
} CsvNamingType;
-typedef enum {
- CSV_COMPRESS_NONE = 0,
- CSV_COMPRESS_FAST = 1,
- CSV_COMPRESS_BALANCE = 6,
- CSV_COMPRESS_BEST = 9
-} CsvCompressionLevel;
-
typedef enum {
CSV_ERR_OK = 0,
CSV_ERR_OPEN_FAILED,
@@ -85,7 +78,7 @@ typedef struct {
size_t thread_id;
bool output_header;
int tags_buf_size;
- CsvRowTagsBuf* tags_buf_bucket;
+ CsvRowTagsBuf* tags_buf_array;
CsvRowColsBuf* cols_buf;
} CsvThreadMeta;
diff --git a/tools/taos-tools/inc/benchLog.h b/tools/taos-tools/inc/benchLog.h
index 426112bcd8..961a037e3c 100644
--- a/tools/taos-tools/inc/benchLog.h
+++ b/tools/taos-tools/inc/benchLog.h
@@ -16,6 +16,8 @@
#ifndef INC_BENCHLOG_H_
#define INC_BENCHLOG_H_
+#include
+
//
// suport thread safe log module
//
@@ -53,7 +55,7 @@ void exitLog();
(int32_t)timeSecs.tv_usec); \
fprintf(stdout, "DEBG: "); \
fprintf(stdout, "%s(%d) ", __FILE__, __LINE__); \
- fprintf(stdout, "" fmt, __VA_ARGS__); \
+ fprintf(stdout, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_STDOUT); \
} \
} while (0)
@@ -74,7 +76,7 @@ void exitLog();
(int32_t)timeSecs.tv_usec); \
fprintf(stdout, "DEBG: "); \
fprintf(stdout, "%s(%d) ", __FILE__, __LINE__); \
- fprintf(stdout, "" fmt, __VA_ARGS__); \
+ fprintf(stdout, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_STDOUT); \
} \
} while (0)
@@ -94,7 +96,7 @@ void exitLog();
do { \
if (g_arguments->debug_print) { \
lockLog(LOG_STDOUT); \
- fprintf(stdout, "" fmt, __VA_ARGS__); \
+ fprintf(stdout, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_STDOUT); \
} \
} while (0)
@@ -102,14 +104,14 @@ void exitLog();
#define infoPrintNoTimestamp(fmt, ...) \
do { \
lockLog(LOG_STDOUT); \
- fprintf(stdout, "" fmt, __VA_ARGS__); \
+ fprintf(stdout, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_STDOUT); \
} while (0)
#define infoPrintNoTimestampToFile(fmt, ...) \
do { \
lockLog(LOG_RESULT); \
- fprintf(g_arguments->fpOfInsertResult, "" fmt, __VA_ARGS__); \
+ fprintf(g_arguments->fpOfInsertResult, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_RESULT); \
} while (0)
@@ -126,7 +128,7 @@ void exitLog();
ptm->tm_mon + 1, \
ptm->tm_mday, ptm->tm_hour, ptm->tm_min, ptm->tm_sec, \
(int32_t)timeSecs.tv_usec); \
- fprintf(stdout, "INFO: " fmt, __VA_ARGS__); \
+ fprintf(stdout, "INFO: " fmt, ##__VA_ARGS__); \
unlockLog(LOG_STDOUT); \
} while (0)
@@ -142,7 +144,7 @@ void exitLog();
fprintf(g_arguments->fpOfInsertResult,"[%02d/%02d %02d:%02d:%02d.%06d] ", ptm->tm_mon + 1, \
ptm->tm_mday, ptm->tm_hour, ptm->tm_min, ptm->tm_sec, \
(int32_t)timeSecs.tv_usec); \
- fprintf(g_arguments->fpOfInsertResult, "INFO: " fmt, __VA_ARGS__);\
+ fprintf(g_arguments->fpOfInsertResult, "INFO: " fmt, ##__VA_ARGS__);\
unlockLog(LOG_RESULT); \
} while (0)
@@ -160,7 +162,7 @@ void exitLog();
ptm->tm_mon + 1, \
ptm->tm_mday, ptm->tm_hour, ptm->tm_min, ptm->tm_sec, \
(int32_t)timeSecs.tv_usec); \
- fprintf(stderr, "PERF: " fmt, __VA_ARGS__); \
+ fprintf(stderr, "PERF: " fmt, ##__VA_ARGS__); \
unlockLog(LOG_STDERR); \
if (g_arguments->fpOfInsertResult && !g_arguments->terminate) { \
lockLog(LOG_RESULT); \
@@ -172,7 +174,7 @@ void exitLog();
(int32_t)timeSecs.tv_usec); \
fprintf(g_arguments->fpOfInsertResult, "PERF: "); \
fprintf(g_arguments->fpOfInsertResult, \
- "" fmt, __VA_ARGS__); \
+ "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_RESULT); \
} \
} \
@@ -196,7 +198,7 @@ void exitLog();
if (g_arguments->debug_print) { \
fprintf(stderr, "%s(%d) ", __FILE__, __LINE__); \
} \
- fprintf(stderr, "" fmt, __VA_ARGS__); \
+ fprintf(stderr, "" fmt, ##__VA_ARGS__); \
fprintf(stderr, "\033[0m"); \
unlockLog(LOG_STDERR); \
if (g_arguments->fpOfInsertResult && !g_arguments->terminate) { \
@@ -206,7 +208,7 @@ void exitLog();
ptm->tm_mday, ptm->tm_hour, ptm->tm_min, ptm->tm_sec, \
(int32_t)timeSecs.tv_usec); \
fprintf(g_arguments->fpOfInsertResult, "ERROR: "); \
- fprintf(g_arguments->fpOfInsertResult, "" fmt, __VA_ARGS__); \
+ fprintf(g_arguments->fpOfInsertResult, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_RESULT); \
} \
} while (0)
@@ -229,7 +231,7 @@ void exitLog();
if (g_arguments->debug_print) { \
fprintf(stderr, "%s(%d) ", __FILE__, __LINE__); \
} \
- fprintf(stderr, "" fmt, __VA_ARGS__); \
+ fprintf(stderr, "" fmt, ##__VA_ARGS__); \
fprintf(stderr, "\033[0m"); \
unlockLog(LOG_STDERR); \
if (g_arguments->fpOfInsertResult && !g_arguments->terminate) { \
@@ -239,7 +241,7 @@ void exitLog();
ptm->tm_mday, ptm->tm_hour, ptm->tm_min, ptm->tm_sec, \
(int32_t)timeSecs.tv_usec); \
fprintf(g_arguments->fpOfInsertResult, "WARN: "); \
- fprintf(g_arguments->fpOfInsertResult, "" fmt, __VA_ARGS__); \
+ fprintf(g_arguments->fpOfInsertResult, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_RESULT); \
} \
} while (0)
@@ -262,7 +264,7 @@ void exitLog();
if (g_arguments->debug_print) { \
fprintf(stderr, "%s(%d) ", __FILE__, __LINE__); \
} \
- fprintf(stderr, "" fmt, __VA_ARGS__); \
+ fprintf(stderr, "" fmt, ##__VA_ARGS__); \
fprintf(stderr, "\033[0m"); \
unlockLog(LOG_STDERR); \
if (g_arguments->fpOfInsertResult && !g_arguments->terminate) { \
@@ -272,7 +274,7 @@ void exitLog();
ptm->tm_mday, ptm->tm_hour, ptm->tm_min, ptm->tm_sec, \
(int32_t)timeSecs.tv_usec); \
fprintf(g_arguments->fpOfInsertResult, "SUCC: "); \
- fprintf(g_arguments->fpOfInsertResult, "" fmt, __VA_ARGS__); \
+ fprintf(g_arguments->fpOfInsertResult, "" fmt, ##__VA_ARGS__); \
unlockLog(LOG_RESULT); \
} \
} while (0)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 246ff79287..c491e94606 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -14,6 +14,7 @@
#include
#include
#include
+#include
#include "benchLog.h"
#include "benchData.h"
@@ -77,25 +78,25 @@ static CsvNamingType csvGetFileNamingType(SSuperTable* stb) {
}
-static void csvCalcTimestampStep(CsvWriteMeta* meta) {
+static void csvCalcTimestampStep(CsvWriteMeta* write_meta) {
time_t ts_step = 0;
- if (meta->db->precision == TSDB_TIME_PRECISION_MICRO) {
+ if (write_meta->db->precision == TSDB_TIME_PRECISION_MICRO) {
ts_step = g_arguments->csv_ts_intv_secs * 1000000L;
- } else if (db->precision == TSDB_TIME_PRECISION_NANO) {
+ } else if (write_meta->db->precision == TSDB_TIME_PRECISION_NANO) {
ts_step = g_arguments->csv_ts_intv_secs * 1000000000L;
} else {
ts_step = g_arguments->csv_ts_intv_secs * 1000L;
}
- meta->ts_step = ts_step;
+ write_meta->ts_step = ts_step;
return;
}
-static void csvCalcCtbRange(CsvThreadMeta* meta, size_t total_threads, int64_t ctb_offset, int64_t ctb_count) {
+static void csvCalcCtbRange(CsvThreadMeta* thread_meta, size_t total_threads, int64_t ctb_offset, int64_t ctb_count) {
uint64_t ctb_start_idx = 0;
uint64_t ctb_end_idx = 0;
- size_t tid_idx = meta->thread_id - 1;
+ size_t tid_idx = thread_meta->thread_id - 1;
size_t base = ctb_count / total_threads;
size_t remainder = ctb_count % total_threads;
@@ -111,35 +112,34 @@ static void csvCalcCtbRange(CsvThreadMeta* meta, size_t total_threads, int64_t c
ctb_end_idx = ctb_offset + ctb_count;
}
- meta->ctb_start_idx = ctb_start_idx;
- meta->ctb_end_idx = ctb_end_idx;
- meta->ctb_count = ctb_count;
+ thread_meta->ctb_start_idx = ctb_start_idx;
+ thread_meta->ctb_end_idx = ctb_end_idx;
+ thread_meta->ctb_count = ctb_count;
return;
}
-static void csvGenThreadFormatter(CsvWriteMeta* meta) {
+static void csvGenThreadFormatter(CsvWriteMeta* write_meta) {
int digits = 0;
- if (meta->total_threads == 0) {
+ if (write_meta->total_threads == 0) {
digits = 1;
} else {
- for (int n = meta->total_threads; n > 0; n /= 10) {
+ for (int n = write_meta->total_threads; n > 0; n /= 10) {
digits++;
}
}
if (digits <= 1) {
- (void)snprintf(meta->thread_formatter, sizeof(meta->thread_formatter), "%%d");
+ (void)snprintf(write_meta->thread_formatter, sizeof(write_meta->thread_formatter), "%%d");
} else {
- (void)snprintf(meta->thread_formatter, sizeof(meta->thread_formatter), "%%0%dd", digits);
+ (void)snprintf(write_meta->thread_formatter, sizeof(write_meta->thread_formatter), "%%0%dd", digits);
}
return;
}
static int csvGenCsvHeader(CsvWriteMeta* write_meta) {
- SDataBase* db = write_meta->db;
SSuperTable* stb = write_meta->stb;
char* buf = write_meta->csv_header;
int pos = 0;
@@ -190,7 +190,7 @@ int csvGenCreateDbSql(SDataBase* db, char* buf, int size) {
}
if (db->cfgs) {
- for (size i = 0; i < db->cfgs->size; ++i) {
+ for (size_t i = 0; i < db->cfgs->size; ++i) {
SDbCfg* cfg = benchArrayGet(db->cfgs, i);
if (cfg->valuestring) {
pos += snprintf(buf + pos, size - pos, " %s %s", cfg->name, cfg->valuestring);
@@ -224,7 +224,7 @@ static int csvExportCreateDbSql(CsvWriteMeta* write_meta, FILE* fp) {
length = csvGenCreateDbSql(write_meta->db, buf, sizeof(buf));
if (length < 0) {
- errorPrint("Failed to generate create db sql, maybe buffer[%d] not enough.\n", sizeof(buf));
+ errorPrint("Failed to generate create db sql, maybe buffer[%zu] not enough.\n", sizeof(buf));
return -1;
}
@@ -256,7 +256,7 @@ int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
// columns
- for (sizt_t i = 0; i < stb->cols->size; ++i) {
+ for (size_t i = 0; i < stb->cols->size; ++i) {
Field* col = benchArrayGet(stb->cols, i);
if (col->type == TSDB_DATA_TYPE_BINARY
@@ -265,7 +265,7 @@ int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
|| col->type == TSDB_DATA_TYPE_GEOMETRY) {
if (col->type == TSDB_DATA_TYPE_GEOMETRY && col->length < 21) {
- errorPrint("%s() LN%d, geometry filed len must be greater than 21 on %d\n", __func__, __LINE__, i);
+ errorPrint("%s() LN%d, geometry filed len must be greater than 21 on %zu.\n", __func__, __LINE__, i);
return -1;
}
@@ -301,7 +301,7 @@ int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
// tags
- for (sizt_t i = 0; i < stb->tags->size; ++i) {
+ for (size_t i = 0; i < stb->tags->size; ++i) {
Field* tag = benchArrayGet(stb->tags, i);
if (i > 0) {
@@ -315,7 +315,7 @@ int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
|| tag->type == TSDB_DATA_TYPE_GEOMETRY) {
if (tag->type == TSDB_DATA_TYPE_GEOMETRY && tag->length < 21) {
- errorPrint("%s() LN%d, geometry filed len must be greater than 21 on %d\n", __func__, __LINE__, i);
+ errorPrint("%s() LN%d, geometry filed len must be greater than 21 on %zu.\n", __func__, __LINE__, i);
return -1;
}
@@ -397,7 +397,7 @@ static int csvExportCreateStbSql(CsvWriteMeta* write_meta, FILE* fp) {
length = csvGenCreateStbSql(write_meta->db, write_meta->stb, buf, sizeof(buf));
if (length < 0) {
- errorPrint("Failed to generate create stb sql, maybe buffer[%d] not enough.\n", sizeof(buf));
+ errorPrint("Failed to generate create stb sql, maybe buffer[%zu] not enough.\n", sizeof(buf));
return -1;
}
@@ -417,7 +417,6 @@ static int csvExportCreateStbSql(CsvWriteMeta* write_meta, FILE* fp) {
static int csvExportCreateSql(CsvWriteMeta* write_meta) {
char fullname[MAX_PATH_LEN] = {};
- char buf[LARGE_BUFF_LEN] = {};
int ret = 0;
int length = 0;
FILE* fp = NULL;
@@ -428,7 +427,7 @@ static int csvExportCreateSql(CsvWriteMeta* write_meta) {
return -1;
}
- FILE* fp = fopen(fullname, "w");
+ fp = fopen(fullname, "w");
if (!fp) {
return -1;
}
@@ -475,7 +474,7 @@ static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write
return -1;
}
- switch (meta.naming_type) {
+ switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE: {
break;
}
@@ -484,18 +483,18 @@ static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write
break;
}
case CSV_NAMING_B_THREAD: {
- meta.total_threads = g_arguments->nthreads;
+ write_meta->total_threads = g_arguments->nthreads;
csvGenThreadFormatter(write_meta);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
- meta.total_threads = g_arguments->nthreads;
+ write_meta->total_threads = g_arguments->nthreads;
csvGenThreadFormatter(write_meta);
csvCalcTimestampStep(write_meta);
break;
}
default: {
- meta.naming_type = CSV_NAMING_I_SINGLE;
+ write_meta->naming_type = CSV_NAMING_I_SINGLE;
break;
}
}
@@ -516,10 +515,10 @@ static void csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id, CsvT
thread_meta->thread_id = thread_id;
thread_meta->output_header = false;
thread_meta->tags_buf_size = 0;
- thread_meta->tags_buf_bucket = NULL;
+ thread_meta->tags_buf_array = NULL;
thread_meta->cols_buf = NULL;
- csvCalcCtbRange(write_meta, write_meta->total_threads, stb->childTblFrom, stb->childTblCount);
+ csvCalcCtbRange(thread_meta, write_meta->total_threads, stb->childTblFrom, stb->childTblCount);
switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE:
@@ -533,7 +532,6 @@ static void csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id, CsvT
break;
}
default: {
- thread_meta->naming_type = CSV_NAMING_I_SINGLE;
break;
}
}
@@ -553,7 +551,7 @@ static void csvUpdateSliceRange(CsvWriteMeta* write_meta, CsvThreadMeta* thread_
case CSV_NAMING_I_TIME_SLICE:
case CSV_NAMING_B_THREAD_TIME_SLICE: {
thread_meta->start_secs = csvGetStartSeconds(db->precision, last_end_ts);
- thread_meta->end_secs = thread_meta.start_secs + g_arguments->csv_ts_intv_secs;
+ thread_meta->end_secs = thread_meta->start_secs + g_arguments->csv_ts_intv_secs;
break;
}
default: {
@@ -569,7 +567,7 @@ static const char* csvGetGzipFilePrefix() {
if (g_arguments->csv_compress_level == CSV_COMPRESS_NONE) {
return "";
} else {
- return ".gz"
+ return ".gz";
}
}
@@ -583,26 +581,26 @@ static int csvGetFileFullname(CsvWriteMeta* write_meta, CsvThreadMeta* thread_me
const char* file_prefix = g_arguments->csv_file_prefix;
const char* gzip_suffix = csvGetGzipFilePrefix();
- switch (meta->naming_type) {
+ switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE: {
- ret = snprintf(fullname, size, "%s%s.csv%s", base_path, file_prefix, g_arguments->csv_compress_level, gzip_suffix);
+ ret = snprintf(fullname, size, "%s%s.csv%s", base_path, file_prefix, gzip_suffix);
break;
}
case CSV_NAMING_I_TIME_SLICE: {
- csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
- csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
+ csvConvertTime2String(thread_meta->start_secs, start_time_buf, sizeof(start_time_buf));
+ csvConvertTime2String(thread_meta->end_secs, end_time_buf, sizeof(end_time_buf));
ret = snprintf(fullname, size, "%s%s_%s_%s.csv%s", base_path, file_prefix, start_time_buf, end_time_buf, gzip_suffix);
break;
}
case CSV_NAMING_B_THREAD: {
- (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
+ (void)snprintf(thread_buf, sizeof(thread_buf), write_meta->thread_formatter, thread_meta->thread_id);
ret = snprintf(fullname, size, "%s%s_%s.csv%s", base_path, file_prefix, thread_buf, gzip_suffix);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
- (void)snprintf(thread_buf, sizeof(thread_buf), meta->thread_formatter, meta->thread_id);
- csvConvertTime2String(meta->start_secs, start_time_buf, sizeof(start_time_buf));
- csvConvertTime2String(meta->end_secs, end_time_buf, sizeof(end_time_buf));
+ (void)snprintf(thread_buf, sizeof(thread_buf), write_meta->thread_formatter, thread_meta->thread_id);
+ csvConvertTime2String(thread_meta->start_secs, start_time_buf, sizeof(start_time_buf));
+ csvConvertTime2String(thread_meta->end_secs, end_time_buf, sizeof(end_time_buf));
ret = snprintf(fullname, size, "%s%s_%s_%s_%s.csv%s", base_path, file_prefix, thread_buf, start_time_buf, end_time_buf, gzip_suffix);
break;
}
@@ -699,6 +697,24 @@ static int csvGenRowColData(char* buf, int size, SSuperTable* stb, int64_t ts, i
}
+static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowTagsBuf* tags_buf_array) {
+ if (!thread_meta || !tags_buf_array) {
+ return;
+ }
+
+ for (uint64_t i = 0 ; i < thread_meta->ctb_count; ++i) {
+ char* tags_buf = tags_buf_array[i].buf;
+ if (tags_buf) {
+ tmfree(tags_buf_array);
+ } else {
+ break;
+ }
+ }
+ tmfree(tags_buf_array);
+ return;
+}
+
+
static CsvRowTagsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
SSuperTable* stb = write_meta->stb;
int ret = 0;
@@ -708,8 +724,8 @@ static CsvRowTagsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta*
return NULL;
}
- CsvRowTagsBuf* tags_buf_bucket = (CsvRowTagsBuf*)benchCalloc(thread_meta->ctb_count, sizeof(CsvRowTagsBuf), true);
- if (!tags_buf_bucket) {
+ CsvRowTagsBuf* tags_buf_array = (CsvRowTagsBuf*)benchCalloc(thread_meta->ctb_count, sizeof(CsvRowTagsBuf), true);
+ if (!tags_buf_array) {
return NULL;
}
@@ -721,43 +737,25 @@ static CsvRowTagsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta*
goto error;
}
- tags_buf_bucket[i].buf = tags_buf;
- write_meta->tags_buf_size = tags_buf_size;
+ tags_buf_array[i].buf = tags_buf;
+ thread_meta->tags_buf_size = tags_buf_size;
ret = csvGenRowTagData(tags_buf, tags_buf_size, stb, thread_meta->ctb_start_idx + i, &tk);
if (ret <= 0) {
goto error;
}
- tags_buf_bucket[i].length = ret;
+ tags_buf_array[i].length = ret;
}
- return tags_buf_bucket;
+ return tags_buf_array;
error:
- csvFreeCtbTagData(thread_meta, tags_buf_bucket);
+ csvFreeCtbTagData(thread_meta, tags_buf_array);
return NULL;
}
-static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowTagsBuf* tags_buf_bucket) {
- if (!thread_meta || !tags_buf_bucket) {
- return;
- }
-
- for (uint64_t i = 0 ; i < thread_meta->ctb_count; ++i) {
- char* tags_buf = tags_buf_bucket[i].buf;
- if (tags_buf) {
- tmfree(tags_buf_bucket);
- } else {
- break;
- }
- }
- tmfree(tags_buf_bucket);
- return;
-}
-
-
static CsvFileHandle* csvOpen(const char* filename, CsvCompressionLevel compress_level) {
CsvFileHandle* fhdl = NULL;
bool failed = false;
@@ -770,13 +768,13 @@ static CsvFileHandle* csvOpen(const char* filename, CsvCompressionLevel compress
}
if (compress_level == CSV_COMPRESS_NONE) {
- fhdl.handle.fp = fopen(filename, "w");
- failed = (!fhdl.handle.fp);
+ fhdl->handle.fp = fopen(filename, "w");
+ failed = (!fhdl->handle.fp);
} else {
char mode[TINY_BUFF_LEN];
(void)snprintf(mode, sizeof(mode), "wb%d", compress_level);
- fhdl.handle.gf = gzopen(filename, mode);
- failed = (!fhdl.handle.gf);
+ fhdl->handle.gf = gzopen(filename, mode);
+ failed = (!fhdl->handle.gf);
}
if (failed) {
@@ -806,9 +804,9 @@ static CsvIoError csvWrite(CsvFileHandle* fhdl, const char* buf, size_t size) {
return CSV_ERR_WRITE_FAILED;
}
} else {
- unsigned int ret = gzwrite(fhdl->handle.gf, buf, size);
+ int ret = gzwrite(fhdl->handle.gf, buf, size);
if (ret != size) {
- errorPrint("Failed to write csv file: %s. expected written %zu but %zu.\n",
+ errorPrint("Failed to write csv file: %s. expected written %zu but %d.\n",
fhdl->filename, size, ret);
int errnum;
const char* errmsg = gzerror(fhdl->handle.gf, &errnum);
@@ -839,16 +837,16 @@ static void csvClose(CsvFileHandle* fhdl) {
static int csvWriteFile(CsvFileHandle* fhdl, uint64_t ctb_idx, int64_t cur_ts, int64_t* ck, CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
SDataBase* db = write_meta->db;
SSuperTable* stb = write_meta->stb;
- CsvRowTagsBuf* tags_buf_bucket = thread_meta->tags_buf_bucket;
- CsvRowColsBuf* tags_buf = &tags_buf_bucket[ctb_idx];
+ CsvRowTagsBuf* tags_buf_array = thread_meta->tags_buf_array;
+ CsvRowTagsBuf* tags_buf = &tags_buf_array[ctb_idx];
CsvRowColsBuf* cols_buf = thread_meta->cols_buf;
int ret = 0;
ret = csvGenRowColData(cols_buf->buf, cols_buf->buf_size, stb, cur_ts, db->precision, ck);
if (ret <= 0) {
- errorPrint("Failed to generate csv column data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
- db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id, ctb_idx);
+ errorPrint("Failed to generate csv column data. database: %s, super table: %s, naming type: %d, thread index: %zu, ctb index: %" PRIu64 ".\n",
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
@@ -858,7 +856,7 @@ static int csvWriteFile(CsvFileHandle* fhdl, uint64_t ctb_idx, int64_t cur_ts, i
if (thread_meta->output_header) {
ret = csvWrite(fhdl, write_meta->csv_header, write_meta->csv_header_length);
if (ret != CSV_ERR_OK) {
- errorPrint("Failed to write csv header data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ errorPrint("Failed to write csv header data. database: %s, super table: %s, naming type: %d, thread index: %zu, ctb index: %" PRIu64 ".\n",
db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
@@ -869,7 +867,7 @@ static int csvWriteFile(CsvFileHandle* fhdl, uint64_t ctb_idx, int64_t cur_ts, i
// write columns
ret = csvWrite(fhdl, cols_buf->buf, cols_buf->length);
if (ret != CSV_ERR_OK) {
- errorPrint("Failed to write csv column data, expected written %d but got %zu. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ errorPrint("Failed to write csv column data. database: %s, super table: %s, naming type: %d, thread index: %zu, ctb index: %" PRIu64 ".\n",
db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
@@ -877,7 +875,7 @@ static int csvWriteFile(CsvFileHandle* fhdl, uint64_t ctb_idx, int64_t cur_ts, i
// write tags
ret = csvWrite(fhdl, tags_buf->buf, tags_buf->length);
if (ret != CSV_ERR_OK) {
- errorPrint("Failed to write csv tag data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ errorPrint("Failed to write csv tag data. database: %s, super table: %s, naming type: %d, thread index: %zu, ctb index: %" PRIu64 ".\n",
db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
@@ -885,7 +883,7 @@ static int csvWriteFile(CsvFileHandle* fhdl, uint64_t ctb_idx, int64_t cur_ts, i
// write line break
ret = csvWrite(fhdl, "\n", 1);
if (ret != CSV_ERR_OK) {
- errorPrint("Failed to write csv line break data. database: %s, super table: %s, naming type: %d, thread index: %d, ctb index: %" PRIu64 ".\n",
+ errorPrint("Failed to write csv line break data. database: %s, super table: %s, naming type: %d, thread index: %zu, ctb index: %" PRIu64 ".\n",
db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id, ctb_idx);
return -1;
}
@@ -922,10 +920,10 @@ static void* csvGenStbThread(void* arg) {
// tags buffer
- CsvRowTagsBuf* tags_buf_bucket = csvGenCtbTagData(write_meta, thread_meta);
- if (!tags_buf_bucket) {
- errorPrint("Failed to generate csv tag data. database: %s, super table: %s, naming type: %d, thread index: %d.\n",
- db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id);
+ CsvRowTagsBuf* tags_buf_array = csvGenCtbTagData(write_meta, thread_meta);
+ if (!tags_buf_array) {
+ errorPrint("Failed to generate csv tag data. database: %s, super table: %s, naming type: %d, thread index: %zu.\n",
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id);
return NULL;
}
@@ -933,8 +931,8 @@ static void* csvGenStbThread(void* arg) {
int buf_size = stb->lenOfCols + stb->cols->size;
char* buf = (char*)benchCalloc(1, buf_size, true);
if (!buf) {
- errorPrint("Failed to malloc csv column buffer. database: %s, super table: %s, naming type: %d, thread index: %d.\n",
- db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id);
+ errorPrint("Failed to malloc csv column buffer. database: %s, super table: %s, naming type: %d, thread index: %zu.\n",
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id);
goto end;
}
@@ -944,24 +942,23 @@ static void* csvGenStbThread(void* arg) {
.length = 0
};
- thread_meta->tags_buf_bucket = tags_buf_bucket;
+ thread_meta->tags_buf_array = tags_buf_array;
thread_meta->cols_buf = &cols_buf;
start_print_ts = toolsGetTimestampMs();
for (cur_ts = write_meta->start_ts; cur_ts < write_meta->end_ts; cur_ts += write_meta->ts_step) {
// get filename
- fullname[MAX_PATH_LEN] = {};
ret = csvGetFileFullname(write_meta, thread_meta, fullname, sizeof(fullname));
if (ret < 0) {
- errorPrint("Failed to generate csv filename. database: %s, super table: %s, naming type: %d, thread index: %d.\n",
- db->dbName, stb->stbName, write_meta.naming_type, thread_meta->thread_id);
+ errorPrint("Failed to generate csv filename. database: %s, super table: %s, naming type: %d, thread index: %zu.\n",
+ db->dbName, stb->stbName, write_meta->naming_type, thread_meta->thread_id);
goto end;
}
// create fd
fhdl = csvOpen(fullname, g_arguments->csv_compress_level);
if (fhdl == NULL) {
- errorPrint("Failed to create csv file. thread index: %d, file: %s, errno: %d, strerror: %s.\n",
+ errorPrint("Failed to create csv file. thread index: %zu, file: %s, errno: %d, strerror: %s.\n",
thread_meta->thread_id, fullname, errno, strerror(errno));
goto end;
}
@@ -972,7 +969,7 @@ static void* csvGenStbThread(void* arg) {
slice_end_ts = MIN(cur_ts + write_meta->ts_step, write_meta->end_ts);
file_rows = 0;
- infoPrint("thread[%d] begin to write csv file: %s.\n", thread_meta->thread_id, fullname);
+ infoPrint("thread[%zu] begin to write csv file: %s.\n", thread_meta->thread_id, fullname);
// write data
while (slice_cur_ts < slice_end_ts) {
@@ -982,7 +979,7 @@ static void* csvGenStbThread(void* arg) {
for (slice_ctb_cur_ts = slice_cur_ts; slice_ctb_cur_ts < slice_batch_ts; slice_ctb_cur_ts += write_meta->stb->timestamp_step) {
ret = csvWriteFile(fhdl, ctb_idx, slice_ctb_cur_ts, &ck, write_meta, thread_meta);
if (!ret) {
- errorPrint("Failed to write csv file. thread index: %d, file: %s, errno: %d, strerror: %s.\n",
+ errorPrint("Failed to write csv file. thread index: %zu, file: %s, errno: %d, strerror: %s.\n",
thread_meta->thread_id, fullname, errno, strerror(errno));
csvClose(fhdl);
goto end;
@@ -995,7 +992,7 @@ static void* csvGenStbThread(void* arg) {
cur_print_ts = toolsGetTimestampMs();
print_ts_elapse = cur_print_ts - pre_print_ts;
if (print_ts_elapse > 30000) {
- infoPrint("thread[%d] has currently inserted rows: %" PRIu64 ", period insert rate: %.2f rows/s.\n",
+ infoPrint("thread[%zu] has currently inserted rows: %" PRIu64 ", period insert rate: %.2f rows/s.\n",
thread_meta->thread_id, total_rows, (total_rows - pre_total_rows) * 1000.0 / print_ts_elapse);
pre_print_ts = cur_print_ts;
@@ -1014,18 +1011,18 @@ static void* csvGenStbThread(void* arg) {
}
csvClose(fhdl);
- csvUpdateSliceRange(write_meta, thread_meta, last_end_ts);
+ csvUpdateSliceRange(write_meta, thread_meta, slice_end_ts);
}
cur_print_ts = toolsGetTimestampMs();
print_ts_elapse = cur_print_ts - start_print_ts;
- succPrint("thread [%d] has completed inserting rows: %" PRIu64 ", insert rate %.2f rows/s.\n",
+ succPrint("thread [%zu] has completed inserting rows: %" PRIu64 ", insert rate %.2f rows/s.\n",
thread_meta->thread_id, total_rows, total_rows * 1000.0 / print_ts_elapse);
end:
thread_meta->total_rows = total_rows;
- csvFreeCtbTagData(tags_buf_bucket);
+ csvFreeCtbTagData(thread_meta, tags_buf_array);
tmfree(buf);
return NULL;
}
@@ -1038,8 +1035,12 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
int64_t start_ts = 0;
int64_t ts_elapse = 0;
+ CsvWriteMeta* write_meta = NULL;
+ CsvThreadArgs* args = NULL;
+ pthread_t* pids = NULL;
- CsvWriteMeta* write_meta = benchCalloc(1, sizeof(CsvWriteMeta), false);
+
+ write_meta = benchCalloc(1, sizeof(CsvWriteMeta), false);
if (!write_meta) {
ret = -1;
goto end;
@@ -1051,13 +1052,13 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
goto end;
}
- CsvThreadArgs* args = benchCalloc(write_meta->total_threads, sizeof(CsvThreadArgs), false);
+ args = benchCalloc(write_meta->total_threads, sizeof(CsvThreadArgs), false);
if (!args) {
ret = -1;
goto end;
}
- pthread_t* pids = benchCalloc(write_meta.total_threads, sizeof(pthread_t), false);
+ pids = benchCalloc(write_meta->total_threads, sizeof(pthread_t), false);
if (!pids) {
ret = -1;
goto end;
@@ -1083,7 +1084,7 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
prompt = false;
}
- infoPrint("pthread_join %d ...\n", i);
+ infoPrint("pthread_join %u ...\n", i);
pthread_join(pids[i], NULL);
}
@@ -1097,7 +1098,7 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
ts_elapse = toolsGetTimestampMs() - start_ts;
if (ts_elapse > 0) {
- succPrint("Spent %.6f seconds to insert rows: %" PRIu64 " with %d thread(s) into %s, at a rate of %.2f rows/s.\n",
+ succPrint("Spent %.6f seconds to insert rows: %" PRIu64 " with %zu thread(s) into %s, at a rate of %.2f rows/s.\n",
ts_elapse / 1000.0, total_rows, write_meta->total_threads, g_arguments->output_path, total_rows * 1000.0 / ts_elapse);
}
@@ -1197,15 +1198,14 @@ static int csvParseParameter() {
// csv_output_path
size_t len = strlen(g_arguments->output_path);
if (len == 0) {
- errorPrint("Failed to generate csv files, the specified output path is empty. Please provide a valid path. database: %s, super table: %s.\n",
- db->dbName, stb->stbName);
+ errorPrint("Failed to generate csv files, the specified output path is empty. Please provide a valid path.\n");
return -1;
}
if (g_arguments->output_path[len - 1] != '/') {
int n = snprintf(g_arguments->output_path_buf, sizeof(g_arguments->output_path_buf), "%s/", g_arguments->output_path);
if (n < 0 || n >= sizeof(g_arguments->output_path_buf)) {
- errorPrint("Failed to generate csv files, path buffer overflow risk when appending '/'. path: %s, database: %s, super table: %s.\n",
- g_arguments->csv_output_path, db->dbName, stb->stbName);
+ errorPrint("Failed to generate csv files, path buffer overflow risk when appending '/'. path: %s.\n",
+ g_arguments->output_path);
return -1;
}
g_arguments->output_path = g_arguments->output_path_buf;
@@ -1214,8 +1214,8 @@ static int csvParseParameter() {
// csv_ts_format
if (g_arguments->csv_ts_format) {
if (csvValidateParamTsFormat(g_arguments->csv_ts_format) != 0) {
- errorPrint("Failed to generate csv files, the parameter `csv_ts_format` is invalid. csv_ts_format: %s, database: %s, super table: %s.\n",
- g_arguments->csv_ts_format, db->dbName, stb->stbName);
+ errorPrint("Failed to generate csv files, the parameter `csv_ts_format` is invalid. csv_ts_format: %s.\n",
+ g_arguments->csv_ts_format);
return -1;
}
}
@@ -1223,8 +1223,8 @@ static int csvParseParameter() {
// csv_ts_interval
long csv_ts_intv_secs = csvValidateParamTsInterval(g_arguments->csv_ts_interval);
if (csv_ts_intv_secs <= 0) {
- errorPrint("Failed to generate csv files, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s, database: %s, super table: %s.\n",
- g_arguments->csv_ts_interval, db->dbName, stb->stbName);
+ errorPrint("Failed to generate csv files, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s.\n",
+ g_arguments->csv_ts_interval);
return -1;
}
g_arguments->csv_ts_intv_secs = csv_ts_intv_secs;
@@ -1237,7 +1237,7 @@ static int csvWriteThread() {
for (size_t i = 0; i < g_arguments->databases->size && !g_arguments->terminate; ++i) {
// database
SDataBase* db = benchArrayGet(g_arguments->databases, i);
- if (database->superTbls) {
+ if (db->superTbls) {
for (size_t j = 0; j < db->superTbls->size && !g_arguments->terminate; ++j) {
// stb
SSuperTable* stb = benchArrayGet(db->superTbls, j);
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index 21393b8d29..967b465dff 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -14,7 +14,6 @@
#include
#include
#include "benchLog.h"
-#include "benchCsv.h"
extern char g_configDir[MAX_PATH_LEN];
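The csvParseParameter() hunks earlier in this patch reject an empty output path and append a trailing '/' through a bounded snprintf. A minimal standalone C sketch of that normalization, for illustration only; the helper name and MAX_PATH_LEN are placeholders, not part of the patch:

    #include <stdio.h>
    #include <string.h>

    #define MAX_PATH_LEN 4096   /* assumed buffer size, mirrors output_path_buf */

    /* Returns 0 on success, -1 on empty input or truncation. */
    static int normalizeOutputPath(const char* in, char* out, size_t out_size) {
        size_t len = strlen(in);
        if (len == 0) {
            return -1;                        /* empty path is rejected */
        }
        int n;
        if (in[len - 1] == '/') {
            n = snprintf(out, out_size, "%s", in);
        } else {
            n = snprintf(out, out_size, "%s/", in);
        }
        if (n < 0 || (size_t)n >= out_size) {
            return -1;                        /* truncated: overflow risk, refuse the path */
        }
        return 0;
    }

    int main(void) {
        char buf[MAX_PATH_LEN];
        if (normalizeOutputPath("/tmp/csv_out", buf, sizeof(buf)) == 0) {
            printf("output path: %s\n", buf); /* prints /tmp/csv_out/ */
        }
        return 0;
    }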
From baa789c54dbfca87b474582de4864f2000a985e6 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Mon, 3 Mar 2025 15:00:34 +0800
Subject: [PATCH 046/105] refactor: do not compile test-related files when
BUILD_TEST=OFF
---
CMakeLists.txt | 3 ++-
utils/CMakeLists.txt | 1 -
utils/test/c/CMakeLists.txt | 11 ++++++++++-
3 files changed, 12 insertions(+), 3 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index db5b89db3d..960301075a 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -33,12 +33,13 @@ target_include_directories(api INTERFACE "include/client")
if(${BUILD_TEST})
include(CTest)
enable_testing()
+ add_subdirectory(examples/c)
+  add_subdirectory(utils/test/c)
endif(${BUILD_TEST})
add_subdirectory(source)
add_subdirectory(tools)
add_subdirectory(utils)
-add_subdirectory(examples/c)
add_subdirectory(tests)
include(${TD_SUPPORT_DIR}/cmake.install)
diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt
index 9872a9dc55..dae610ff69 100644
--- a/utils/CMakeLists.txt
+++ b/utils/CMakeLists.txt
@@ -1,6 +1,5 @@
# ADD_SUBDIRECTORY(examples/c)
ADD_SUBDIRECTORY(tsim)
-ADD_SUBDIRECTORY(test/c)
# ADD_SUBDIRECTORY(comparisonTest/tdengine)
IF(NOT "${TSZ_ENABLED}" MATCHES "false")
diff --git a/utils/test/c/CMakeLists.txt b/utils/test/c/CMakeLists.txt
index 1b2716b8e5..4629b6c8d1 100644
--- a/utils/test/c/CMakeLists.txt
+++ b/utils/test/c/CMakeLists.txt
@@ -1,5 +1,4 @@
add_executable(tmq_demo tmqDemo.c)
-add_dependencies(tmq_demo ${TAOS_LIB})
add_executable(tmq_sim tmqSim.c)
add_executable(create_table createTable.c)
add_executable(tmq_taosx_ci tmq_taosx_ci.c)
@@ -47,6 +46,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_demo
PUBLIC ${TAOS_LIB}
@@ -54,6 +54,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_sim
PUBLIC ${TAOS_LIB_PLATFORM_SPEC}
@@ -61,6 +62,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_ts5466
PUBLIC ${TAOS_LIB}
@@ -68,6 +70,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32187
PUBLIC ${TAOS_LIB}
@@ -75,6 +78,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32471
PUBLIC ${TAOS_LIB}
@@ -82,6 +86,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td33798
PUBLIC ${TAOS_LIB}
@@ -89,6 +94,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32526
PUBLIC ${TAOS_LIB}
@@ -96,6 +102,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_ts5776
PUBLIC ${TAOS_LIB}
@@ -103,6 +110,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_taosx_ci
PUBLIC ${TAOS_LIB}
@@ -110,6 +118,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_offset_test
PUBLIC ${TAOS_LIB}
From 2815227d2518ef2ff14e4cc0893a3b660f349e32 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 15:35:45 +0800
Subject: [PATCH 047/105] fix: limit thread concurrency to the child table count of a super table
---
tools/taos-tools/src/benchCsv.c | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index c491e94606..850d3b9a98 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -483,12 +483,12 @@ static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write
break;
}
case CSV_NAMING_B_THREAD: {
- write_meta->total_threads = g_arguments->nthreads;
+ write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
csvGenThreadFormatter(write_meta);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
- write_meta->total_threads = g_arguments->nthreads;
+ write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
csvGenThreadFormatter(write_meta);
csvCalcTimestampStep(write_meta);
break;
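The intent of the two hunks above is simply to clamp the writer thread count so no thread is created without at least one child table to process. A hedged sketch of the clamp; the helper name and the floor of one thread are assumptions for illustration:

    #include <stddef.h>

    #define MIN(a, b) ((a) < (b) ? (a) : (b))

    /* One writer thread per child table at most; mirrors the fix in csvInitWriteMeta(). */
    static size_t csvEffectiveThreads(size_t nthreads, size_t childTblCount) {
        size_t total = MIN(nthreads, childTblCount);
        return (total > 0) ? total : 1;   /* assumed floor: keep a single thread */
    }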
From 20f6c7dd53eb391225a87cccaf1a68e7ec7826dd Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 15:37:08 +0800
Subject: [PATCH 048/105] feat: csv output header supports true/false options
---
tools/taos-tools/src/benchJsonOpt.c | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index 967b465dff..9bc8527130 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1620,11 +1620,13 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
}
// csv output header
- g_arguments->csv_output_header = false;
+ g_arguments->csv_output_header = true;
tools_cJSON* oph = tools_cJSON_GetObjectItem(json, "csv_output_header");
if (oph && oph->type == tools_cJSON_String && oph->valuestring != NULL) {
- if (0 == strcasecmp(oph->valuestring, "yes")) {
+ if (0 == strcasecmp(oph->valuestring, "yes") || 0 == strcasecmp(oph->valuestring, "true")) {
g_arguments->csv_output_header = true;
+ } else if (0 == strcasecmp(oph->valuestring, "no") || 0 == strcasecmp(oph->valuestring, "false")) {
+ g_arguments->csv_output_header = false;
}
}
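After this change the accepted spellings are yes/true and no/false (case-insensitive), and the default flips to emitting the header. A small illustrative parser equivalent to the hunk above; the helper name is a placeholder:

    #include <stdbool.h>
    #include <stddef.h>
    #include <strings.h>   /* strcasecmp, POSIX */

    static bool csvParseHeaderOption(const char* value, bool fallback) {
        if (value == NULL) {
            return fallback;               /* missing option keeps the default (true) */
        }
        if (strcasecmp(value, "yes") == 0 || strcasecmp(value, "true") == 0) {
            return true;
        }
        if (strcasecmp(value, "no") == 0 || strcasecmp(value, "false") == 0) {
            return false;
        }
        return fallback;                   /* unrecognized strings are ignored */
    }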
From 5db648038b9866aea6c390a23d599f45f5fe6b81 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Mon, 3 Mar 2025 15:00:34 +0800
Subject: [PATCH 049/105] refactor: do not compile test-related files when
BUILD_TEST=OFF
---
CMakeLists.txt | 3 ++-
source/libs/function/CMakeLists.txt | 40 ++++++++++++++---------------
utils/CMakeLists.txt | 1 -
utils/test/c/CMakeLists.txt | 11 +++++++-
4 files changed, 31 insertions(+), 24 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index db5b89db3d..ac07a6d1e3 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -33,12 +33,13 @@ target_include_directories(api INTERFACE "include/client")
if(${BUILD_TEST})
include(CTest)
enable_testing()
+ add_subdirectory(examples/c)
+ add_subdirectory(utils/test/c)
endif(${BUILD_TEST})
add_subdirectory(source)
add_subdirectory(tools)
add_subdirectory(utils)
-add_subdirectory(examples/c)
add_subdirectory(tests)
include(${TD_SUPPORT_DIR}/cmake.install)
diff --git a/source/libs/function/CMakeLists.txt b/source/libs/function/CMakeLists.txt
index fad0a749d5..4934ca94e8 100644
--- a/source/libs/function/CMakeLists.txt
+++ b/source/libs/function/CMakeLists.txt
@@ -41,28 +41,26 @@ target_link_libraries(
PUBLIC uv_a
)
-add_executable(runUdf test/runUdf.c)
-target_include_directories(
- runUdf
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/contrib/libuv/include"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
+if(${BUILD_TEST})
+ add_executable(runUdf test/runUdf.c)
+ target_include_directories(
+ runUdf
+ PUBLIC
+ "${TD_SOURCE_DIR}/include/libs/function"
+ "${TD_SOURCE_DIR}/contrib/libuv/include"
+ "${TD_SOURCE_DIR}/include/util"
+ "${TD_SOURCE_DIR}/include/common"
+ "${TD_SOURCE_DIR}/include/client"
+ "${TD_SOURCE_DIR}/include/os"
+ PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
+ )
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(runUdf jemalloc)
-ENDIF()
-
-target_link_libraries(
- runUdf
- PUBLIC uv_a
- PRIVATE os util common nodes function ${LINK_JEMALLOC}
-)
+ target_link_libraries(
+ runUdf
+ PUBLIC uv_a
+ PRIVATE os util common nodes function ${LINK_JEMALLOC}
+ )
+endif()
add_library(udf1 STATIC MODULE test/udf1.c)
target_include_directories(
diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt
index 9872a9dc55..dae610ff69 100644
--- a/utils/CMakeLists.txt
+++ b/utils/CMakeLists.txt
@@ -1,6 +1,5 @@
# ADD_SUBDIRECTORY(examples/c)
ADD_SUBDIRECTORY(tsim)
-ADD_SUBDIRECTORY(test/c)
# ADD_SUBDIRECTORY(comparisonTest/tdengine)
IF(NOT "${TSZ_ENABLED}" MATCHES "false")
diff --git a/utils/test/c/CMakeLists.txt b/utils/test/c/CMakeLists.txt
index 1b2716b8e5..4629b6c8d1 100644
--- a/utils/test/c/CMakeLists.txt
+++ b/utils/test/c/CMakeLists.txt
@@ -1,5 +1,4 @@
add_executable(tmq_demo tmqDemo.c)
-add_dependencies(tmq_demo ${TAOS_LIB})
add_executable(tmq_sim tmqSim.c)
add_executable(create_table createTable.c)
add_executable(tmq_taosx_ci tmq_taosx_ci.c)
@@ -47,6 +46,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_demo
PUBLIC ${TAOS_LIB}
@@ -54,6 +54,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_sim
PUBLIC ${TAOS_LIB_PLATFORM_SPEC}
@@ -61,6 +62,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_ts5466
PUBLIC ${TAOS_LIB}
@@ -68,6 +70,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32187
PUBLIC ${TAOS_LIB}
@@ -75,6 +78,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32471
PUBLIC ${TAOS_LIB}
@@ -82,6 +86,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td33798
PUBLIC ${TAOS_LIB}
@@ -89,6 +94,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32526
PUBLIC ${TAOS_LIB}
@@ -96,6 +102,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_ts5776
PUBLIC ${TAOS_LIB}
@@ -103,6 +110,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_taosx_ci
PUBLIC ${TAOS_LIB}
@@ -110,6 +118,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_offset_test
PUBLIC ${TAOS_LIB}
From 4379882779b8d8306f1e47ed5c8ce02411b9a6be Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Mon, 3 Mar 2025 16:26:47 +0800
Subject: [PATCH 050/105] refactor: do not compile udf test files when
BUILD_TEST=OFF
---
source/libs/function/CMakeLists.txt | 172 ++---------------------
source/libs/function/test/CMakeLists.txt | 60 ++++++++
source/libs/function/test/runUdf.c | 3 -
3 files changed, 70 insertions(+), 165 deletions(-)
create mode 100644 source/libs/function/test/CMakeLists.txt
diff --git a/source/libs/function/CMakeLists.txt b/source/libs/function/CMakeLists.txt
index 4934ca94e8..4e3c8dddab 100644
--- a/source/libs/function/CMakeLists.txt
+++ b/source/libs/function/CMakeLists.txt
@@ -1,11 +1,14 @@
aux_source_directory(src FUNCTION_SRC)
aux_source_directory(src/detail FUNCTION_SRC_DETAIL)
list(REMOVE_ITEM FUNCTION_SRC src/udfd.c)
-IF(COMPILER_SUPPORT_AVX2)
+
+if(COMPILER_SUPPORT_AVX2)
MESSAGE(STATUS "AVX2 instructions is ACTIVATED")
set_source_files_properties(src/detail/tminmaxavx.c PROPERTIES COMPILE_FLAGS -mavx2)
-ENDIF()
+endif()
+
add_library(function STATIC ${FUNCTION_SRC} ${FUNCTION_SRC_DETAIL})
+
target_include_directories(
function
PUBLIC
@@ -17,17 +20,6 @@ target_include_directories(
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
)
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib -ljemalloc)
- SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc")
-ELSE()
- SET(LINK_JEMALLOC "")
-ENDIF()
-
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(function jemalloc)
-ENDIF()
-
target_link_libraries(
function
PRIVATE os
@@ -41,150 +33,6 @@ target_link_libraries(
PUBLIC uv_a
)
-if(${BUILD_TEST})
- add_executable(runUdf test/runUdf.c)
- target_include_directories(
- runUdf
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/contrib/libuv/include"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
- )
-
- target_link_libraries(
- runUdf
- PUBLIC uv_a
- PRIVATE os util common nodes function ${LINK_JEMALLOC}
- )
-endif()
-
-add_library(udf1 STATIC MODULE test/udf1.c)
-target_include_directories(
- udf1
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(udf1 jemalloc)
-ENDIF()
-
-target_link_libraries(
- udf1 PUBLIC os ${LINK_JEMALLOC})
-
-add_library(udf1_dup STATIC MODULE test/udf1_dup.c)
-target_include_directories(
- udf1_dup
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(udf1_dup jemalloc)
-ENDIF()
-
-target_link_libraries(
- udf1_dup PUBLIC os ${LINK_JEMALLOC})
-
-add_library(udf2 STATIC MODULE test/udf2.c)
-target_include_directories(
- udf2
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(udf2 jemalloc)
-ENDIF()
-
-target_link_libraries(
- udf2 PUBLIC os ${LINK_JEMALLOC}
-)
-
-add_library(udf2_dup STATIC MODULE test/udf2_dup.c)
-target_include_directories(
- udf2_dup
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(udf2_dup jemalloc)
-ENDIF()
-
-target_link_libraries(
- udf2_dup PUBLIC os ${LINK_JEMALLOC}
-)
-
-set(TARGET_NAMES
- change_udf_normal
- change_udf_no_init
- change_udf_no_process
- change_udf_no_destroy
- change_udf_init_failed
- change_udf_process_failed
- change_udf_destory_failed
-)
-
-set(COMPILE_DEFINITIONS
- CHANGE_UDF_NORMAL
- CHANGE_UDF_NO_INIT
- CHANGE_UDF_NO_PROCESS
- CHANGE_UDF_NO_DESTROY
- CHANGE_UDF_INIT_FAILED
- CHANGE_UDF_PROCESS_FAILED
- CHANGE_UDF_DESTORY_FAILED
-)
-
-foreach(index RANGE 0 6)
- list(GET TARGET_NAMES ${index} target_name)
- list(GET COMPILE_DEFINITIONS ${index} compile_def)
-
- add_library(${target_name} STATIC MODULE test/change_udf.c)
- target_include_directories(
- ${target_name}
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
- )
- target_compile_definitions(${target_name} PRIVATE ${compile_def})
- IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(${target_name} jemalloc)
- ENDIF()
- target_link_libraries(
- ${target_name} PUBLIC os ${LINK_JEMALLOC}
- )
-endforeach()
-
-# SET(EXECUTABLE_OUTPUT_PATH ${CMAKE_BINARY_DIR}/build/bin)
add_executable(udfd src/udfd.c)
if(${TD_DARWIN})
@@ -203,12 +51,12 @@ target_include_directories(
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
)
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(udfd jemalloc)
-ENDIF()
-
target_link_libraries(
udfd
PUBLIC uv_a
- PRIVATE os util common nodes function ${LINK_JEMALLOC}
+ PRIVATE os util common nodes function
)
+
+if(${BUILD_TEST})
+ add_subdirectory(test)
+endif()
diff --git a/source/libs/function/test/CMakeLists.txt b/source/libs/function/test/CMakeLists.txt
new file mode 100644
index 0000000000..d805adff3a
--- /dev/null
+++ b/source/libs/function/test/CMakeLists.txt
@@ -0,0 +1,60 @@
+set(TD_UDF_INC
+ "${TD_SOURCE_DIR}/include/libs/function"
+ "${TD_SOURCE_DIR}/contrib/libuv/include"
+ "${TD_SOURCE_DIR}/include/util"
+ "${TD_SOURCE_DIR}/include/common"
+ "${TD_SOURCE_DIR}/include/client"
+ "${TD_SOURCE_DIR}/include/os"
+ "${CMAKE_CURRENT_SOURCE_DIR}/../inc"
+)
+
+set(TD_UDF_LIB uv_a PRIVATE os util common nodes function)
+
+add_executable(runUdf runUdf.c)
+target_include_directories(runUdf PUBLIC ${TD_UDF_INC})
+target_link_libraries(runUdf PUBLIC ${TD_UDF_LIB})
+
+add_library(udf1 STATIC MODULE udf1.c)
+target_include_directories(udf1 PUBLIC ${TD_UDF_INC})
+target_link_libraries(udf1 PUBLIC os)
+
+add_library(udf2 STATIC MODULE udf2.c)
+target_include_directories(udf2 PUBLIC ${TD_UDF_INC})
+target_link_libraries(udf2 PUBLIC os)
+
+add_library(udf1_dup STATIC MODULE udf1_dup.c)
+target_include_directories(udf1_dup PUBLIC ${TD_UDF_INC})
+target_link_libraries(udf1_dup PUBLIC os)
+
+add_library(udf2_dup STATIC MODULE udf2_dup.c)
+target_include_directories(udf2_dup PUBLIC ${TD_UDF_INC})
+target_link_libraries(udf2_dup PUBLIC os)
+
+set(TARGET_NAMES
+ change_udf_normal
+ change_udf_no_init
+ change_udf_no_process
+ change_udf_no_destroy
+ change_udf_init_failed
+ change_udf_process_failed
+ change_udf_destory_failed
+)
+
+set(COMPILE_DEFINITIONS
+ CHANGE_UDF_NORMAL
+ CHANGE_UDF_NO_INIT
+ CHANGE_UDF_NO_PROCESS
+ CHANGE_UDF_NO_DESTROY
+ CHANGE_UDF_INIT_FAILED
+ CHANGE_UDF_PROCESS_FAILED
+ CHANGE_UDF_DESTORY_FAILED
+)
+
+foreach(index RANGE 0 6)
+ list(GET TARGET_NAMES ${index} target_name)
+ list(GET COMPILE_DEFINITIONS ${index} compile_def)
+ add_library(${target_name} STATIC MODULE change_udf.c)
+ target_include_directories(${target_name} PUBLIC ${TD_UDF_INC})
+ target_compile_definitions(${target_name} PRIVATE ${compile_def})
+ target_link_libraries(${target_name} PUBLIC os)
+endforeach()
\ No newline at end of file
diff --git a/source/libs/function/test/runUdf.c b/source/libs/function/test/runUdf.c
index f28b44d1b8..0c47247e21 100644
--- a/source/libs/function/test/runUdf.c
+++ b/source/libs/function/test/runUdf.c
@@ -1,6 +1,3 @@
-#include
-#include
-#include
#include "uv.h"
#include "fnLog.h"
From 6bc6223db31fd332d8c2b950ed39bf487c274584 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 16:47:07 +0800
Subject: [PATCH 051/105] fix: memory release bug and inverted return value checks
---
tools/taos-tools/src/benchCsv.c | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 850d3b9a98..36f92c99e8 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -705,7 +705,7 @@ static void csvFreeCtbTagData(CsvThreadMeta* thread_meta, CsvRowTagsBuf* tags_bu
for (uint64_t i = 0 ; i < thread_meta->ctb_count; ++i) {
char* tags_buf = tags_buf_array[i].buf;
if (tags_buf) {
- tmfree(tags_buf_array);
+ tmfree(tags_buf);
} else {
break;
}
@@ -738,7 +738,6 @@ static CsvRowTagsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta*
}
tags_buf_array[i].buf = tags_buf;
- thread_meta->tags_buf_size = tags_buf_size;
ret = csvGenRowTagData(tags_buf, tags_buf_size, stb, thread_meta->ctb_start_idx + i, &tk);
if (ret <= 0) {
@@ -747,6 +746,7 @@ static CsvRowTagsBuf* csvGenCtbTagData(CsvWriteMeta* write_meta, CsvThreadMeta*
tags_buf_array[i].length = ret;
}
+ thread_meta->tags_buf_size = tags_buf_size;
return tags_buf_array;
@@ -978,7 +978,7 @@ static void* csvGenStbThread(void* arg) {
for (ctb_idx = 0; ctb_idx < thread_meta->ctb_count; ++ctb_idx) {
for (slice_ctb_cur_ts = slice_cur_ts; slice_ctb_cur_ts < slice_batch_ts; slice_ctb_cur_ts += write_meta->stb->timestamp_step) {
ret = csvWriteFile(fhdl, ctb_idx, slice_ctb_cur_ts, &ck, write_meta, thread_meta);
- if (!ret) {
+ if (ret) {
errorPrint("Failed to write csv file. thread index: %zu, file: %s, errno: %d, strerror: %s.\n",
thread_meta->thread_id, fullname, errno, strerror(errno));
csvClose(fhdl);
@@ -1071,7 +1071,7 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
csvInitThreadMeta(write_meta, i + 1, &arg->thread_meta);
ret = pthread_create(&pids[i], NULL, csvGenStbThread, arg);
- if (!ret) {
+ if (ret) {
perror("Failed to create thread");
goto end;
}
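The first hunk is the actual memory fix: the loop must free each element's buffer rather than the array itself, and the array is released once afterwards. A self-contained sketch of the corrected pattern; the struct and the tmfree stand-in are simplified placeholders for the types in benchCsv.h:

    #include <stdint.h>
    #include <stdlib.h>

    typedef struct { char* buf; int length; } CsvRowTagsBuf;   /* simplified stand-in */
    #define tmfree(p) free(p)                                  /* stand-in for the tools wrapper */

    static void freeTagBuffers(CsvRowTagsBuf* tags_buf_array, uint64_t ctb_count) {
        if (tags_buf_array == NULL) {
            return;
        }
        for (uint64_t i = 0; i < ctb_count; ++i) {
            if (tags_buf_array[i].buf == NULL) {
                break;                       /* buffers are filled front to back */
            }
            tmfree(tags_buf_array[i].buf);   /* the bug freed tags_buf_array here instead */
        }
        tmfree(tags_buf_array);              /* release the array exactly once */
    }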
From 7eef6659ace4907b84757140488a51e20f78dc55 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 17:28:26 +0800
Subject: [PATCH 052/105] fix: correct the count of child tables handled by
 each thread
---
tools/taos-tools/src/benchCsv.c | 12 ++++++++----
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 36f92c99e8..61334c418d 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -114,7 +114,7 @@ static void csvCalcCtbRange(CsvThreadMeta* thread_meta, size_t total_threads, in
thread_meta->ctb_start_idx = ctb_start_idx;
thread_meta->ctb_end_idx = ctb_end_idx;
- thread_meta->ctb_count = ctb_count;
+ thread_meta->ctb_count = ctb_end_idx - ctb_start_idx;
return;
}
@@ -385,7 +385,10 @@ int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
if (pos <= 0 || pos >= size) return -1;
}
- infoPrint("create stable: <%s>\n", buf);
+ pos += snprintf(buf + pos, size - pos, "\n");
+ if (pos <= 0 || pos >= size) return -1;
+
+ // infoPrint("create stable: <%s>.\n", buf);
return (pos > 0 && pos < size) ? pos : -1;
}
@@ -677,7 +680,7 @@ static int csvGenRowTagData(char* buf, int size, SSuperTable* stb, int64_t index
}
// tbname
- int pos = snprintf(buf, size, "\'%s%"PRId64"\'", stb->childTblPrefix, index);
+ int pos = snprintf(buf, size, ",'%s%"PRId64"'", stb->childTblPrefix, index);
// tags
pos += csvGenRowFields(buf + pos, size - pos, stb, GEN_ROW_FIELDS_TAG, k);
@@ -968,6 +971,7 @@ static void* csvGenStbThread(void* arg) {
slice_cur_ts = cur_ts;
slice_end_ts = MIN(cur_ts + write_meta->ts_step, write_meta->end_ts);
file_rows = 0;
+ pre_print_ts = toolsGetTimestampMs();
infoPrint("thread[%zu] begin to write csv file: %s.\n", thread_meta->thread_id, fullname);
@@ -1000,7 +1004,7 @@ static void* csvGenStbThread(void* arg) {
}
- if (!g_arguments->terminate) {
+ if (g_arguments->terminate) {
csvClose(fhdl);
goto end;
}
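The ctb_count fix above derives each thread's workload from its own [start, end) range. A hedged sketch of how such a range split can look; the even-distribution formula is an assumption for illustration, and only the count = end - start relation comes from the patch:

    #include <stddef.h>
    #include <stdint.h>

    /* Split `total_ctbs` child tables across `total_threads` writers. */
    static void calcCtbRange(size_t thread_id, size_t total_threads, int64_t ctb_offset,
                             int64_t total_ctbs, int64_t* start, int64_t* end) {
        int64_t base  = total_ctbs / (int64_t)total_threads;
        int64_t extra = total_ctbs % (int64_t)total_threads;
        int64_t tid   = (int64_t)thread_id;

        *start = ctb_offset + base * tid + (tid < extra ? tid : extra);
        *end   = *start + base + (tid < extra ? 1 : 0);
        /* per-thread count is (*end - *start), matching the corrected ctb_count */
    }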
From 1d435faaa2530011173d735e11dcc61fa6e8b7f2 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Mon, 3 Mar 2025 17:42:19 +0800
Subject: [PATCH 053/105] enh:TD-33933-decouple-send-heartbeat
---
include/util/ttimer.h | 3 +++
source/libs/sync/src/syncMain.c | 12 +++++------
source/util/src/tsched.c | 2 ++
source/util/src/ttimer.c | 38 +++++++++++++++++++++++++--------
4 files changed, 40 insertions(+), 15 deletions(-)
diff --git a/include/util/ttimer.h b/include/util/ttimer.h
index 53a8f0a19f..3c0b716f58 100644
--- a/include/util/ttimer.h
+++ b/include/util/ttimer.h
@@ -43,6 +43,9 @@ bool taosTmrIsStopped(tmr_h* timerId);
bool taosTmrReset(TAOS_TMR_CALLBACK fp, int32_t mseconds, void *param, void *handle, tmr_h *pTmrId);
+bool taosTmrResetPriority(TAOS_TMR_CALLBACK fp, int32_t mseconds, void *param, void *handle, tmr_h *pTmrId,
+ uint8_t priority);
+
#ifdef __cplusplus
}
#endif
diff --git a/source/libs/sync/src/syncMain.c b/source/libs/sync/src/syncMain.c
index cb7eb59bd0..7ee29d278c 100644
--- a/source/libs/sync/src/syncMain.c
+++ b/source/libs/sync/src/syncMain.c
@@ -1014,8 +1014,8 @@ static int32_t syncHbTimerStart(SSyncNode* pSyncNode, SSyncTimer* pSyncTimer) {
sTrace("vgId:%d, start hb timer, rid:%" PRId64 " addr:%" PRId64 " at %d", pSyncNode->vgId, pData->rid,
pData->destId.addr, pSyncTimer->timerMS);
- bool stopped = taosTmrReset(pSyncTimer->timerCb, pSyncTimer->timerMS, (void*)(pData->rid), syncEnv()->pTimerManager,
- &pSyncTimer->pTimer);
+ bool stopped = taosTmrResetPriority(pSyncTimer->timerCb, pSyncTimer->timerMS, (void*)(pData->rid),
+ syncEnv()->pTimerManager, &pSyncTimer->pTimer, 2);
if (stopped) {
sError("vgId:%d, failed to reset hb timer success", pSyncNode->vgId);
return TSDB_CODE_SYN_INTERNAL_ERROR;
@@ -1663,8 +1663,8 @@ ESyncStrategy syncNodeStrategy(SSyncNode* pSyncNode) { return pSyncNode->raftCfg
int32_t syncNodeStartPingTimer(SSyncNode* pSyncNode) {
int32_t code = 0;
if (syncIsInit()) {
- bool stopped = taosTmrReset(pSyncNode->FpPingTimerCB, pSyncNode->pingTimerMS, (void*)pSyncNode->rid,
- syncEnv()->pTimerManager, &pSyncNode->pPingTimer);
+ bool stopped = taosTmrResetPriority(pSyncNode->FpPingTimerCB, pSyncNode->pingTimerMS, (void*)pSyncNode->rid,
+ syncEnv()->pTimerManager, &pSyncNode->pPingTimer, 2);
if (stopped) {
sError("vgId:%d, failed to reset ping timer, ms:%d", pSyncNode->vgId, pSyncNode->pingTimerMS);
return TSDB_CODE_SYN_INTERNAL_ERROR;
@@ -2801,8 +2801,8 @@ static void syncNodeEqPeerHeartbeatTimer(void* param, void* tmrId) {
if (syncIsInit()) {
sTrace("vgId:%d, reset peer hb timer at %d", pSyncNode->vgId, pSyncTimer->timerMS);
- bool stopped = taosTmrReset(syncNodeEqPeerHeartbeatTimer, pSyncTimer->timerMS, (void*)hbDataRid,
- syncEnv()->pTimerManager, &pSyncTimer->pTimer);
+ bool stopped = taosTmrResetPriority(syncNodeEqPeerHeartbeatTimer, pSyncTimer->timerMS, (void*)hbDataRid,
+ syncEnv()->pTimerManager, &pSyncTimer->pTimer, 2);
if (stopped) sError("vgId:%d, reset peer hb timer error, %s", pSyncNode->vgId, tstrerror(code));
} else {
diff --git a/source/util/src/tsched.c b/source/util/src/tsched.c
index 8c708ac6b5..108168db55 100644
--- a/source/util/src/tsched.c
+++ b/source/util/src/tsched.c
@@ -147,6 +147,8 @@ void *taosProcessSchedQueue(void *scheduler) {
char name[16] = {0};
snprintf(name, tListLen(name), "%s-taskQ", pSched->label);
setThreadName(name);
+ int64_t pid = taosGetSelfPthreadId();
+ uInfo("scheduler %s is started, thread:%" PRId64, pSched->label, pid);
while (1) {
if ((ret = tsem_wait(&pSched->fullSem)) != 0) {
diff --git a/source/util/src/ttimer.c b/source/util/src/ttimer.c
index ec300c5206..d6f47fb81b 100644
--- a/source/util/src/ttimer.c
+++ b/source/util/src/ttimer.c
@@ -89,6 +89,7 @@ typedef struct tmr_obj_t {
};
TAOS_TMR_CALLBACK fp;
void* param;
+ uint8_t priority;
} tmr_obj_t;
typedef struct timer_list_t {
@@ -118,6 +119,7 @@ static TdThreadMutex tmrCtrlMutex;
static tmr_ctrl_t* tmrCtrls;
static tmr_ctrl_t* unusedTmrCtrl = NULL;
static void* tmrQhandle;
+static void* tmrQhandleHigh;
static int32_t numOfTmrCtrl = 0;
int32_t taosTmrThreads = 1;
@@ -316,22 +318,30 @@ static void addToExpired(tmr_obj_t* head) {
schedMsg.msg = NULL;
schedMsg.ahandle = head;
schedMsg.thandle = NULL;
- if (taosScheduleTask(tmrQhandle, &schedMsg) != 0) {
- tmrError("%s failed to add expired timer[id=%" PRIuPTR "] to queue.", head->ctrl->label, id);
+ if (head->priority == 1) {
+ if (taosScheduleTask(tmrQhandle, &schedMsg) != 0) {
+ tmrError("%s failed to add expired timer[id=%" PRIuPTR "] to queue.", head->ctrl->label, id);
+ }
+ } else if (head->priority == 2) {
+ if (taosScheduleTask(tmrQhandleHigh, &schedMsg) != 0) {
+ tmrError("%s failed to add expired timer[id=%" PRIuPTR "] to high level queue.", head->ctrl->label, id);
+ }
}
- tmrDebug("timer[id=%" PRIuPTR "] has been added to queue.", id);
+ tmrDebug("timer[id=%" PRIuPTR "] has been added to queue priority:%d.", id, head->priority);
head = next;
}
}
-static uintptr_t doStartTimer(tmr_obj_t* timer, TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, tmr_ctrl_t* ctrl) {
+static uintptr_t doStartTimer(tmr_obj_t* timer, TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, tmr_ctrl_t* ctrl,
+ uint8_t priority) {
uintptr_t id = getNextTimerId();
timer->id = id;
timer->state = TIMER_STATE_WAITING;
timer->fp = fp;
timer->param = param;
timer->ctrl = ctrl;
+ timer->priority = priority;
addTimer(timer);
const char* fmt = "%s timer[id=%" PRIuPTR ", fp=%p, param=%p] started";
@@ -349,7 +359,7 @@ static uintptr_t doStartTimer(tmr_obj_t* timer, TAOS_TMR_CALLBACK fp, int32_t ms
return id;
}
-tmr_h taosTmrStart(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* handle) {
+tmr_h taosTmrStartPriority(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* handle, uint8_t priority) {
tmr_ctrl_t* ctrl = (tmr_ctrl_t*)handle;
if (ctrl == NULL || ctrl->label[0] == 0) {
return NULL;
@@ -361,7 +371,11 @@ tmr_h taosTmrStart(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* ha
return NULL;
}
- return (tmr_h)doStartTimer(timer, fp, mseconds, param, ctrl);
+ return (tmr_h)doStartTimer(timer, fp, mseconds, param, ctrl, priority);
+}
+
+tmr_h taosTmrStart(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* handle) {
+ return taosTmrStartPriority(fp, mseconds, param, handle, 1);
}
static void taosTimerLoopFunc(int32_t signo) {
@@ -488,7 +502,8 @@ bool taosTmrIsStopped(tmr_h* timerId) {
return (state == TIMER_STATE_CANCELED) || (state == TIMER_STATE_STOPPED);
}
-bool taosTmrReset(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* handle, tmr_h* pTmrId) {
+bool taosTmrResetPriority(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* handle, tmr_h* pTmrId,
+ uint8_t priority) {
tmr_ctrl_t* ctrl = (tmr_ctrl_t*)handle;
if (ctrl == NULL || ctrl->label[0] == 0) {
return false;
@@ -509,7 +524,7 @@ bool taosTmrReset(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* han
}
if (timer == NULL) {
- *pTmrId = taosTmrStart(fp, mseconds, param, handle);
+ *pTmrId = taosTmrStartPriority(fp, mseconds, param, handle, priority);
if (NULL == *pTmrId) {
stopped = true;
}
@@ -530,11 +545,15 @@ bool taosTmrReset(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* han
uError("timer refCount=%d not expected 1", timer->refCount);
}
memset(timer, 0, sizeof(*timer));
- *pTmrId = (tmr_h)doStartTimer(timer, fp, mseconds, param, ctrl);
+ *pTmrId = (tmr_h)doStartTimer(timer, fp, mseconds, param, ctrl, priority);
return stopped;
}
+bool taosTmrReset(TAOS_TMR_CALLBACK fp, int32_t mseconds, void* param, void* handle, tmr_h* pTmrId) {
+ return taosTmrResetPriority(fp, mseconds, param, handle, pTmrId, 1);
+}
+
static int32_t taosTmrModuleInit(void) {
tmrCtrls = taosMemoryMalloc(sizeof(tmr_ctrl_t) * tsMaxTmrCtrl);
if (tmrCtrls == NULL) {
@@ -578,6 +597,7 @@ static int32_t taosTmrModuleInit(void) {
}
tmrQhandle = taosInitScheduler(10000, taosTmrThreads, "tmr", NULL);
+ tmrQhandleHigh = taosInitScheduler(10000, taosTmrThreads, "high-tmr", NULL);
if (taosInitTimer(taosTimerLoopFunc, MSECONDS_PER_TICK) != 0) {
tmrError("failed to initialize timer");
}
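Usage-wise, existing callers are untouched: taosTmrStart()/taosTmrReset() keep priority 1, while latency-sensitive timers such as the sync heartbeats pass priority 2 so their callbacks run on the new "high-tmr" scheduler. A hedged caller sketch; the callback, interval and parameter names are placeholders, only the two timer APIs come from the patch:

    #include <stdbool.h>
    #include <stdint.h>
    #include "ttimer.h"   /* taosTmrStart / taosTmrResetPriority */

    static void demoTimerCb(void* param, void* tmrId) { (void)param; (void)tmrId; }

    void demoStartTimers(void* tmrManager, tmr_h* pHbTimer) {
        /* ordinary timer: dispatched through the original "tmr" queue (priority 1) */
        tmr_h t = taosTmrStart(demoTimerCb, 1000, NULL, tmrManager);

        /* heartbeat-style timer: priority 2 routes the expiry callback to the
           dedicated high-priority queue so it is not delayed behind a backlog */
        bool stopped = taosTmrResetPriority(demoTimerCb, 1000, NULL, tmrManager, pHbTimer, 2);
        (void)t;
        (void)stopped;
    }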
From cd958c9f17ce240221ea3ff56b5466705d67d189 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 17:59:58 +0800
Subject: [PATCH 054/105] feat: print csv export mode
---
tools/taos-tools/inc/benchCsv.h | 3 ++-
tools/taos-tools/src/benchCsv.c | 6 ++++++
2 files changed, 8 insertions(+), 1 deletion(-)
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index f9f87aa341..e80f73bcda 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -57,7 +57,8 @@ typedef struct {
typedef struct {
CsvNamingType naming_type;
size_t total_threads;
- char thread_formatter[TINY_BUFF_LEN];
+ char mode[MIDDLE_BUFF_LEN];
+ char thread_formatter[SMALL_BUFF_LEN];
char csv_header[LARGE_BUFF_LEN];
int csv_header_length;
SDataBase* db;
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 61334c418d..32a48e70d0 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -479,18 +479,22 @@ static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write
switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE: {
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "interlace|no-time-slice");
break;
}
case CSV_NAMING_I_TIME_SLICE: {
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "interlace|time-slice");
csvCalcTimestampStep(write_meta);
break;
}
case CSV_NAMING_B_THREAD: {
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch|no-time-slice");
write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
csvGenThreadFormatter(write_meta);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch|time-slice");
write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
csvGenThreadFormatter(write_meta);
csvCalcTimestampStep(write_meta);
@@ -1056,6 +1060,8 @@ static int csvGenStbProcess(SDataBase* db, SSuperTable* stb) {
goto end;
}
+ infoPrint("export csv mode: %s.\n", write_meta->mode);
+
args = benchCalloc(write_meta->total_threads, sizeof(CsvThreadArgs), false);
if (!args) {
ret = -1;
From 56ac7c9ef4281b3e0374a71ac04f3f0c544b85a2 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Mon, 3 Mar 2025 20:07:09 +0800
Subject: [PATCH 055/105] feat: include the thread count in the csv export mode string
---
tools/taos-tools/src/benchCsv.c | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 32a48e70d0..cf8527d375 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -385,7 +385,7 @@ int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
if (pos <= 0 || pos >= size) return -1;
}
- pos += snprintf(buf + pos, size - pos, "\n");
+ pos += snprintf(buf + pos, size - pos, ";\n");
if (pos <= 0 || pos >= size) return -1;
// infoPrint("create stable: <%s>.\n", buf);
@@ -488,14 +488,14 @@ static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write
break;
}
case CSV_NAMING_B_THREAD: {
- (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch|no-time-slice");
write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch[%zu]|no-time-slice", write_meta->total_threads);
csvGenThreadFormatter(write_meta);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
- (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch|time-slice");
write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch[%zu]|time-slice", write_meta->total_threads);
csvGenThreadFormatter(write_meta);
csvCalcTimestampStep(write_meta);
break;
From 68ba80362342ac70e9565815251403f1998d8602 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Mon, 3 Mar 2025 23:11:03 +0800
Subject: [PATCH 056/105] refactor: cmake files
---
CMakeLists.txt | 26 +++++++++-----------------
utils/CMakeLists.txt | 4 ++++
2 files changed, 13 insertions(+), 17 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ac07a6d1e3..2be056ec4e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -11,37 +11,29 @@ if(NOT DEFINED TD_SOURCE_DIR)
endif()
SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
-
set(TD_SUPPORT_DIR "${TD_SOURCE_DIR}/cmake")
set(TD_CONTRIB_DIR "${TD_SOURCE_DIR}/contrib")
include(${TD_SUPPORT_DIR}/cmake.platform)
-include(${TD_SUPPORT_DIR}/cmake.define)
include(${TD_SUPPORT_DIR}/cmake.options)
+include(${TD_SUPPORT_DIR}/cmake.define)
include(${TD_SUPPORT_DIR}/cmake.version)
-
-# contrib
-add_subdirectory(contrib)
+include(${TD_SUPPORT_DIR}/cmake.install)
set_property(GLOBAL PROPERTY GLOBAL_DEPENDS_NO_CYCLES OFF)
-# api
add_library(api INTERFACE)
target_include_directories(api INTERFACE "include/client")
-# src
-if(${BUILD_TEST})
- include(CTest)
- enable_testing()
- add_subdirectory(examples/c)
- add_subdirectory(utils/test/c)
-endif(${BUILD_TEST})
-
+add_subdirectory(contrib)
add_subdirectory(source)
add_subdirectory(tools)
add_subdirectory(utils)
add_subdirectory(tests)
-include(${TD_SUPPORT_DIR}/cmake.install)
-
-# docs
add_subdirectory(docs/doxgen)
+
+if(${BUILD_TEST})
+ include(CTest)
+ enable_testing()
+ add_subdirectory(examples/c)
+endif(${BUILD_TEST})
\ No newline at end of file
diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt
index dae610ff69..a6377f15a8 100644
--- a/utils/CMakeLists.txt
+++ b/utils/CMakeLists.txt
@@ -1,6 +1,10 @@
# ADD_SUBDIRECTORY(examples/c)
ADD_SUBDIRECTORY(tsim)
+IF(${BUILD_TEST})
+ ADD_SUBDIRECTORY(test)
+ENDIF(${BUILD_TEST})
+
# ADD_SUBDIRECTORY(comparisonTest/tdengine)
IF(NOT "${TSZ_ENABLED}" MATCHES "false")
ADD_SUBDIRECTORY(TSZ)
From 379134fd6c188c0c269b0e5064cc001c0a1b78e4 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Tue, 4 Mar 2025 09:41:15 +0800
Subject: [PATCH 057/105] enh:TD-33933-decouple-send-heartbeat-fix-case
---
source/util/src/tsched.c | 2 +-
source/util/src/ttimer.c | 11 ++++++++---
2 files changed, 9 insertions(+), 4 deletions(-)
diff --git a/source/util/src/tsched.c b/source/util/src/tsched.c
index 108168db55..9f0392c418 100644
--- a/source/util/src/tsched.c
+++ b/source/util/src/tsched.c
@@ -148,7 +148,7 @@ void *taosProcessSchedQueue(void *scheduler) {
snprintf(name, tListLen(name), "%s-taskQ", pSched->label);
setThreadName(name);
int64_t pid = taosGetSelfPthreadId();
- uInfo("scheduler %s is started, thread:%" PRId64, pSched->label, pid);
+ uInfo("scheduler %s is started, thread:%" PRId64, name, pid);
while (1) {
if ((ret = tsem_wait(&pSched->fullSem)) != 0) {
diff --git a/source/util/src/ttimer.c b/source/util/src/ttimer.c
index d6f47fb81b..67f76c6393 100644
--- a/source/util/src/ttimer.c
+++ b/source/util/src/ttimer.c
@@ -318,17 +318,22 @@ static void addToExpired(tmr_obj_t* head) {
schedMsg.msg = NULL;
schedMsg.ahandle = head;
schedMsg.thandle = NULL;
- if (head->priority == 1) {
+ uint8_t priority = head->priority;
+
+ if (priority == 1) {
if (taosScheduleTask(tmrQhandle, &schedMsg) != 0) {
tmrError("%s failed to add expired timer[id=%" PRIuPTR "] to queue.", head->ctrl->label, id);
}
- } else if (head->priority == 2) {
+ } else if (priority == 2) {
if (taosScheduleTask(tmrQhandleHigh, &schedMsg) != 0) {
tmrError("%s failed to add expired timer[id=%" PRIuPTR "] to high level queue.", head->ctrl->label, id);
}
}
+    else {
+ tmrError("%s invalid priority level %d for timer[id=%" PRIuPTR "].", head->ctrl->label, priority, id);
+ }
- tmrDebug("timer[id=%" PRIuPTR "] has been added to queue priority:%d.", id, head->priority);
+ tmrDebug("timer[id=%" PRIuPTR "] has been added to queue priority:%d.", id, priority);
head = next;
}
}
From e2b18a71822e346b1bfdbc397fa60c487d034dfc Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Tue, 4 Mar 2025 10:04:58 +0800
Subject: [PATCH 058/105] Enh(insert): use cache to improve auto create table
performance.
---
include/common/tmsg.h | 2 +
include/libs/catalog/catalog.h | 1 +
source/client/test/stmt2Test.cpp | 4 +-
source/common/src/msg/tmsg.c | 6 ++
source/dnode/vnode/src/vnd/vnodeQuery.c | 6 ++
source/libs/catalog/inc/catalogInt.h | 1 +
source/libs/catalog/src/ctgAsync.c | 3 +
source/libs/catalog/src/ctgRemote.c | 1 +
source/libs/parser/src/parInsertSql.c | 112 ++++++++++++++++++++----
9 files changed, 120 insertions(+), 16 deletions(-)
diff --git a/include/common/tmsg.h b/include/common/tmsg.h
index 5d4af4cd08..37bf9c8c34 100644
--- a/include/common/tmsg.h
+++ b/include/common/tmsg.h
@@ -490,6 +490,7 @@ typedef enum ENodeType {
typedef struct {
int32_t vgId;
uint8_t option; // 0x0 REQ_OPT_TBNAME, 0x01 REQ_OPT_TBUID
+ uint8_t autoCreateCtb; // 0x0 not auto create, 0x01 auto create
const char* dbFName;
const char* tbName;
} SBuildTableInput;
@@ -2173,6 +2174,7 @@ typedef struct {
char dbFName[TSDB_DB_FNAME_LEN];
char tbName[TSDB_TABLE_NAME_LEN];
uint8_t option;
+ uint8_t autoCreateCtb;
} STableInfoReq;
int32_t tSerializeSTableInfoReq(void* buf, int32_t bufLen, STableInfoReq* pReq);
diff --git a/include/libs/catalog/catalog.h b/include/libs/catalog/catalog.h
index 7c6f02513e..93e0fdfb4c 100644
--- a/include/libs/catalog/catalog.h
+++ b/include/libs/catalog/catalog.h
@@ -79,6 +79,7 @@ typedef struct SDbInfo {
typedef struct STablesReq {
char dbFName[TSDB_DB_FNAME_LEN];
SArray* pTables;
+ uint8_t autoCreate; // 0x0 not auto create, 0x01 auto create
} STablesReq;
typedef struct SCatalogReq {
diff --git a/source/client/test/stmt2Test.cpp b/source/client/test/stmt2Test.cpp
index 6bae063124..8675f70944 100644
--- a/source/client/test/stmt2Test.cpp
+++ b/source/client/test/stmt2Test.cpp
@@ -1927,8 +1927,10 @@ TEST(stmt2Case, async_order) {
while (!stop_task) {
auto elapsed_time = std::chrono::steady_clock::now() - start_time;
    if (std::chrono::duration_cast<std::chrono::seconds>(elapsed_time).count() > 60) {
+ if (t.joinable()) {
+ t.detach();
+ }
FAIL() << "Test[stmt2_async_test] timed out";
- t.detach();
break;
}
    std::this_thread::sleep_for(std::chrono::seconds(1));  // check once per second
diff --git a/source/common/src/msg/tmsg.c b/source/common/src/msg/tmsg.c
index d599799d59..b81b094882 100644
--- a/source/common/src/msg/tmsg.c
+++ b/source/common/src/msg/tmsg.c
@@ -6292,6 +6292,7 @@ int32_t tSerializeSTableInfoReq(void *buf, int32_t bufLen, STableInfoReq *pReq)
TAOS_CHECK_EXIT(tEncodeCStr(&encoder, pReq->dbFName));
TAOS_CHECK_EXIT(tEncodeCStr(&encoder, pReq->tbName));
TAOS_CHECK_EXIT(tEncodeU8(&encoder, pReq->option));
+ TAOS_CHECK_EXIT(tEncodeU8(&encoder, pReq->autoCreateCtb));
tEndEncode(&encoder);
_exit:
@@ -6331,6 +6332,11 @@ int32_t tDeserializeSTableInfoReq(void *buf, int32_t bufLen, STableInfoReq *pReq
} else {
pReq->option = 0;
}
+ if (!tDecodeIsEnd(&decoder)) {
+ TAOS_CHECK_EXIT(tDecodeU8(&decoder, &pReq->autoCreateCtb));
+ } else {
+ pReq->autoCreateCtb = 0;
+ }
tEndDecode(&decoder);
_exit:
diff --git a/source/dnode/vnode/src/vnd/vnodeQuery.c b/source/dnode/vnode/src/vnd/vnodeQuery.c
index 49dfb99499..34894825f2 100644
--- a/source/dnode/vnode/src/vnd/vnodeQuery.c
+++ b/source/dnode/vnode/src/vnd/vnodeQuery.c
@@ -90,12 +90,14 @@ int32_t vnodeGetTableMeta(SVnode *pVnode, SRpcMsg *pMsg, bool direct) {
void *pRsp = NULL;
SSchemaWrapper schema = {0};
SSchemaWrapper schemaTag = {0};
+ uint8_t autoCreateCtb = 0;
// decode req
if (tDeserializeSTableInfoReq(pMsg->pCont, pMsg->contLen, &infoReq) != 0) {
code = terrno;
goto _exit4;
}
+ autoCreateCtb = infoReq.autoCreateCtb;
if (infoReq.option == REQ_OPT_TBUID) reqTbUid = true;
metaRsp.dbId = pVnode->config.dbId;
@@ -223,6 +225,10 @@ _exit4:
rpcMsg.code = code;
rpcMsg.msgType = pMsg->msgType;
+ if (code == TSDB_CODE_PAR_TABLE_NOT_EXIST && autoCreateCtb == 1) {
+ code = TSDB_CODE_SUCCESS;
+ }
+
if (code) {
qError("get table %s meta with %" PRIu8 " failed cause of %s", infoReq.tbName, infoReq.option, tstrerror(code));
}
diff --git a/source/libs/catalog/inc/catalogInt.h b/source/libs/catalog/inc/catalogInt.h
index dd553ac301..f254a4f52c 100644
--- a/source/libs/catalog/inc/catalogInt.h
+++ b/source/libs/catalog/inc/catalogInt.h
@@ -481,6 +481,7 @@ struct SCtgTask {
typedef struct SCtgTaskReq {
SCtgTask* pTask;
int32_t msgIdx;
+ uint8_t autoCreateCtb;
} SCtgTaskReq;
typedef int32_t (*ctgInitTaskFp)(SCtgJob*, int32_t, void*);
diff --git a/source/libs/catalog/src/ctgAsync.c b/source/libs/catalog/src/ctgAsync.c
index 917d9feed6..0e7751a99e 100644
--- a/source/libs/catalog/src/ctgAsync.c
+++ b/source/libs/catalog/src/ctgAsync.c
@@ -3093,6 +3093,7 @@ int32_t ctgLaunchGetTbMetasTask(SCtgTask* pTask) {
SCtgTbMetasCtx* pCtx = (SCtgTbMetasCtx*)pTask->taskCtx;
SCtgJob* pJob = pTask->pJob;
SName* pName = NULL;
+ bool autoCreate = false;
int32_t dbNum = taosArrayGetSize(pCtx->pNames);
int32_t fetchIdx = 0;
@@ -3103,6 +3104,7 @@ int32_t ctgLaunchGetTbMetasTask(SCtgTask* pTask) {
ctgError("fail to get the %dth STablesReq, num:%d", i, dbNum);
CTG_ERR_RET(TSDB_CODE_CTG_INVALID_INPUT);
}
+ autoCreate = pReq->autoCreate;
ctgDebug("start to check tb metas in db %s, tbNum %ld", pReq->dbFName, taosArrayGetSize(pReq->pTables));
CTG_ERR_RET(ctgGetTbMetasFromCache(pCtg, pConn, pCtx, i, &fetchIdx, baseResIdx, pReq->pTables));
@@ -3143,6 +3145,7 @@ int32_t ctgLaunchGetTbMetasTask(SCtgTask* pTask) {
}
SCtgTaskReq tReq;
+ tReq.autoCreateCtb = (autoCreate && i == pCtx->fetchNum - 1) ? 1 : 0;
tReq.pTask = pTask;
tReq.msgIdx = pFetch->fetchIdx;
CTG_ERR_RET(ctgAsyncRefreshTbMeta(&tReq, pFetch->flag, pName, &pFetch->vgId));
diff --git a/source/libs/catalog/src/ctgRemote.c b/source/libs/catalog/src/ctgRemote.c
index ec93b7dee2..dca4e2d2fa 100644
--- a/source/libs/catalog/src/ctgRemote.c
+++ b/source/libs/catalog/src/ctgRemote.c
@@ -1380,6 +1380,7 @@ int32_t ctgGetTbMetaFromVnode(SCatalog* pCtg, SRequestConnInfo* pConn, const SNa
SBuildTableInput bInput = {.vgId = vgroupInfo->vgId,
.option = reqType == TDMT_VND_TABLE_NAME ? REQ_OPT_TBUID : REQ_OPT_TBNAME,
+ .autoCreateCtb = tReq->autoCreateCtb,
.dbFName = dbFName,
.tbName = (char*)tNameGetTableName(pTableName)};
char* msg = NULL;
diff --git a/source/libs/parser/src/parInsertSql.c b/source/libs/parser/src/parInsertSql.c
index 5ff6e4f555..0c60a787ce 100644
--- a/source/libs/parser/src/parInsertSql.c
+++ b/source/libs/parser/src/parInsertSql.c
@@ -35,6 +35,8 @@ typedef struct SInsertParseContext {
} SInsertParseContext;
typedef int32_t (*_row_append_fn_t)(SMsgBuf* pMsgBuf, const void* value, int32_t len, void* param);
+static int32_t parseBoundTagsClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt);
+static int32_t parseTagsClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt, bool autoCreate);
static uint8_t TRUE_VALUE = (uint8_t)TSDB_TRUE;
static uint8_t FALSE_VALUE = (uint8_t)TSDB_FALSE;
@@ -102,6 +104,7 @@ static int32_t skipTableOptions(SInsertParseContext* pCxt, const char** pSql) {
}
// pSql -> stb_name [(tag1_name, ...)] TAGS (tag1_value, ...)
+#if 0
static int32_t ignoreUsingClause(SInsertParseContext* pCxt, const char** pSql) {
int32_t code = TSDB_CODE_SUCCESS;
SToken token;
@@ -137,6 +140,29 @@ static int32_t ignoreUsingClause(SInsertParseContext* pCxt, const char** pSql) {
return code;
}
+#else
+static int32_t ignoreUsingClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
+ const char** pSql = &pStmt->pSql;
+ int32_t code = TSDB_CODE_SUCCESS;
+ SToken token;
+ NEXT_TOKEN(*pSql, token);
+ code = parseBoundTagsClause(pCxt, pStmt);
+ if (TSDB_CODE_SUCCESS != code) {
+ return code;
+ }
+ // pSql -> TAGS (tag1_value, ...)
+ code = parseTagsClause(pCxt, pStmt, true);
+ if (TSDB_CODE_SUCCESS != code) {
+ return code;
+ }
+
+ if (TSDB_CODE_SUCCESS == code) {
+ code = skipTableOptions(pCxt, pSql);
+ }
+
+ return code;
+}
+#endif
static int32_t parseDuplicateUsingClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt, bool* pDuplicate) {
int32_t code = TSDB_CODE_SUCCESS;
@@ -150,10 +176,12 @@ static int32_t parseDuplicateUsingClause(SInsertParseContext* pCxt, SVnodeModify
STableMeta** pMeta = taosHashGet(pStmt->pSubTableHashObj, tbFName, strlen(tbFName));
if (NULL != pMeta) {
*pDuplicate = true;
- code = ignoreUsingClause(pCxt, &pStmt->pSql);
- if (TSDB_CODE_SUCCESS == code) {
- return cloneTableMeta(*pMeta, &pStmt->pTableMeta);
+ pCxt->missCache = false;
+ code = cloneTableMeta(*pMeta, &pStmt->pTableMeta);
+ if (TSDB_CODE_SUCCESS != code) {
+ return code;
}
+ return ignoreUsingClause(pCxt, pStmt);
}
return code;
@@ -937,7 +965,7 @@ static int32_t checkSubtablePrivilege(SArray* pTagVals, SArray* pTagName, SNode*
}
// pSql -> tag1_value, ...)
-static int32_t parseTagsClauseImpl(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
+static int32_t parseTagsClauseImpl(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt, bool autoCreate) {
int32_t code = TSDB_CODE_SUCCESS;
SSchema* pSchema = getTableTagSchema(pStmt->pTableMeta);
SArray* pTagVals = NULL;
@@ -1011,7 +1039,7 @@ _exit:
// input pStmt->pSql: TAGS (tag1_value, ...) [table_options] ...
// output pStmt->pSql: [table_options] ...
-static int32_t parseTagsClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
+static int32_t parseTagsClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt, bool autoCreate) {
SToken token;
NEXT_TOKEN(pStmt->pSql, token);
if (TK_TAGS != token.type) {
@@ -1023,7 +1051,7 @@ static int32_t parseTagsClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pS
return buildSyntaxErrMsg(&pCxt->msg, "( is expected", token.z);
}
- int32_t code = parseTagsClauseImpl(pCxt, pStmt);
+ int32_t code = parseTagsClauseImpl(pCxt, pStmt, autoCreate);
if (TSDB_CODE_SUCCESS == code) {
NEXT_VALID_TOKEN(pStmt->pSql, token);
if (TK_NK_COMMA == token.type) {
@@ -1108,7 +1136,7 @@ static int32_t parseUsingClauseBottom(SInsertParseContext* pCxt, SVnodeModifyOpS
int32_t code = parseBoundTagsClause(pCxt, pStmt);
if (TSDB_CODE_SUCCESS == code) {
- code = parseTagsClause(pCxt, pStmt);
+ code = parseTagsClause(pCxt, pStmt, false);
}
if (TSDB_CODE_SUCCESS == code) {
code = parseTableOptions(pCxt, pStmt);
@@ -1289,13 +1317,12 @@ static int32_t preParseUsingTableName(SInsertParseContext* pCxt, SVnodeModifyOpS
}
static int32_t getUsingTableSchema(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
+ int32_t code = TSDB_CODE_SUCCESS;
if (pCxt->forceUpdate) {
pCxt->missCache = true;
return TSDB_CODE_SUCCESS;
}
-
- int32_t code = checkAuth(pCxt->pComCxt, &pStmt->usingTableName, &pCxt->missCache, &pStmt->pTagCond);
- if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
+ if (!pCxt->missCache) {
bool bUsingTable = true;
code = getTableMeta(pCxt, &pStmt->usingTableName, &pStmt->pTableMeta, &pCxt->missCache, bUsingTable);
}
@@ -1333,15 +1360,27 @@ static int32_t parseUsingTableNameImpl(SInsertParseContext* pCxt, SVnodeModifyOp
static int32_t parseUsingTableName(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
SToken token;
int32_t index = 0;
+ int32_t code = TSDB_CODE_SUCCESS;
+
NEXT_TOKEN_KEEP_SQL(pStmt->pSql, token, index);
- if (TK_USING != token.type) {
- return getTargetTableSchema(pCxt, pStmt);
+ if (pCxt->isStmtBind) {
+ if (token.type != TK_USING) {
+ return getTargetTableSchema(pCxt, pStmt);
+ }
+ } else {
+ code = getTargetTableSchema(pCxt, pStmt);
+ if (token.type != TK_USING) {
+ return code;
+ } else if ((!pCxt->missCache) && (TSDB_CODE_SUCCESS == code) && (!pCxt->isStmtBind)) {
+ pStmt->pSql += index;
+ return ignoreUsingClause(pCxt, pStmt);
+ }
}
pStmt->usingTableProcessing = true;
// pStmt->pSql -> stb_name [(tag1_name, ...)
pStmt->pSql += index;
- int32_t code = parseDuplicateUsingClause(pCxt, pStmt, &pCxt->usingDuplicateTable);
+ code = parseDuplicateUsingClause(pCxt, pStmt, &pCxt->usingDuplicateTable);
if (TSDB_CODE_SUCCESS == code && !pCxt->usingDuplicateTable) {
return parseUsingTableNameImpl(pCxt, pStmt);
}
@@ -2842,7 +2881,7 @@ static int32_t checkAuthFromMetaData(const SArray* pUsers, SNode** pTagCond) {
}
static int32_t getTableMetaFromMetaData(const SArray* pTables, STableMeta** pMeta) {
- if (1 != taosArrayGetSize(pTables)) {
+ if (1 != taosArrayGetSize(pTables) && 2 != taosArrayGetSize(pTables)) {
return TSDB_CODE_FAILED;
}
@@ -3119,6 +3158,29 @@ static int32_t parseInsertSqlImpl(SInsertParseContext* pCxt, SVnodeModifyOpStmt*
return parseInsertSqlFromTable(pCxt, pStmt);
}
+static int32_t buildUsingInsertTableReq(SName* pSName, SName* pCName, SArray** pTables) {
+ if (NULL == *pTables) {
+ *pTables = taosArrayInit(2, sizeof(SName));
+ if (NULL == *pTables) {
+ goto _err;
+ }
+ }
+ if (NULL == taosArrayPush(*pTables, pSName)) {
+ goto _err;
+ }
+ if (NULL == taosArrayPush(*pTables, pCName)) {
+ goto _err;
+ }
+ return TSDB_CODE_SUCCESS;
+
+_err:
+ if (NULL != *pTables) {
+ taosArrayDestroy(*pTables);
+ *pTables = NULL;
+ }
+ return terrno;
+}
+
static int32_t buildInsertTableReq(SName* pName, SArray** pTables) {
*pTables = taosArrayInit(1, sizeof(SName));
if (NULL == *pTables) {
@@ -3133,6 +3195,26 @@ static int32_t buildInsertTableReq(SName* pName, SArray** pTables) {
return TSDB_CODE_SUCCESS;
}
+static int32_t buildInsertUsingDbReq(SName* pSName, SName* pCName, SArray** pDbs) {
+ if (NULL == *pDbs) {
+ *pDbs = taosArrayInit(1, sizeof(STablesReq));
+ if (NULL == *pDbs) {
+ return terrno;
+ }
+ }
+
+ STablesReq req = {0};
+ req.autoCreate = 1;
+ (void)tNameGetFullDbName(pSName, req.dbFName);
+ (void)tNameGetFullDbName(pCName, req.dbFName);
+
+ int32_t code = buildUsingInsertTableReq(pSName, pCName, &req.pTables);
+ if (TSDB_CODE_SUCCESS == code && NULL == taosArrayPush(*pDbs, &req)) {
+ code = TSDB_CODE_OUT_OF_MEMORY;
+ }
+ return code;
+}
+
static int32_t buildInsertDbReq(SName* pName, SArray** pDbs) {
if (NULL == *pDbs) {
*pDbs = taosArrayInit(1, sizeof(STablesReq));
@@ -3182,7 +3264,7 @@ static int32_t buildInsertCatalogReq(SInsertParseContext* pCxt, SVnodeModifyOpSt
if (0 == pStmt->usingTableName.type) {
code = buildInsertDbReq(&pStmt->targetTableName, &pCatalogReq->pTableMeta);
} else {
- code = buildInsertDbReq(&pStmt->usingTableName, &pCatalogReq->pTableMeta);
+ code = buildInsertUsingDbReq(&pStmt->usingTableName, &pStmt->targetTableName, &pCatalogReq->pTableMeta);
}
}
if (TSDB_CODE_SUCCESS == code) {
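The statement shape this change targets is the auto-create insert: the catalog request now carries both the super table and the target child table plus the autoCreateCtb flag, so a missing child table no longer forces the hard TSDB_CODE_PAR_TABLE_NOT_EXIST path. A hedged client-side sketch using the public C API; connection parameters and table names are placeholders:

    #include <stdio.h>
    #include "taos.h"

    int main(void) {
        TAOS* conn = taos_connect("localhost", "root", "taosdata", NULL, 0);
        if (conn == NULL) {
            return 1;
        }

        /* child table test.t_0 may not exist yet; it is created on the fly */
        const char* sql =
            "INSERT INTO test.t_0 USING test.stb TAGS (0) "
            "VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')";

        TAOS_RES* res = taos_query(conn, sql);
        if (taos_errno(res) != 0) {
            fprintf(stderr, "insert failed: %s\n", taos_errstr(res));
        }
        taos_free_result(res);
        taos_close(conn);
        return 0;
    }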
From b5ea56d80999d1d8b8dd6d530999a44923c3272d Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Tue, 4 Mar 2025 11:12:41 +0800
Subject: [PATCH 059/105] Test(insert): add tests for auto create insert
---
source/common/src/msg/tmsg.c | 6 +-
source/libs/parser/src/parInsertSql.c | 2 +-
tests/army/insert/auto_create_bench.py | 114 +++++++++++++++
tests/army/insert/auto_create_insert.py | 179 ++++++++++++++++++++++++
4 files changed, 299 insertions(+), 2 deletions(-)
create mode 100644 tests/army/insert/auto_create_bench.py
create mode 100644 tests/army/insert/auto_create_insert.py
diff --git a/source/common/src/msg/tmsg.c b/source/common/src/msg/tmsg.c
index b81b094882..1625d11943 100644
--- a/source/common/src/msg/tmsg.c
+++ b/source/common/src/msg/tmsg.c
@@ -11952,7 +11952,11 @@ int32_t tEncodeSubmitReq(SEncoder *pCoder, const SSubmitReq2 *pReq) {
}
} else{
for (uint64_t i = 0; i < taosArrayGetSize(pReq->aSubmitTbData); i++) {
- TAOS_CHECK_EXIT(tEncodeSSubmitTbData(pCoder, taosArrayGet(pReq->aSubmitTbData, i)));
+ SSubmitTbData *pSubmitTbData = taosArrayGet(pReq->aSubmitTbData, i);
+ if ((pSubmitTbData->flags & SUBMIT_REQ_AUTO_CREATE_TABLE) && pSubmitTbData->pCreateTbReq == NULL) {
+ pSubmitTbData->flags = 0;
+ }
+ TAOS_CHECK_EXIT(tEncodeSSubmitTbData(pCoder, pSubmitTbData));
}
}
diff --git a/source/libs/parser/src/parInsertSql.c b/source/libs/parser/src/parInsertSql.c
index 0c60a787ce..74fb667733 100644
--- a/source/libs/parser/src/parInsertSql.c
+++ b/source/libs/parser/src/parInsertSql.c
@@ -1019,7 +1019,7 @@ static int32_t parseTagsClauseImpl(SInsertParseContext* pCxt, SVnodeModifyOpStmt
code = tTagNew(pTagVals, 1, false, &pTag);
}
- if (TSDB_CODE_SUCCESS == code && !isParseBindParam) {
+ if (TSDB_CODE_SUCCESS == code && !isParseBindParam && !autoCreate) {
code = buildCreateTbReq(pStmt, pTag, pTagName);
pTag = NULL;
}
diff --git a/tests/army/insert/auto_create_bench.py b/tests/army/insert/auto_create_bench.py
new file mode 100644
index 0000000000..0996aac87a
--- /dev/null
+++ b/tests/army/insert/auto_create_bench.py
@@ -0,0 +1,114 @@
+import time
+import taos
+
+conn = taos.connect()
+
+total_batches = 100
+tables_per_batch = 100
+
+def prepare_database():
+ cursor = conn.cursor()
+ cursor.execute("DROP DATABASE IF EXISTS test")
+ cursor.execute("CREATE DATABASE IF NOT EXISTS test")
+ cursor.execute("USE test")
+ cursor.execute("CREATE STABLE IF NOT EXISTS stb (ts TIMESTAMP, a INT, b FLOAT, c BINARY(10)) TAGS (e_id INT)")
+ cursor.close()
+
+def test_auto_create_tables():
+ """测试场景1:自动建表插入"""
+ cursor = conn.cursor()
+ cursor.execute("USE test")
+ print("开始测试自动建表插入...")
+
+ start_time = time.time()
+ for _ in range(100):
+ for batch in range(total_batches):
+            # Generate the child table ID range for this batch
+ start_id = batch * tables_per_batch
+ end_id = start_id + tables_per_batch
+
+            # Build the batch insert SQL
+ sql_parts = []
+ for i in range(start_id, end_id):
+ sql_part = f"t_{i} USING stb TAGS ({i}) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')"
+ sql_parts.append(sql_part)
+
+            # Execute the batch insert
+ full_sql = "INSERT INTO " + " ".join(sql_parts)
+ cursor.execute(full_sql)
+
+
+ elapsed = time.time() - start_time
+ print(f"自动建表插入耗时: {elapsed:.2f} 秒")
+
+ cursor.close()
+ return elapsed
+
+def precreate_tables():
+ """预处理:创建所有子表结构"""
+ cursor = conn.cursor()
+ cursor.execute("USE test")
+
+ print("\n开始预创建子表...")
+ start_time = time.time()
+
+ for batch in range(total_batches):
+ start_id = batch * tables_per_batch
+ end_id = start_id + tables_per_batch
+
+ for i in range(start_id, end_id):
+ sql_part = f"CREATE TABLE t_{i} USING stb TAGS ({i})"
+ cursor.execute(sql_part)
+
+ elapsed = time.time() - start_time
+ print(f"子表预创建耗时: {elapsed:.2f} 秒")
+
+ cursor.close()
+
+def test_direct_insert():
+ """测试场景2:直接插入已存在的子表"""
+ cursor = conn.cursor()
+ cursor.execute("USE test")
+
+ print("\n开始测试直接插入...")
+ start_time = time.time()
+ for _ in range(100):
+ for batch in range(total_batches):
+ start_id = batch * tables_per_batch
+ end_id = start_id + tables_per_batch
+
+ # Build the batch insert SQL
+ sql_parts = []
+ for i in range(start_id, end_id):
+ sql_part = f"t_{i} VALUES ('2024-01-01 00:00:01', 1, 2.0, 'test')"
+ sql_parts.append(sql_part)
+
+ # Execute the batch insert
+ full_sql = "INSERT INTO " + " ".join(sql_parts)
+ cursor.execute(full_sql)
+
+ elapsed = time.time() - start_time
+ print(f"直接插入耗时: {elapsed:.2f} 秒")
+
+ cursor.close()
+ return elapsed
+
+if __name__ == "__main__":
+ # Initialize the database environment
+ prepare_database()
+ # Pre-create all child tables
+ precreate_tables()
+ # Test scenario 1: auto-create table insert
+ auto_create_time = test_auto_create_tables()
+ # Clean up and re-initialize the environment
+ prepare_database()
+ # Pre-create all child tables
+ precreate_tables()
+ # Test scenario 2: direct insert
+ direct_insert_time = test_direct_insert()
+
+ # Print the final results
+ print("\nTest result comparison:")
+ print(f"Auto-create insert elapsed: {auto_create_time:.2f} s")
+ print(f"Direct insert elapsed: {direct_insert_time:.2f} s")
+ print(f"Performance ratio: {auto_create_time/direct_insert_time:.1f}x")
\ No newline at end of file
diff --git a/tests/army/insert/auto_create_insert.py b/tests/army/insert/auto_create_insert.py
new file mode 100644
index 0000000000..4137c0018c
--- /dev/null
+++ b/tests/army/insert/auto_create_insert.py
@@ -0,0 +1,179 @@
+###################################################################
+# Copyright (c) 2016 by TAOS Technologies, Inc.
+# All rights reserved.
+#
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import time
+import random
+
+import taos
+import frame
+import frame.etool
+
+
+from frame.log import *
+from frame.cases import *
+from frame.sql import *
+from frame.caseBase import *
+from frame import *
+
+
+class TDTestCase(TBase):
+
+ def prepare_database(self):
+ tdLog.info(f"prepare database")
+ tdSql.execute("DROP DATABASE IF EXISTS test")
+ tdSql.execute("CREATE DATABASE IF NOT EXISTS test")
+ tdSql.execute("USE test")
+ tdSql.execute("CREATE STABLE IF NOT EXISTS stb (ts TIMESTAMP, a INT, b FLOAT, c BINARY(10)) TAGS (e_id INT)")
+
+
+ def insert_table_auto_create(self):
+ tdLog.info(f"insert table auto create")
+ tdSql.execute("USE test")
+ tdLog.info("start to test auto create insert...")
+ tdSql.execute("INSERT INTO t_0 USING stb TAGS (0) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')")
+ tdSql.execute("INSERT INTO t_0 USING stb TAGS (0) VALUES ('2024-01-01 00:00:01', 1, 2.0, 'test')")
+ tdSql.query("select * from t_0")
+ tdSql.checkRows(2)
+
+ def insert_table_pre_create(self):
+ tdLog.info(f"insert table pre create")
+ tdSql.execute("USE test")
+ tdLog.info("start to pre create table...")
+ tdSql.execute("CREATE TABLE t_1 USING stb TAGS (1)")
+ tdLog.info("start to test pre create insert...")
+ tdSql.execute("INSERT INTO t_1 USING stb TAGS (1) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')")
+ tdSql.execute("INSERT INTO t_1 VALUES ('2024-01-01 00:00:01', 1, 2.0, 'test')")
+ tdSql.query("select * from t_1")
+ tdSql.checkRows(2)
+
+ def insert_table_auto_insert_with_cache(self):
+ tdLog.info(f"insert table auto insert with cache")
+ tdSql.execute("USE test")
+ tdLog.info("start to test auto insert with cache...")
+ tdSql.execute("CREATE TABLE t_2 USING stb TAGS (2)")
+ tdLog.info("start to insert to init cache...")
+ tdSql.execute("INSERT INTO t_2 VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')")
+ tdSql.execute("INSERT INTO t_2 USING stb TAGS (2) VALUES ('2024-01-01 00:00:01', 1, 2.0, 'test')")
+ tdSql.query("select * from t_2")
+ tdSql.checkRows(2)
+
+ def insert_table_auto_insert_with_multi_rows(self):
+ tdLog.info(f"insert table auto insert with multi rows")
+ tdSql.execute("USE test")
+ tdLog.info("start to test auto insert with multi rows...")
+ tdSql.execute("CREATE TABLE t_3 USING stb TAGS (3)")
+ tdLog.info("start to insert multi rows...")
+ tdSql.execute("INSERT INTO t_3 VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test'), ('2024-01-01 00:00:01', 1, 2.0, 'test')")
+ tdSql.query("select * from t_3")
+ tdSql.checkRows(2)
+
+ tdLog.info("start to insert multi rows with direct insert and auto create...")
+ tdSql.execute("INSERT INTO t_4 USING stb TAGS (4) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test'), t_3 VALUES ('2024-01-01 00:00:02', 1, 2.0, 'test')")
+ tdSql.query("select * from t_4")
+ tdSql.checkRows(1)
+ tdSql.query("select * from t_3")
+ tdSql.checkRows(3)
+
+ tdLog.info("start to insert multi rows with auto create and direct insert...")
+ tdSql.execute("INSERT INTO t_3 VALUES ('2024-01-01 00:00:03', 1, 2.0, 'test'),t_4 USING stb TAGS (4) VALUES ('2024-01-01 00:00:01', 1, 2.0, 'test'),")
+ tdSql.query("select * from t_4")
+ tdSql.checkRows(2)
+ tdSql.query("select * from t_3")
+ tdSql.checkRows(4)
+
+ tdLog.info("start to insert multi rows with auto create into same table...")
+ tdSql.execute("INSERT INTO t_10 USING stb TAGS (10) VALUES ('2024-01-01 00:00:04', 1, 2.0, 'test'),t_10 USING stb TAGS (10) VALUES ('2024-01-01 00:00:05', 1, 2.0, 'test'),")
+ tdSql.query("select * from t_10")
+ tdSql.checkRows(2)
+
+ def check_some_err_case(self):
+ tdLog.info(f"check some err case")
+ tdSql.execute("USE test")
+
+ tdLog.info("start to test err stb name...")
+ tdSql.error("INSERT INTO t_5 USING errrrxx TAGS (5) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="Table does not exist")
+
+ tdLog.info("start to test err syntax name...")
+ tdSql.error("INSERT INTO t_5 USING stb TAG (5) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+
+ tdLog.info("start to test err syntax values...")
+ tdSql.error("INSERT INTO t_5 USING stb TAG (5) VALUS ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+
+ tdLog.info("start to test err tag counts...")
+ tdSql.error("INSERT INTO t_5 USING stb TAG (5,1) VALUS ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+
+ tdLog.info("start to test err tag counts...")
+ tdSql.error("INSERT INTO t_5 USING stb TAG ('dasds') VALUS ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+
+ tdLog.info("start to test err values counts...")
+ tdSql.error("INSERT INTO t_5 USING stb TAGS (5) VALUES ('2024-01-01 00:00:00', 1, 1 ,2.0, 'test')", expectErrInfo="Illegal number of columns")
+
+ tdLog.info("start to test err values...")
+ tdSql.error("INSERT INTO t_5 USING stb TAGS (5) VALUES ('2024-01-01 00:00:00', 'dasdsa', 1 ,2.0, 'test')", expectErrInfo="syntax error")
+
+ def check_same_table_same_ts(self):
+ tdLog.info(f"check same table same ts")
+ tdSql.execute("USE test")
+ tdSql.execute("INSERT INTO t_6 USING stb TAGS (6) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test') t_6 USING stb TAGS (6) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')")
+ tdSql.query("select * from t_6")
+ tdSql.checkRows(1)
+
+ def check_tag_parse_error_with_cache(self):
+ tdLog.info(f"check tag parse error with cache")
+ tdSql.execute("USE test")
+ tdSql.execute("INSERT INTO t_7 USING stb TAGS (7) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')")
+ tdSql.error("INSERT INTO t_7 USING stb TAGS ('ddd') VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+ tdSql.query("select * from t_7")
+ tdSql.checkRows(1)
+
+ def check_duplicate_table_with_err_tag(self):
+ tdLog.info(f"check tag parse error with cache")
+ tdSql.execute("USE test")
+ tdSql.error("INSERT INTO t_8 USING stb TAGS (8) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test') t_8 USING stb TAGS (ddd) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+
+ # run
+ def run(self):
+ tdLog.debug(f"start to excute {__file__}")
+
+ # prepare database
+ self.prepare_database()
+
+ # insert table auto create
+ self.insert_table_auto_create()
+
+ # insert table pre create
+ self.insert_table_pre_create()
+
+ # insert table auto insert with cache
+ self.insert_table_auto_insert_with_cache()
+
+ # insert table auto insert with multi rows
+ self.insert_table_auto_insert_with_multi_rows()
+
+ # check some err case
+ self.check_some_err_case()
+
+ # check same table same ts
+ self.check_same_table_same_ts()
+
+ # check tag parse error with cache
+ self.check_tag_parse_error_with_cache()
+
+ # check duplicate table with err tag
+ self.check_duplicate_table_with_err_tag()
+
+ tdLog.success(f"{__file__} successfully executed")
+
+tdCases.addLinux(__file__, TDTestCase())
+tdCases.addWindows(__file__, TDTestCase())
\ No newline at end of file
From 0d630ecdf9a9265b9e29b400719fc804d26abb40 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Tue, 4 Mar 2025 14:19:57 +0800
Subject: [PATCH 060/105] fix: fix csv file handle leaks
---
tools/taos-tools/src/benchCsv.c | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index cf8527d375..848cd9a6ef 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -448,7 +448,7 @@ static int csvExportCreateSql(CsvWriteMeta* write_meta) {
goto end;
}
- succPrint("Export create sql to file: %s successfully..\n", fullname);
+ succPrint("Export create sql to file: %s successfully.\n", fullname);
end:
if (fp) {
@@ -827,6 +827,10 @@ static CsvIoError csvWrite(CsvFileHandle* fhdl, const char* buf, size_t size) {
static void csvClose(CsvFileHandle* fhdl) {
+ if (!fhdl) {
+ return;
+ }
+
if (fhdl->compress_level == CSV_COMPRESS_NONE) {
if (fhdl->handle.fp) {
fclose(fhdl->handle.fp);
@@ -838,6 +842,7 @@ static void csvClose(CsvFileHandle* fhdl) {
fhdl->handle.gf = NULL;
}
}
+ tmfree(fhdl);
}
From c641136b43c0aace69dcc67912014077e4e40db1 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Tue, 4 Mar 2025 14:21:48 +0800
Subject: [PATCH 061/105] enh: TD-33933-decouple-send-heartbeat-fix-cases
---
source/util/src/ttimer.c | 3 +++
1 file changed, 3 insertions(+)
diff --git a/source/util/src/ttimer.c b/source/util/src/ttimer.c
index 67f76c6393..3e6dfa1a25 100644
--- a/source/util/src/ttimer.c
+++ b/source/util/src/ttimer.c
@@ -686,6 +686,9 @@ void taosTmrCleanUp(void* handle) {
taosCleanUpScheduler(tmrQhandle);
taosMemoryFreeClear(tmrQhandle);
+ taosCleanUpScheduler(tmrQhandleHigh);
+ taosMemoryFreeClear(tmrQhandleHigh);
+
for (int32_t i = 0; i < tListLen(wheels); i++) {
time_wheel_t* wheel = wheels + i;
(void)taosThreadMutexDestroy(&wheel->mutex);
From be9732d774ee18112e11e0d89346bd35bdc56fe0 Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Tue, 4 Mar 2025 14:39:14 +0800
Subject: [PATCH 062/105] Fix(insert): insert into an existing ctable in another
stable.
---
source/libs/parser/src/parInsertSql.c | 55 +++++++++++++++----------
tests/army/insert/auto_create_insert.py | 14 +++++++
2 files changed, 48 insertions(+), 21 deletions(-)
diff --git a/source/libs/parser/src/parInsertSql.c b/source/libs/parser/src/parInsertSql.c
index 74fb667733..290011a708 100644
--- a/source/libs/parser/src/parInsertSql.c
+++ b/source/libs/parser/src/parInsertSql.c
@@ -144,8 +144,6 @@ static int32_t ignoreUsingClause(SInsertParseContext* pCxt, const char** pSql) {
static int32_t ignoreUsingClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
const char** pSql = &pStmt->pSql;
int32_t code = TSDB_CODE_SUCCESS;
- SToken token;
- NEXT_TOKEN(*pSql, token);
code = parseBoundTagsClause(pCxt, pStmt);
if (TSDB_CODE_SUCCESS != code) {
return code;
@@ -181,6 +179,8 @@ static int32_t parseDuplicateUsingClause(SInsertParseContext* pCxt, SVnodeModify
if (TSDB_CODE_SUCCESS != code) {
return code;
}
+ SToken token;
+ NEXT_TOKEN(pStmt->pSql, token);
return ignoreUsingClause(pCxt, pStmt);
}
@@ -1316,16 +1316,37 @@ static int32_t preParseUsingTableName(SInsertParseContext* pCxt, SVnodeModifyOpS
return insCreateSName(&pStmt->usingTableName, pTbName, pCxt->pComCxt->acctId, pCxt->pComCxt->db, &pCxt->msg);
}
-static int32_t getUsingTableSchema(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
+static int32_t getUsingTableSchema(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt, bool* ctbCacheHit) {
int32_t code = TSDB_CODE_SUCCESS;
+ STableMeta* pStableMeta = NULL;
+ STableMeta* pCtableMeta = NULL;
if (pCxt->forceUpdate) {
pCxt->missCache = true;
return TSDB_CODE_SUCCESS;
}
if (!pCxt->missCache) {
bool bUsingTable = true;
- code = getTableMeta(pCxt, &pStmt->usingTableName, &pStmt->pTableMeta, &pCxt->missCache, bUsingTable);
+ code = getTableMeta(pCxt, &pStmt->usingTableName, &pStableMeta, &pCxt->missCache, bUsingTable);
}
+
+ if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
+ bool bUsingTable = false;
+ code = getTableMeta(pCxt, &pStmt->targetTableName, &pCtableMeta, &pCxt->missCache, bUsingTable);
+ }
+
+ if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
+ code = (pStableMeta->suid == pCtableMeta->suid) ? TSDB_CODE_SUCCESS : TSDB_CODE_TDB_TABLE_IN_OTHER_STABLE;
+ *ctbCacheHit = true;
+ }
+ if (TSDB_CODE_SUCCESS == code) {
+ if (*ctbCacheHit) {
+ code = cloneTableMeta(pCtableMeta,&pStmt->pTableMeta);
+ } else {
+ code = cloneTableMeta( pStableMeta,&pStmt->pTableMeta);
+ }
+ }
+ taosMemoryFree(pStableMeta);
+ taosMemoryFree(pCtableMeta);
if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
code = getTargetTableVgroup(pCxt->pComCxt, pStmt, true, &pCxt->missCache);
}
@@ -1341,9 +1362,14 @@ static int32_t getUsingTableSchema(SInsertParseContext* pCxt, SVnodeModifyOpStmt
static int32_t parseUsingTableNameImpl(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
SToken token;
NEXT_TOKEN(pStmt->pSql, token);
+ bool ctbCacheHit = false;
int32_t code = preParseUsingTableName(pCxt, pStmt, &token);
if (TSDB_CODE_SUCCESS == code) {
- code = getUsingTableSchema(pCxt, pStmt);
+ code = getUsingTableSchema(pCxt, pStmt, &ctbCacheHit);
+ if (TSDB_CODE_SUCCESS == code && ctbCacheHit) {
+ pStmt->usingTableProcessing = false;
+ return ignoreUsingClause(pCxt, pStmt);
+ }
}
if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
code = storeChildTableMeta(pCxt, pStmt);
@@ -1360,27 +1386,14 @@ static int32_t parseUsingTableNameImpl(SInsertParseContext* pCxt, SVnodeModifyOp
static int32_t parseUsingTableName(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
SToken token;
int32_t index = 0;
- int32_t code = TSDB_CODE_SUCCESS;
-
NEXT_TOKEN_KEEP_SQL(pStmt->pSql, token, index);
- if (pCxt->isStmtBind) {
- if (token.type != TK_USING) {
- return getTargetTableSchema(pCxt, pStmt);
- }
- } else {
- code = getTargetTableSchema(pCxt, pStmt);
- if (token.type != TK_USING) {
- return code;
- } else if ((!pCxt->missCache) && (TSDB_CODE_SUCCESS == code) && (!pCxt->isStmtBind)) {
- pStmt->pSql += index;
- return ignoreUsingClause(pCxt, pStmt);
- }
+ if (TK_USING != token.type) {
+ return getTargetTableSchema(pCxt, pStmt);
}
-
pStmt->usingTableProcessing = true;
// pStmt->pSql -> stb_name [(tag1_name, ...)
pStmt->pSql += index;
- code = parseDuplicateUsingClause(pCxt, pStmt, &pCxt->usingDuplicateTable);
+ int32_t code = parseDuplicateUsingClause(pCxt, pStmt, &pCxt->usingDuplicateTable);
if (TSDB_CODE_SUCCESS == code && !pCxt->usingDuplicateTable) {
return parseUsingTableNameImpl(pCxt, pStmt);
}
diff --git a/tests/army/insert/auto_create_insert.py b/tests/army/insert/auto_create_insert.py
index 4137c0018c..efaa421a6c 100644
--- a/tests/army/insert/auto_create_insert.py
+++ b/tests/army/insert/auto_create_insert.py
@@ -142,6 +142,17 @@ class TDTestCase(TBase):
tdSql.execute("USE test")
tdSql.error("INSERT INTO t_8 USING stb TAGS (8) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test') t_8 USING stb TAGS (ddd) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+ def check_table_with_another_stb_name(self):
+ tdLog.info(f"check table with another stb name")
+ tdSql.execute("USE test")
+ tdSql.execute("CREATE STABLE IF NOT EXISTS stb2 (ts TIMESTAMP, a INT, b FLOAT, c BINARY(10)) TAGS (e_id INT)")
+ tdSql.execute("INSERT INTO t_20 USING stb2 TAGS (20) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')")
+ tdSql.query("select * from t_20")
+ tdSql.checkRows(1)
+ tdSql.error("INSERT INTO t_20 USING stb TAGS (20) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="Table already exists in other stables")
+ tdSql.error("INSERT INTO t_20 USING stb TAGS (20) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="Table already exists in other stables")
+
+
# run
def run(self):
tdLog.debug(f"start to excute {__file__}")
@@ -173,6 +184,9 @@ class TDTestCase(TBase):
# check duplicate table with err tag
self.check_duplicate_table_with_err_tag()
+ # check table with another stb name
+ self.check_table_with_another_stb_name()
+
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
From e23a657f5315b68fb4047c52809803848eb37312 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Tue, 4 Mar 2025 14:50:21 +0800
Subject: [PATCH 063/105] fix: ci errors
---
tests/script/tsim/sync/3Replica1VgElect.sim | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/tests/script/tsim/sync/3Replica1VgElect.sim b/tests/script/tsim/sync/3Replica1VgElect.sim
index 6ebee885a8..1de856f9a4 100644
--- a/tests/script/tsim/sync/3Replica1VgElect.sim
+++ b/tests/script/tsim/sync/3Replica1VgElect.sim
@@ -91,19 +91,19 @@ endi
if $data[0][4] == leader then
if $data[0][7] == follower then
if $data[0][10] == follower then
- print ---- vgroup $data[0][0] leader locate on dnode $data[0][3]
+ print ---- vgroup $data[0][0] leader locate on dnode $data[0][4]
endi
endi
-elif $data[0][6] == leader then
- if $data[0][7] == follower then
- if $data[0][10] == follower then
- print ---- vgroup $data[0][0] leader locate on dnode $data[0][5]
+elif $data[0][7] == leader then
+ if $data[0][10] == follower then
+ if $data[0][4] == follower then
+ print ---- vgroup $data[0][0] leader locate on dnode $data[0][7]
endi
endi
elif $data[0][10] == leader then
if $data[0][4] == follower then
if $data[0][7] == follower then
- print ---- vgroup $data[0][0] leader locate on dnode $data[0][7]
+ print ---- vgroup $data[0][0] leader locate on dnode $data[0][10]
endi
endi
else
From 69e88f9f5cbbee2756443c2e3cea1437bafb79d7 Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Tue, 4 Mar 2025 15:10:47 +0800
Subject: [PATCH 064/105] Fix(stmt): skip stmt while using ctg cache.
---
source/libs/catalog/src/ctgRemote.c | 3 ++-
source/libs/parser/src/parInsertSql.c | 8 ++++++--
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git a/source/libs/catalog/src/ctgRemote.c b/source/libs/catalog/src/ctgRemote.c
index dca4e2d2fa..abd97e4ed9 100644
--- a/source/libs/catalog/src/ctgRemote.c
+++ b/source/libs/catalog/src/ctgRemote.c
@@ -1367,6 +1367,7 @@ int32_t ctgGetTbMetaFromMnode(SCatalog* pCtg, SRequestConnInfo* pConn, const SNa
int32_t ctgGetTbMetaFromVnode(SCatalog* pCtg, SRequestConnInfo* pConn, const SName* pTableName, SVgroupInfo* vgroupInfo,
STableMetaOutput* out, SCtgTaskReq* tReq) {
SCtgTask* pTask = tReq ? tReq->pTask : NULL;
+ uint8_t autoCreateCtb = tReq ? tReq->autoCreateCtb : 0;
char dbFName[TSDB_DB_FNAME_LEN];
(void)tNameGetFullDbName(pTableName, dbFName);
int32_t reqType = (pTask && pTask->type == CTG_TASK_GET_TB_NAME ? TDMT_VND_TABLE_NAME : TDMT_VND_TABLE_META);
@@ -1380,7 +1381,7 @@ int32_t ctgGetTbMetaFromVnode(SCatalog* pCtg, SRequestConnInfo* pConn, const SNa
SBuildTableInput bInput = {.vgId = vgroupInfo->vgId,
.option = reqType == TDMT_VND_TABLE_NAME ? REQ_OPT_TBUID : REQ_OPT_TBNAME,
- .autoCreateCtb = tReq->autoCreateCtb,
+ .autoCreateCtb = autoCreateCtb,
.dbFName = dbFName,
.tbName = (char*)tNameGetTableName(pTableName)};
char* msg = NULL;
diff --git a/source/libs/parser/src/parInsertSql.c b/source/libs/parser/src/parInsertSql.c
index 290011a708..e322dd1dc7 100644
--- a/source/libs/parser/src/parInsertSql.c
+++ b/source/libs/parser/src/parInsertSql.c
@@ -1328,6 +1328,9 @@ static int32_t getUsingTableSchema(SInsertParseContext* pCxt, SVnodeModifyOpStmt
bool bUsingTable = true;
code = getTableMeta(pCxt, &pStmt->usingTableName, &pStableMeta, &pCxt->missCache, bUsingTable);
}
+ if (pCxt->isStmtBind) {
+ goto _no_ctb_cache;
+ }
if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
bool bUsingTable = false;
@@ -1338,11 +1341,12 @@ static int32_t getUsingTableSchema(SInsertParseContext* pCxt, SVnodeModifyOpStmt
code = (pStableMeta->suid == pCtableMeta->suid) ? TSDB_CODE_SUCCESS : TSDB_CODE_TDB_TABLE_IN_OTHER_STABLE;
*ctbCacheHit = true;
}
+_no_ctb_cache:
if (TSDB_CODE_SUCCESS == code) {
if (*ctbCacheHit) {
- code = cloneTableMeta(pCtableMeta,&pStmt->pTableMeta);
+ code = cloneTableMeta(pCtableMeta, &pStmt->pTableMeta);
} else {
- code = cloneTableMeta( pStableMeta,&pStmt->pTableMeta);
+ code = cloneTableMeta(pStableMeta, &pStmt->pTableMeta);
}
}
taosMemoryFree(pStableMeta);
From 97286faa8d94371cfc8aa6025033cec6eba38bd7 Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Tue, 4 Mar 2025 15:20:10 +0800
Subject: [PATCH 065/105] Test(insert): add auto_create_insert to ci.
---
tests/parallel_test/cases.task | 1 +
1 file changed, 1 insertion(+)
diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task
index 8f986ad445..cd6fed7fb9 100644
--- a/tests/parallel_test/cases.task
+++ b/tests/parallel_test/cases.task
@@ -51,6 +51,7 @@
,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_ts5400.py
,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_having.py
,,y,army,./pytest.sh python3 ./test.py -f insert/insert_basic.py -N 3
+,,y,army,./pytest.sh python3 ./test.py -f insert/auto_create_insert.py
,,y,army,./pytest.sh python3 ./test.py -f cluster/splitVgroupByLearner.py -N 3
,,y,army,./pytest.sh python3 ./test.py -f authorith/authBasic.py -N 3
,,n,army,python3 ./test.py -f cmdline/fullopt.py
From ecff4313186ecef32bc9a1e4404427025a6025e6 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Tue, 4 Mar 2025 16:10:49 +0800
Subject: [PATCH 066/105] build: add dependency library z
---
tools/taos-tools/src/CMakeLists.txt | 2 ++
1 file changed, 2 insertions(+)
diff --git a/tools/taos-tools/src/CMakeLists.txt b/tools/taos-tools/src/CMakeLists.txt
index 1f0899db5c..4fa7fb0dc4 100644
--- a/tools/taos-tools/src/CMakeLists.txt
+++ b/tools/taos-tools/src/CMakeLists.txt
@@ -363,3 +363,5 @@ ELSE ()
TARGET_LINK_LIBRARIES(taosBenchmark taos msvcregex pthread toolscJson ${WEBSOCKET_LINK_FLAGS})
ENDIF ()
+
+target_link_libraries(taosBenchmark PRIVATE z)
From 0c55e8a0ad630c12ca3e6a4e373227e5a67b7d7a Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Tue, 4 Mar 2025 16:21:55 +0800
Subject: [PATCH 067/105] build: delete key PRIVATE in target_link_libraries
---
tools/taos-tools/src/CMakeLists.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tools/taos-tools/src/CMakeLists.txt b/tools/taos-tools/src/CMakeLists.txt
index 4fa7fb0dc4..5bc2703165 100644
--- a/tools/taos-tools/src/CMakeLists.txt
+++ b/tools/taos-tools/src/CMakeLists.txt
@@ -364,4 +364,4 @@ ELSE ()
TARGET_LINK_LIBRARIES(taosBenchmark taos msvcregex pthread toolscJson ${WEBSOCKET_LINK_FLAGS})
ENDIF ()
-target_link_libraries(taosBenchmark PRIVATE z)
+target_link_libraries(taosBenchmark z)
From 0498d146a1fad5838f9f03d7c690efbc8c8ae839 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Tue, 4 Mar 2025 16:37:51 +0800
Subject: [PATCH 068/105] fix: ci errors
---
utils/CMakeLists.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt
index a6377f15a8..721da32bfc 100644
--- a/utils/CMakeLists.txt
+++ b/utils/CMakeLists.txt
@@ -2,7 +2,7 @@
ADD_SUBDIRECTORY(tsim)
IF(${BUILD_TEST})
- ADD_SUBDIRECTORY(test)
+ ADD_SUBDIRECTORY(test/c)
ENDIF(${BUILD_TEST})
# ADD_SUBDIRECTORY(comparisonTest/tdengine)
From 7f5d687882762b11f4c25a1422ea01194c01986a Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Tue, 4 Mar 2025 20:10:40 +0800
Subject: [PATCH 069/105] Enh(insert): use mini cache in stmt to improve insert
speed.
---
include/libs/nodes/querynodes.h | 1 +
source/libs/nodes/src/nodesUtilFuncs.c | 1 +
source/libs/parser/src/parInsertSql.c | 21 ++++++++++++++++++---
tests/army/insert/auto_create_bench.py | 16 ++++++++--------
4 files changed, 28 insertions(+), 11 deletions(-)
diff --git a/include/libs/nodes/querynodes.h b/include/libs/nodes/querynodes.h
index 3c15ffa6b4..7191b4ef4a 100644
--- a/include/libs/nodes/querynodes.h
+++ b/include/libs/nodes/querynodes.h
@@ -571,6 +571,7 @@ typedef struct SVnodeModifyOpStmt {
SHashObj* pVgroupsHashObj; // SHashObj
SHashObj* pTableBlockHashObj; // SHashObj
SHashObj* pSubTableHashObj; // SHashObj
+ SHashObj* pSuperTableHashObj; // SHashObj
SHashObj* pTableNameHashObj; // set of table names for refreshing meta, sync mode
SHashObj* pDbFNameHashObj; // set of db names for refreshing meta, sync mode
SHashObj* pTableCxtHashObj; // temp SHashObj for single request
diff --git a/source/libs/nodes/src/nodesUtilFuncs.c b/source/libs/nodes/src/nodesUtilFuncs.c
index ee7eff273d..8d47a90d19 100644
--- a/source/libs/nodes/src/nodesUtilFuncs.c
+++ b/source/libs/nodes/src/nodesUtilFuncs.c
@@ -1325,6 +1325,7 @@ void nodesDestroyNode(SNode* pNode) {
taosArrayDestroy(pStmt->pTableTag);
taosHashCleanup(pStmt->pVgroupsHashObj);
taosHashCleanup(pStmt->pSubTableHashObj);
+ taosHashCleanup(pStmt->pSuperTableHashObj);
taosHashCleanup(pStmt->pTableNameHashObj);
taosHashCleanup(pStmt->pDbFNameHashObj);
taosHashCleanup(pStmt->pTableCxtHashObj);
diff --git a/source/libs/parser/src/parInsertSql.c b/source/libs/parser/src/parInsertSql.c
index e322dd1dc7..456ab93e20 100644
--- a/source/libs/parser/src/parInsertSql.c
+++ b/source/libs/parser/src/parInsertSql.c
@@ -1325,8 +1325,22 @@ static int32_t getUsingTableSchema(SInsertParseContext* pCxt, SVnodeModifyOpStmt
return TSDB_CODE_SUCCESS;
}
if (!pCxt->missCache) {
- bool bUsingTable = true;
- code = getTableMeta(pCxt, &pStmt->usingTableName, &pStableMeta, &pCxt->missCache, bUsingTable);
+ char tbFName[TSDB_TABLE_FNAME_LEN];
+ code = tNameExtractFullName(&pStmt->usingTableName, tbFName);
+ if (TSDB_CODE_SUCCESS != code) {
+ return code;
+ }
+ STableMeta** ppStableMeta = taosHashGet(pStmt->pSuperTableHashObj, tbFName, strlen(tbFName));
+ if (NULL != ppStableMeta) {
+ pStableMeta = *ppStableMeta;
+ }
+ if (NULL == pStableMeta) {
+ bool bUsingTable = true;
+ code = getTableMeta(pCxt, &pStmt->usingTableName, &pStableMeta, &pCxt->missCache, bUsingTable);
+ if (TSDB_CODE_SUCCESS == code) {
+ code = taosHashPut(pStmt->pSuperTableHashObj, tbFName, strlen(tbFName), &pStableMeta, POINTER_BYTES);
+ }
+ }
}
if (pCxt->isStmtBind) {
goto _no_ctb_cache;
@@ -1349,7 +1363,6 @@ _no_ctb_cache:
code = cloneTableMeta(pStableMeta, &pStmt->pTableMeta);
}
}
- taosMemoryFree(pStableMeta);
taosMemoryFree(pCtableMeta);
if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
code = getTargetTableVgroup(pCxt->pComCxt, pStmt, true, &pCxt->missCache);
@@ -2847,6 +2860,7 @@ static int32_t createVnodeModifOpStmt(SInsertParseContext* pCxt, bool reentry, S
}
}
pStmt->pSubTableHashObj = taosHashInit(128, taosGetDefaultHashFunction(TSDB_DATA_TYPE_VARCHAR), true, HASH_NO_LOCK);
+ pStmt->pSuperTableHashObj = taosHashInit(128, taosGetDefaultHashFunction(TSDB_DATA_TYPE_VARCHAR), true, HASH_NO_LOCK);
pStmt->pTableNameHashObj = taosHashInit(128, taosGetDefaultHashFunction(TSDB_DATA_TYPE_VARCHAR), true, HASH_NO_LOCK);
pStmt->pDbFNameHashObj = taosHashInit(64, taosGetDefaultHashFunction(TSDB_DATA_TYPE_VARCHAR), true, HASH_NO_LOCK);
if ((!reentry && (NULL == pStmt->pVgroupsHashObj || NULL == pStmt->pTableBlockHashObj)) ||
@@ -2856,6 +2870,7 @@ static int32_t createVnodeModifOpStmt(SInsertParseContext* pCxt, bool reentry, S
}
taosHashSetFreeFp(pStmt->pSubTableHashObj, destroySubTableHashElem);
+ taosHashSetFreeFp(pStmt->pSuperTableHashObj, destroySubTableHashElem);
*pOutput = (SNode*)pStmt;
return TSDB_CODE_SUCCESS;
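
The new pSuperTableHashObj above is a per-statement memo: the super table's meta is fetched from the catalog once, keyed by its fully qualified name, and then reused for every child table in the same INSERT that references that stable. A rough Python sketch of the idea (the names and the fetch callback are illustrative, not the actual TDengine API):

```python
# Illustrative memoization sketch of the per-statement super-table meta cache.
# Key: fully qualified stable name, value: its meta; the expensive catalog
# lookup runs only for the first child table that references the stable.
stable_meta_cache = {}

def get_stable_meta(full_name, fetch_from_catalog):
    meta = stable_meta_cache.get(full_name)
    if meta is None:
        meta = fetch_from_catalog(full_name)
        stable_meta_cache[full_name] = meta
    return meta
```
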
diff --git a/tests/army/insert/auto_create_bench.py b/tests/army/insert/auto_create_bench.py
index 0996aac87a..d8ac9dde24 100644
--- a/tests/army/insert/auto_create_bench.py
+++ b/tests/army/insert/auto_create_bench.py
@@ -100,15 +100,15 @@ if __name__ == "__main__":
precreate_tables()
# Test scenario 1: auto-create table insert
auto_create_time = test_auto_create_tables()
- # Clean up and re-initialize the environment
- prepare_database()
- # Pre-create all child tables
- precreate_tables()
- # Test scenario 2: direct insert
- direct_insert_time = test_direct_insert()
+ # # Clean up and re-initialize the environment
+ # prepare_database()
+ # # Pre-create all child tables
+ # precreate_tables()
+ # # Test scenario 2: direct insert
+ # direct_insert_time = test_direct_insert()
# Print the final results
print("\nTest result comparison:")
print(f"Auto-create insert elapsed: {auto_create_time:.2f} s")
- print(f"Direct insert elapsed: {direct_insert_time:.2f} s")
- print(f"Performance ratio: {auto_create_time/direct_insert_time:.1f}x")
\ No newline at end of file
+ # print(f"直接插入耗时: {direct_insert_time:.2f} 秒")
+ # print(f"性能差异: {auto_create_time/direct_insert_time:.1f} 倍")
\ No newline at end of file
From 21269eb3665b6870b14496ffee7b48826cc5ff14 Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Wed, 5 Mar 2025 09:45:43 +0800
Subject: [PATCH 070/105] Fix(insert): keep product behavior consistent with
 previous versions.
---
source/libs/parser/src/parInsertSql.c | 13 +++++--------
1 file changed, 5 insertions(+), 8 deletions(-)
diff --git a/source/libs/parser/src/parInsertSql.c b/source/libs/parser/src/parInsertSql.c
index 456ab93e20..5af6d11122 100644
--- a/source/libs/parser/src/parInsertSql.c
+++ b/source/libs/parser/src/parInsertSql.c
@@ -104,8 +104,8 @@ static int32_t skipTableOptions(SInsertParseContext* pCxt, const char** pSql) {
}
// pSql -> stb_name [(tag1_name, ...)] TAGS (tag1_value, ...)
-#if 0
-static int32_t ignoreUsingClause(SInsertParseContext* pCxt, const char** pSql) {
+static int32_t ignoreUsingClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
+ const char** pSql = &pStmt->pSql;
int32_t code = TSDB_CODE_SUCCESS;
SToken token;
NEXT_TOKEN(*pSql, token);
@@ -140,8 +140,8 @@ static int32_t ignoreUsingClause(SInsertParseContext* pCxt, const char** pSql) {
return code;
}
-#else
-static int32_t ignoreUsingClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
+
+static int32_t ignoreUsingClauseAndCheckTagValues(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt) {
const char** pSql = &pStmt->pSql;
int32_t code = TSDB_CODE_SUCCESS;
code = parseBoundTagsClause(pCxt, pStmt);
@@ -160,7 +160,6 @@ static int32_t ignoreUsingClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt*
return code;
}
-#endif
static int32_t parseDuplicateUsingClause(SInsertParseContext* pCxt, SVnodeModifyOpStmt* pStmt, bool* pDuplicate) {
int32_t code = TSDB_CODE_SUCCESS;
@@ -179,8 +178,6 @@ static int32_t parseDuplicateUsingClause(SInsertParseContext* pCxt, SVnodeModify
if (TSDB_CODE_SUCCESS != code) {
return code;
}
- SToken token;
- NEXT_TOKEN(pStmt->pSql, token);
return ignoreUsingClause(pCxt, pStmt);
}
@@ -1385,7 +1382,7 @@ static int32_t parseUsingTableNameImpl(SInsertParseContext* pCxt, SVnodeModifyOp
code = getUsingTableSchema(pCxt, pStmt, &ctbCacheHit);
if (TSDB_CODE_SUCCESS == code && ctbCacheHit) {
pStmt->usingTableProcessing = false;
- return ignoreUsingClause(pCxt, pStmt);
+ return ignoreUsingClauseAndCheckTagValues(pCxt, pStmt);
}
}
if (TSDB_CODE_SUCCESS == code && !pCxt->missCache) {
From 4314656867c652d7092d471f9dd5e814c4e0d93f Mon Sep 17 00:00:00 2001
From: xiao-77
Date: Wed, 5 Mar 2025 10:02:05 +0800
Subject: [PATCH 071/105] Test(insert): resolve ci errors.
---
tests/army/insert/auto_create_insert.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/tests/army/insert/auto_create_insert.py b/tests/army/insert/auto_create_insert.py
index efaa421a6c..6d57c4023f 100644
--- a/tests/army/insert/auto_create_insert.py
+++ b/tests/army/insert/auto_create_insert.py
@@ -140,7 +140,9 @@ class TDTestCase(TBase):
def check_duplicate_table_with_err_tag(self):
tdLog.info(f"check tag parse error with cache")
tdSql.execute("USE test")
- tdSql.error("INSERT INTO t_8 USING stb TAGS (8) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test') t_8 USING stb TAGS (ddd) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')", expectErrInfo="syntax error")
+ tdSql.execute("INSERT INTO t_8 USING stb TAGS (8) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test') t_8 USING stb TAGS (ddd) VALUES ('2024-01-01 00:00:00', 1, 2.0, 'test')")
+ tdSql.query("select * from t_8")
+ tdSql.checkRows(1)
def check_table_with_another_stb_name(self):
tdLog.info(f"check table with another stb name")
From 47ded1b71de8334f0eae6f5211acb24fa50447fe Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Wed, 5 Mar 2025 10:03:46 +0800
Subject: [PATCH 072/105] feat: support csv parameters at the super table
level
---
tools/taos-tools/inc/bench.h | 32 ++--
tools/taos-tools/src/benchCsv.c | 265 +++++++++++++++-------------
tools/taos-tools/src/benchJsonOpt.c | 118 ++++++-------
3 files changed, 214 insertions(+), 201 deletions(-)
diff --git a/tools/taos-tools/inc/bench.h b/tools/taos-tools/inc/bench.h
index 30973170a3..c413d953b7 100644
--- a/tools/taos-tools/inc/bench.h
+++ b/tools/taos-tools/inc/bench.h
@@ -479,6 +479,13 @@ typedef struct SChildTable_S {
int32_t pkCnt;
} SChildTable;
+typedef enum {
+ CSV_COMPRESS_NONE = 0,
+ CSV_COMPRESS_FAST = 1,
+ CSV_COMPRESS_BALANCE = 6,
+ CSV_COMPRESS_BEST = 9
+} CsvCompressionLevel;
+
#define PRIMARY_KEY "PRIMARY KEY"
typedef struct SSuperTable_S {
char *stbName;
@@ -581,6 +588,15 @@ typedef struct SSuperTable_S {
// execute sqls after create super table
char **sqls;
+
+ char* csv_file_prefix;
+ char* csv_ts_format;
+ char* csv_ts_interval;
+ char* csv_tbname_alias;
+ long csv_ts_intv_secs;
+ bool csv_output_header;
+ CsvCompressionLevel csv_compress_level;
+
} SSuperTable;
typedef struct SDbCfg_S {
@@ -719,14 +735,6 @@ typedef struct STmqMetaInfo_S {
uint16_t iface;
} STmqMetaInfo;
-
-typedef enum {
- CSV_COMPRESS_NONE = 0,
- CSV_COMPRESS_FAST = 1,
- CSV_COMPRESS_BALANCE = 6,
- CSV_COMPRESS_BEST = 9
-} CsvCompressionLevel;
-
typedef struct SArguments_S {
uint8_t taosc_version;
char * metaFile;
@@ -791,14 +799,6 @@ typedef struct SArguments_S {
char* output_path;
char output_path_buf[MAX_PATH_LEN];
- char* csv_file_prefix;
- char* csv_ts_format;
- char* csv_ts_interval;
- char* csv_tbname_alias;
- long csv_ts_intv_secs;
- bool csv_output_header;
-
- CsvCompressionLevel csv_compress_level;
} SArguments;
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 848cd9a6ef..dd6ce3360a 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -31,6 +31,111 @@
+static int csvValidateParamTsFormat(const char* csv_ts_format) {
+ if (!csv_ts_format) return 0;
+
+ struct tm test_tm = {
+ .tm_year = 70,
+ .tm_mon = 0,
+ .tm_mday = 1,
+ .tm_hour = 0,
+ .tm_min = 0,
+ .tm_sec = 0,
+ .tm_isdst = -1
+ };
+ mktime(&test_tm);
+
+ char buffer[1024];
+ size_t len = strftime(buffer, sizeof(buffer), csv_ts_format, &test_tm);
+ if (len == 0) {
+ return -1;
+ }
+
+#ifdef _WIN32
+ const char* invalid_chars = "/\\:*?\"<>|";
+#else
+ const char* invalid_chars = "/\\?\"<>|";
+#endif
+ if (strpbrk(buffer, invalid_chars) != NULL) {
+ return -1;
+ }
+
+ return 0;
+}
+
+
+static long csvValidateParamTsInterval(const char* csv_ts_interval) {
+ if (!csv_ts_interval || *csv_ts_interval == '\0') return -1;
+
+ char* endptr;
+ errno = 0;
+ const long num = strtol(csv_ts_interval, &endptr, 10);
+
+ if (errno == ERANGE ||
+ endptr == csv_ts_interval ||
+ num <= 0) {
+ return -1;
+ }
+
+ if (*endptr == '\0' ||
+ *(endptr + 1) != '\0') {
+ return -1;
+ }
+
+ switch (tolower(*endptr)) {
+ case 's': return num;
+ case 'm': return num * 60;
+ case 'h': return num * 60 * 60;
+ case 'd': return num * 60 * 60 * 24;
+ default : return -1;
+ }
+}
+
+
+static int csvParseParameter() {
+ // csv_output_path
+ size_t len = strlen(g_arguments->output_path);
+ if (len == 0) {
+ errorPrint("Failed to generate csv files, the specified output path is empty. Please provide a valid path.\n");
+ return -1;
+ }
+ if (g_arguments->output_path[len - 1] != '/') {
+ int n = snprintf(g_arguments->output_path_buf, sizeof(g_arguments->output_path_buf), "%s/", g_arguments->output_path);
+ if (n < 0 || n >= sizeof(g_arguments->output_path_buf)) {
+ errorPrint("Failed to generate csv files, path buffer overflow risk when appending '/'. path: %s.\n",
+ g_arguments->output_path);
+ return -1;
+ }
+ g_arguments->output_path = g_arguments->output_path_buf;
+ }
+
+ return 0;
+}
+
+
+static int csvParseStbParameter(SSuperTable* stb) {
+ // csv_ts_format
+ if (stb->csv_ts_format) {
+ if (csvValidateParamTsFormat(stb->csv_ts_format) != 0) {
+ errorPrint("Failed to generate csv files, the parameter `csv_ts_format` is invalid. csv_ts_format: %s.\n",
+ stb->csv_ts_format);
+ return -1;
+ }
+ }
+
+ // csv_ts_interval
+ long csv_ts_intv_secs = csvValidateParamTsInterval(stb->csv_ts_interval);
+ if (csv_ts_intv_secs <= 0) {
+ errorPrint("Failed to generate csv files, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s.\n",
+ stb->csv_ts_interval);
+ return -1;
+ }
+ stb->csv_ts_intv_secs = csv_ts_intv_secs;
+
+ return 0;
+}
+
+
static time_t csvGetStartSeconds(int precision, int64_t start_ts) {
time_t start_seconds = 0;
@@ -45,7 +150,7 @@ static time_t csvGetStartSeconds(int precision, int64_t start_ts) {
}
-static void csvConvertTime2String(time_t time_value, char* time_buf, size_t buf_size) {
+static void csvConvertTime2String(time_t time_value, char* ts_format, char* time_buf, size_t buf_size) {
struct tm tm_result;
char *old_locale = setlocale(LC_TIME, "C");
#ifdef _WIN32
@@ -53,7 +158,7 @@ static void csvConvertTime2String(time_t time_value, char* time_buf, size_t buf_
#else
gmtime_r(&time_value, &tm_result);
#endif
- strftime(time_buf, buf_size, g_arguments->csv_ts_format, &tm_result);
+ strftime(time_buf, buf_size, ts_format, &tm_result);
if (old_locale) {
setlocale(LC_TIME, old_locale);
}
@@ -63,13 +168,13 @@ static void csvConvertTime2String(time_t time_value, char* time_buf, size_t buf_
static CsvNamingType csvGetFileNamingType(SSuperTable* stb) {
if (stb->interlaceRows > 0) {
- if (g_arguments->csv_ts_format) {
+ if (stb->csv_ts_format) {
return CSV_NAMING_I_TIME_SLICE;
} else {
return CSV_NAMING_I_SINGLE;
}
} else {
- if (g_arguments->csv_ts_format) {
+ if (stb->csv_ts_format) {
return CSV_NAMING_B_THREAD_TIME_SLICE;
} else {
return CSV_NAMING_B_THREAD;
@@ -82,11 +187,11 @@ static void csvCalcTimestampStep(CsvWriteMeta* write_meta) {
time_t ts_step = 0;
if (write_meta->db->precision == TSDB_TIME_PRECISION_MICRO) {
- ts_step = g_arguments->csv_ts_intv_secs * 1000000L;
+ ts_step = write_meta->stb->csv_ts_intv_secs * 1000000L;
} else if (write_meta->db->precision == TSDB_TIME_PRECISION_NANO) {
- ts_step = g_arguments->csv_ts_intv_secs * 1000000000L;
+ ts_step = write_meta->stb->csv_ts_intv_secs * 1000000000L;
} else {
- ts_step = g_arguments->csv_ts_intv_secs * 1000L;
+ ts_step = write_meta->stb->csv_ts_intv_secs * 1000L;
}
write_meta->ts_step = ts_step;
return;
@@ -145,7 +250,7 @@ static int csvGenCsvHeader(CsvWriteMeta* write_meta) {
int pos = 0;
int size = sizeof(write_meta->csv_header);
- if (!g_arguments->csv_output_header) {
+ if (!write_meta->stb->csv_output_header) {
return 0;
}
@@ -159,7 +264,7 @@ static int csvGenCsvHeader(CsvWriteMeta* write_meta) {
}
// tbname
- pos += snprintf(buf + pos, size - pos, ",%s", g_arguments->csv_tbname_alias);
+ pos += snprintf(buf + pos, size - pos, ",%s", write_meta->stb->csv_tbname_alias);
// tags
for (size_t i = 0; i < stb->tags->size; ++i) {
@@ -479,23 +584,23 @@ static int csvInitWriteMeta(SDataBase* db, SSuperTable* stb, CsvWriteMeta* write
switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE: {
- (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "interlace|no-time-slice");
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "interlace::normal");
break;
}
case CSV_NAMING_I_TIME_SLICE: {
- (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "interlace|time-slice");
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "interlace::time-slice");
csvCalcTimestampStep(write_meta);
break;
}
case CSV_NAMING_B_THREAD: {
write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
- (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch[%zu]|no-time-slice", write_meta->total_threads);
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch[%zu]::normal", write_meta->total_threads);
csvGenThreadFormatter(write_meta);
break;
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
write_meta->total_threads = MIN(g_arguments->nthreads, stb->childTblCount);
- (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch[%zu]|time-slice", write_meta->total_threads);
+ (void)snprintf(write_meta->mode, sizeof(write_meta->mode), "batch[%zu]::time-slice", write_meta->total_threads);
csvGenThreadFormatter(write_meta);
csvCalcTimestampStep(write_meta);
break;
@@ -535,7 +640,7 @@ static void csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id, CsvT
case CSV_NAMING_I_TIME_SLICE:
case CSV_NAMING_B_THREAD_TIME_SLICE: {
thread_meta->start_secs = csvGetStartSeconds(db->precision, stb->startTimestamp);
- thread_meta->end_secs = thread_meta->start_secs + g_arguments->csv_ts_intv_secs;
+ thread_meta->end_secs = thread_meta->start_secs + write_meta->stb->csv_ts_intv_secs;
break;
}
default: {
@@ -558,7 +663,7 @@ static void csvUpdateSliceRange(CsvWriteMeta* write_meta, CsvThreadMeta* thread_
case CSV_NAMING_I_TIME_SLICE:
case CSV_NAMING_B_THREAD_TIME_SLICE: {
thread_meta->start_secs = csvGetStartSeconds(db->precision, last_end_ts);
- thread_meta->end_secs = thread_meta->start_secs + g_arguments->csv_ts_intv_secs;
+ thread_meta->end_secs = thread_meta->start_secs + write_meta->stb->csv_ts_intv_secs;
break;
}
default: {
@@ -570,8 +675,8 @@ static void csvUpdateSliceRange(CsvWriteMeta* write_meta, CsvThreadMeta* thread_
}
-static const char* csvGetGzipFilePrefix() {
- if (g_arguments->csv_compress_level == CSV_COMPRESS_NONE) {
+static const char* csvGetGzipFilePrefix(CsvCompressionLevel csv_compress_level) {
+ if (csv_compress_level == CSV_COMPRESS_NONE) {
return "";
} else {
return ".gz";
@@ -585,8 +690,8 @@ static int csvGetFileFullname(CsvWriteMeta* write_meta, CsvThreadMeta* thread_me
char end_time_buf[MIDDLE_BUFF_LEN];
int ret = -1;
const char* base_path = g_arguments->output_path;
- const char* file_prefix = g_arguments->csv_file_prefix;
- const char* gzip_suffix = csvGetGzipFilePrefix();
+ const char* file_prefix = write_meta->stb->csv_file_prefix;
+ const char* gzip_suffix = csvGetGzipFilePrefix(write_meta->stb->csv_compress_level);
switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE: {
@@ -594,8 +699,8 @@ static int csvGetFileFullname(CsvWriteMeta* write_meta, CsvThreadMeta* thread_me
break;
}
case CSV_NAMING_I_TIME_SLICE: {
- csvConvertTime2String(thread_meta->start_secs, start_time_buf, sizeof(start_time_buf));
- csvConvertTime2String(thread_meta->end_secs, end_time_buf, sizeof(end_time_buf));
+ csvConvertTime2String(thread_meta->start_secs, write_meta->stb->csv_ts_format, start_time_buf, sizeof(start_time_buf));
+ csvConvertTime2String(thread_meta->end_secs, write_meta->stb->csv_ts_format, end_time_buf, sizeof(end_time_buf));
ret = snprintf(fullname, size, "%s%s_%s_%s.csv%s", base_path, file_prefix, start_time_buf, end_time_buf, gzip_suffix);
break;
}
@@ -606,8 +711,8 @@ static int csvGetFileFullname(CsvWriteMeta* write_meta, CsvThreadMeta* thread_me
}
case CSV_NAMING_B_THREAD_TIME_SLICE: {
(void)snprintf(thread_buf, sizeof(thread_buf), write_meta->thread_formatter, thread_meta->thread_id);
- csvConvertTime2String(thread_meta->start_secs, start_time_buf, sizeof(start_time_buf));
- csvConvertTime2String(thread_meta->end_secs, end_time_buf, sizeof(end_time_buf));
+ csvConvertTime2String(thread_meta->start_secs, write_meta->stb->csv_ts_format, start_time_buf, sizeof(start_time_buf));
+ csvConvertTime2String(thread_meta->end_secs, write_meta->stb->csv_ts_format, end_time_buf, sizeof(end_time_buf));
ret = snprintf(fullname, size, "%s%s_%s_%s_%s.csv%s", base_path, file_prefix, thread_buf, start_time_buf, end_time_buf, gzip_suffix);
break;
}
@@ -968,7 +1073,7 @@ static void* csvGenStbThread(void* arg) {
}
// create fd
- fhdl = csvOpen(fullname, g_arguments->csv_compress_level);
+ fhdl = csvOpen(fullname, stb->csv_compress_level);
if (fhdl == NULL) {
errorPrint("Failed to create csv file. thread index: %zu, file: %s, errno: %d, strerror: %s.\n",
thread_meta->thread_id, fullname, errno, strerror(errno));
@@ -976,7 +1081,7 @@ static void* csvGenStbThread(void* arg) {
}
- thread_meta->output_header = g_arguments->csv_output_header;
+ thread_meta->output_header = stb->csv_output_header;
slice_cur_ts = cur_ts;
slice_end_ts = MIN(cur_ts + write_meta->ts_step, write_meta->end_ts);
file_rows = 0;
@@ -1148,106 +1253,6 @@ static int csvGenStb(SDataBase* db, SSuperTable* stb) {
}
-static int csvValidateParamTsFormat(const char* csv_ts_format) {
- if (!csv_ts_format) return 0;
-
- struct tm test_tm = {
- .tm_year = 70,
- .tm_mon = 0,
- .tm_mday = 1,
- .tm_hour = 0,
- .tm_min = 0,
- .tm_sec = 0,
- .tm_isdst = -1
- };
- mktime(&test_tm);
-
- char buffer[1024];
- size_t len = strftime(buffer, sizeof(buffer), csv_ts_format, &test_tm);
- if (len == 0) {
- return -1;
- }
-
-#ifdef _WIN32
- const char* invalid_chars = "/\\:*?\"<>|";
-#else
- const char* invalid_chars = "/\\?\"<>|";
-#endif
- if (strpbrk(buffer, invalid_chars) != NULL) {
- return -1;
- }
-
- return 0;
-}
-
-
-static long csvValidateParamTsInterval(const char* csv_ts_interval) {
- if (!csv_ts_interval || *csv_ts_interval == '\0') return -1;
-
- char* endptr;
- errno = 0;
- const long num = strtol(csv_ts_interval, &endptr, 10);
-
- if (errno == ERANGE ||
- endptr == csv_ts_interval ||
- num <= 0) {
- return -1;
- }
-
- if (*endptr == '\0' ||
- *(endptr + 1) != '\0') {
- return -1;
- }
-
- switch (tolower(*endptr)) {
- case 's': return num;
- case 'm': return num * 60;
- case 'h': return num * 60 * 60;
- case 'd': return num * 60 * 60 * 24;
- default : return -1;
- }
-}
-
-
-static int csvParseParameter() {
- // csv_output_path
- size_t len = strlen(g_arguments->output_path);
- if (len == 0) {
- errorPrint("Failed to generate csv files, the specified output path is empty. Please provide a valid path.\n");
- return -1;
- }
- if (g_arguments->output_path[len - 1] != '/') {
- int n = snprintf(g_arguments->output_path_buf, sizeof(g_arguments->output_path_buf), "%s/", g_arguments->output_path);
- if (n < 0 || n >= sizeof(g_arguments->output_path_buf)) {
- errorPrint("Failed to generate csv files, path buffer overflow risk when appending '/'. path: %s.\n",
- g_arguments->output_path);
- return -1;
- }
- g_arguments->output_path = g_arguments->output_path_buf;
- }
-
- // csv_ts_format
- if (g_arguments->csv_ts_format) {
- if (csvValidateParamTsFormat(g_arguments->csv_ts_format) != 0) {
- errorPrint("Failed to generate csv files, the parameter `csv_ts_format` is invalid. csv_ts_format: %s.\n",
- g_arguments->csv_ts_format);
- return -1;
- }
- }
-
- // csv_ts_interval
- long csv_ts_intv_secs = csvValidateParamTsInterval(g_arguments->csv_ts_interval);
- if (csv_ts_intv_secs <= 0) {
- errorPrint("Failed to generate csv files, the parameter `csv_ts_interval` is invalid. csv_ts_interval: %s.\n",
- g_arguments->csv_ts_interval);
- return -1;
- }
- g_arguments->csv_ts_intv_secs = csv_ts_intv_secs;
-
- return 0;
-}
-
-
static int csvWriteThread() {
for (size_t i = 0; i < g_arguments->databases->size && !g_arguments->terminate; ++i) {
// database
@@ -1260,8 +1265,16 @@ static int csvWriteThread() {
continue;
}
+ // parsing parameters
+ int ret = csvParseStbParameter(stb);
+ if (ret != 0) {
+ errorPrint("Failed to parse csv parameter. database: %s, super table: %s, error code: %d.\n",
+ db->dbName, stb->stbName, ret);
+ return -1;
+ }
+
// gen csv
- int ret = csvGenStb(db, stb);
+ ret = csvGenStb(db, stb);
if(ret != 0) {
errorPrint("Failed to generate csv files. database: %s, super table: %s, error code: %d.\n",
db->dbName, stb->stbName, ret);
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index 9bc8527130..83edc5c6ef 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1405,6 +1405,65 @@ static int getStableInfo(tools_cJSON *dbinfos, int index) {
}
}
}
+
+ // csv file prefix
+ tools_cJSON* csv_fp = tools_cJSON_GetObjectItem(stbInfo, "csv_file_prefix");
+ if (csv_fp && csv_fp->type == tools_cJSON_String && csv_fp->valuestring != NULL) {
+ superTable->csv_file_prefix = csv_fp->valuestring;
+ } else {
+ superTable->csv_file_prefix = "data";
+ }
+
+ // csv timestamp format
+ tools_cJSON* csv_tf = tools_cJSON_GetObjectItem(stbInfo, "csv_ts_format");
+ if (csv_tf && csv_tf->type == tools_cJSON_String && csv_tf->valuestring != NULL) {
+ superTable->csv_ts_format = csv_tf->valuestring;
+ } else {
+ superTable->csv_ts_format = NULL;
+ }
+
+ // csv timestamp interval
+ tools_cJSON* csv_ti = tools_cJSON_GetObjectItem(stbInfo, "csv_ts_interval");
+ if (csv_ti && csv_ti->type == tools_cJSON_String && csv_ti->valuestring != NULL) {
+ superTable->csv_ts_interval = csv_ti->valuestring;
+ } else {
+ superTable->csv_ts_interval = "1d";
+ }
+
+ // csv output header
+ superTable->csv_output_header = true;
+ tools_cJSON* oph = tools_cJSON_GetObjectItem(stbInfo, "csv_output_header");
+ if (oph && oph->type == tools_cJSON_String && oph->valuestring != NULL) {
+ if (0 == strcasecmp(oph->valuestring, "yes") || 0 == strcasecmp(oph->valuestring, "true")) {
+ superTable->csv_output_header = true;
+ } else if (0 == strcasecmp(oph->valuestring, "no") || 0 == strcasecmp(oph->valuestring, "false")) {
+ superTable->csv_output_header = false;
+ }
+ }
+
+ // csv tbname alias
+ tools_cJSON* tba = tools_cJSON_GetObjectItem(stbInfo, "csv_tbname_alias");
+ if (tba && tba->type == tools_cJSON_String && tba->valuestring != NULL) {
+ superTable->csv_tbname_alias = tba->valuestring;
+ } else {
+ superTable->csv_tbname_alias = "device_id";
+ }
+
+ // csv compression level
+ tools_cJSON* cl = tools_cJSON_GetObjectItem(stbInfo, "csv_compress_level");
+ if (cl && cl->type == tools_cJSON_String && cl->valuestring != NULL) {
+ if (0 == strcasecmp(cl->valuestring, "none")) {
+ superTable->csv_compress_level = CSV_COMPRESS_NONE;
+ } else if (0 == strcasecmp(cl->valuestring, "fast")) {
+ superTable->csv_compress_level = CSV_COMPRESS_FAST;
+ } else if (0 == strcasecmp(cl->valuestring, "balance")) {
+ superTable->csv_compress_level = CSV_COMPRESS_BALANCE;
+ } else if (0 == strcasecmp(cl->valuestring, "best")) {
+ superTable->csv_compress_level = CSV_COMPRESS_BEST;
+ }
+ } else {
+ superTable->csv_compress_level = CSV_COMPRESS_NONE;
+ }
}
return 0;
}
@@ -1595,65 +1654,6 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
}
(void)mkdir(g_arguments->output_path, 0775);
- // csv file prefix
- tools_cJSON* csv_fp = tools_cJSON_GetObjectItem(json, "csv_file_prefix");
- if (csv_fp && csv_fp->type == tools_cJSON_String && csv_fp->valuestring != NULL) {
- g_arguments->csv_file_prefix = csv_fp->valuestring;
- } else {
- g_arguments->csv_file_prefix = "data";
- }
-
- // csv timestamp format
- tools_cJSON* csv_tf = tools_cJSON_GetObjectItem(json, "csv_ts_format");
- if (csv_tf && csv_tf->type == tools_cJSON_String && csv_tf->valuestring != NULL) {
- g_arguments->csv_ts_format = csv_tf->valuestring;
- } else {
- g_arguments->csv_ts_format = NULL;
- }
-
- // csv timestamp format
- tools_cJSON* csv_ti = tools_cJSON_GetObjectItem(json, "csv_ts_interval");
- if (csv_ti && csv_ti->type == tools_cJSON_String && csv_ti->valuestring != NULL) {
- g_arguments->csv_ts_interval = csv_ti->valuestring;
- } else {
- g_arguments->csv_ts_interval = "1d";
- }
-
- // csv output header
- g_arguments->csv_output_header = true;
- tools_cJSON* oph = tools_cJSON_GetObjectItem(json, "csv_output_header");
- if (oph && oph->type == tools_cJSON_String && oph->valuestring != NULL) {
- if (0 == strcasecmp(oph->valuestring, "yes") || 0 == strcasecmp(oph->valuestring, "true")) {
- g_arguments->csv_output_header = true;
- } else if (0 == strcasecmp(oph->valuestring, "no") || 0 == strcasecmp(oph->valuestring, "false")) {
- g_arguments->csv_output_header = false;
- }
- }
-
- // csv tbname alias
- tools_cJSON* tba = tools_cJSON_GetObjectItem(json, "csv_tbname_alias");
- if (tba && tba->type == tools_cJSON_String && tba->valuestring != NULL) {
- g_arguments->csv_tbname_alias = tba->valuestring;
- } else {
- g_arguments->csv_tbname_alias = "device_id";
- }
-
- // csv compression level
- tools_cJSON* cl = tools_cJSON_GetObjectItem(json, "csv_compress_level");
- if (cl && cl->type == tools_cJSON_String && cl->valuestring != NULL) {
- if (0 == strcasecmp(cl->valuestring, "none")) {
- g_arguments->csv_compress_level = CSV_COMPRESS_NONE;
- } else if (0 == strcasecmp(cl->valuestring, "fast")) {
- g_arguments->csv_compress_level = CSV_COMPRESS_FAST;
- } else if (0 == strcasecmp(cl->valuestring, "balance")) {
- g_arguments->csv_compress_level = CSV_COMPRESS_BALANCE;
- } else if (0 == strcasecmp(cl->valuestring, "best")) {
- g_arguments->csv_compress_level = CSV_COMPRESS_BEST;
- }
- } else {
- g_arguments->csv_compress_level = CSV_COMPRESS_NONE;
- }
-
code = 0;
return code;
}
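
With this patch the csv_* options are read from each super table entry of the taosBenchmark JSON config instead of the global section. A hedged example of such a per-stable fragment, written out from Python for readability (the key names, defaults, and accepted values come from the parser above; the stable name and concrete values are assumptions):

```python
import json

# Assumed fragment of a taosBenchmark "super_tables" entry; only the csv_*
# keys are taken from the parser above, the rest of the entry is omitted.
stb_csv_cfg = {
    "name": "meters",                 # hypothetical stable name
    "csv_file_prefix": "meters",      # default "data"
    "csv_ts_format": "%Y%m%d",        # optional; enables time-sliced file names
    "csv_ts_interval": "1d",          # number plus s/m/h/d suffix, default "1d"
    "csv_output_header": "yes",       # "yes"/"true" or "no"/"false"
    "csv_tbname_alias": "device_id",  # default "device_id"
    "csv_compress_level": "fast",     # none | fast | balance | best
}
print(json.dumps(stb_csv_cfg, indent=2))
```
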
From 80b7d95ebf4fb68436f2a3d55317dd5a90c1bc97 Mon Sep 17 00:00:00 2001
From: haoranchen
Date: Wed, 5 Mar 2025 11:33:35 +0800
Subject: [PATCH 073/105] docs: update mathematical notation for clarity in
multi.md
---
docs/zh/08-operation/12-multi.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/zh/08-operation/12-multi.md b/docs/zh/08-operation/12-multi.md
index 994192e8fc..7a8a429783 100644
--- a/docs/zh/08-operation/12-multi.md
+++ b/docs/zh/08-operation/12-multi.md
@@ -125,7 +125,7 @@ s3migrate database ;
当 TSDB 时序数据超过 `s3_keeplocal` 参数指定的时间,相关的数据文件会被切分成多个文件块,每个文件块的默认大小是 512M 字节 (`s3_chunkpages * tsdb_pagesize`)。除了最后一个文件块保留在本地文件系统外,其余的文件块会被上传到对象存储服务。
```math
-上传次数 = 数据文件大小 / (s3_chunkpages * tsdb_pagesize) - 1
+\text{上传次数} = \frac{\text{数据文件大小}}{\text{s3\_chunkpages} \times \text{tsdb\_pagesize}} - 1
```
在创建数据库时,可以通过 `s3_chunkpages` 参数调整每个文件块的大小,从而控制每个数据文件的上传次数。
@@ -139,7 +139,7 @@ s3migrate database ;
相邻的多个数据页会作为一个数据块从对象存储下载一次,以减少从对象存储下载的次数。每个数据页的大小,在创建数据库时,通过 `tsdb_pagesize` 参数指定,默认 4K 字节。
```math
-下载次数 = 查询需要的数据块数量 - 已缓存的数据块数量
+\text{下载次数} = \text{查询需要的数据块数量} - \text{已缓存的数据块数量}
```
页缓存是内存缓存,节点重启后,再次查询需要重新下载数据。缓存采用 LRU (Least Recently Used) 策略,当缓存空间不足时,最近最少使用的数据将被淘汰。缓存的大小可以通过 `s3PageCacheSize` 参数进行调整,通常来说,缓存越大,下载次数越少。
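
For readers skipping the Chinese text: the two reformatted formulas give the number of uploads per data file (file size divided by the chunk size `s3_chunkpages * tsdb_pagesize`, minus the one chunk kept on local storage) and the number of downloads per query (blocks needed minus blocks already in the page cache). A quick worked example with assumed sizes:

```python
# Worked example of the two formulas above (all sizes are assumptions).
data_file_bytes = 2 * 1024**3        # a 2 GB TSDB data file
chunk_bytes     = 512 * 1024**2      # s3_chunkpages * tsdb_pagesize, 512 MB by default

uploads = data_file_bytes // chunk_bytes - 1   # the last chunk stays on local storage
print(uploads)                                 # 3

blocks_needed, blocks_cached = 10, 4
downloads = blocks_needed - blocks_cached      # cached blocks are not downloaded again
print(downloads)                               # 6
```
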
From 1b2afe31edeb61051e7a58f510c29feb5d9a3e82 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Wed, 5 Mar 2025 18:15:02 +0800
Subject: [PATCH 074/105] fix: fix bug in time slice window calculation
---
tools/taos-tools/inc/benchCsv.h | 2 +
tools/taos-tools/src/benchCsv.c | 89 ++++++++++++++++++++++++++-------
2 files changed, 74 insertions(+), 17 deletions(-)
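
The patch below aligns every time-slice boundary to the granularity implied by `csv_ts_format`: any time field the format string does not mention is reset before the window is computed, so a `%Y%m%d` format, for example, starts each slice at local midnight. A rough Python equivalent of that alignment rule (a sketch only; it mirrors the localtime/mktime based C code, including the reset of missing fields):

```python
import time

def align_timestamp(seconds, ts_format):
    """Reset the time fields that ts_format does not reference (sketch of csvAlignTimestamp)."""
    t = time.localtime(seconds)
    sec  = t.tm_sec  if "%S" in ts_format else 0
    mins = t.tm_min  if "%M" in ts_format else 0
    hour = t.tm_hour if "%H" in ts_format else 0
    mday = t.tm_mday if "%d" in ts_format else 1
    mon  = t.tm_mon  if "%m" in ts_format else 1
    year = t.tm_year if "%Y" in ts_format else 1900   # mirrors tm_year = 0 in the C code
    return int(time.mktime((year, mon, mday, hour, mins, sec, 0, 1, -1)))

# With a date-only format the aligned value falls on local midnight.
aligned = align_timestamp(int(time.time()), "%Y%m%d")
print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(aligned)))
```
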
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index e80f73bcda..624bcadedc 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -76,6 +76,8 @@ typedef struct {
uint64_t total_rows;
time_t start_secs;
time_t end_secs;
+ int64_t start_ts;
+ int64_t end_ts;
size_t thread_id;
bool output_header;
int tags_buf_size;
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index dd6ce3360a..b498214468 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -136,7 +136,43 @@ static int csvParseStbParameter(SSuperTable* stb) {
}
-static time_t csvGetStartSeconds(int precision, int64_t start_ts) {
+static time_t csvAlignTimestamp(time_t seconds, const char* ts_format) {
+ struct tm aligned_tm;
+#ifdef _WIN32
+ localtime_s(&aligned_tm, &seconds);
+#else
+ localtime_r(&seconds, &aligned_tm);
+#endif
+
+ int has_Y = 0, has_m = 0, has_d = 0, has_H = 0, has_M = 0, has_S = 0;
+ const char* p = ts_format;
+ while (*p) {
+ if (*p == '%') {
+ p++;
+ switch (*p) {
+ case 'Y': has_Y = 1; break;
+ case 'm': has_m = 1; break;
+ case 'd': has_d = 1; break;
+ case 'H': has_H = 1; break;
+ case 'M': has_M = 1; break;
+ case 'S': has_S = 1; break;
+ }
+ }
+ p++;
+ }
+
+ if (!has_S) aligned_tm.tm_sec = 0;
+ if (!has_M) aligned_tm.tm_min = 0;
+ if (!has_H) aligned_tm.tm_hour = 0;
+ if (!has_d) aligned_tm.tm_mday = 1;
+ if (!has_m) aligned_tm.tm_mon = 0;
+ if (!has_Y) aligned_tm.tm_year = 0;
+
+ return mktime(&aligned_tm);
+}
+
+
+static time_t csvGetStartSeconds(int precision, int64_t start_ts, const char* csv_ts_format) {
time_t start_seconds = 0;
if (precision == TSDB_TIME_PRECISION_MICRO) {
@@ -146,17 +182,17 @@ static time_t csvGetStartSeconds(int precision, int64_t start_ts) {
} else {
start_seconds = start_ts / 1000L;
}
- return start_seconds;
+ return csvAlignTimestamp(start_seconds, csv_ts_format);
}
static void csvConvertTime2String(time_t time_value, char* ts_format, char* time_buf, size_t buf_size) {
struct tm tm_result;
- char *old_locale = setlocale(LC_TIME, "C");
+ char* old_locale = setlocale(LC_TIME, "C");
#ifdef _WIN32
- gmtime_s(&tm_result, &time_value);
+ localtime_s(&tm_result, &time_value);
#else
- gmtime_r(&time_value, &tm_result);
+ localtime_r(&time_value, &tm_result);
#endif
strftime(time_buf, buf_size, ts_format, &tm_result);
if (old_locale) {
@@ -183,17 +219,29 @@ static CsvNamingType csvGetFileNamingType(SSuperTable* stb) {
}
-static void csvCalcTimestampStep(CsvWriteMeta* write_meta) {
- time_t ts_step = 0;
+static time_t csvCalcTimestampFromSeconds(int precision, time_t secs) {
+ time_t ts = 0;
- if (write_meta->db->precision == TSDB_TIME_PRECISION_MICRO) {
- ts_step = write_meta->stb->csv_ts_intv_secs * 1000000L;
- } else if (write_meta->db->precision == TSDB_TIME_PRECISION_NANO) {
- ts_step = write_meta->stb->csv_ts_intv_secs * 1000000000L;
+ if (precision == TSDB_TIME_PRECISION_MICRO) {
+ ts = secs * 1000000L;
+ } else if (precision == TSDB_TIME_PRECISION_NANO) {
+ ts = secs * 1000000000L;
} else {
- ts_step = write_meta->stb->csv_ts_intv_secs * 1000L;
+ ts = secs * 1000L;
}
- write_meta->ts_step = ts_step;
+ return ts;
+}
+
+
+static void csvCalcTimestampStep(CsvWriteMeta* write_meta) {
+ write_meta->ts_step = csvCalcTimestampFromSeconds(write_meta->db->precision, write_meta->stb->csv_ts_intv_secs);
+ return;
+}
+
+
+static void csvCalcSliceTimestamp(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta) {
+ thread_meta->start_ts = csvCalcTimestampFromSeconds(write_meta->db->precision, thread_meta->start_secs);
+ thread_meta->end_ts = csvCalcTimestampFromSeconds(write_meta->db->precision, thread_meta->end_secs);
return;
}
@@ -624,6 +672,8 @@ static void csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id, CsvT
thread_meta->ctb_count = 0;
thread_meta->start_secs = 0;
thread_meta->end_secs = 0;
+ thread_meta->start_ts = write_meta->start_ts;
+ thread_meta->end_ts = write_meta->end_ts;
thread_meta->thread_id = thread_id;
thread_meta->output_header = false;
thread_meta->tags_buf_size = 0;
@@ -639,8 +689,9 @@ static void csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id, CsvT
}
case CSV_NAMING_I_TIME_SLICE:
case CSV_NAMING_B_THREAD_TIME_SLICE: {
- thread_meta->start_secs = csvGetStartSeconds(db->precision, stb->startTimestamp);
+ thread_meta->start_secs = csvGetStartSeconds(db->precision, stb->startTimestamp, stb->csv_ts_format);
thread_meta->end_secs = thread_meta->start_secs + write_meta->stb->csv_ts_intv_secs;
+ csvCalcSliceTimestamp(write_meta, thread_meta);
break;
}
default: {
@@ -654,6 +705,7 @@ static void csvInitThreadMeta(CsvWriteMeta* write_meta, uint32_t thread_id, CsvT
static void csvUpdateSliceRange(CsvWriteMeta* write_meta, CsvThreadMeta* thread_meta, int64_t last_end_ts) {
SDataBase* db = write_meta->db;
+ SSuperTable* stb = write_meta->stb;
switch (write_meta->naming_type) {
case CSV_NAMING_I_SINGLE:
@@ -662,8 +714,9 @@ static void csvUpdateSliceRange(CsvWriteMeta* write_meta, CsvThreadMeta* thread_
}
case CSV_NAMING_I_TIME_SLICE:
case CSV_NAMING_B_THREAD_TIME_SLICE: {
- thread_meta->start_secs = csvGetStartSeconds(db->precision, last_end_ts);
+ thread_meta->start_secs = csvGetStartSeconds(db->precision, last_end_ts, stb->csv_ts_format);
thread_meta->end_secs = thread_meta->start_secs + write_meta->stb->csv_ts_intv_secs;
+ csvCalcSliceTimestamp(write_meta, thread_meta);
break;
}
default: {
@@ -1063,7 +1116,8 @@ static void* csvGenStbThread(void* arg) {
thread_meta->cols_buf = &cols_buf;
start_print_ts = toolsGetTimestampMs();
- for (cur_ts = write_meta->start_ts; cur_ts < write_meta->end_ts; cur_ts += write_meta->ts_step) {
+ cur_ts = write_meta->start_ts;
+ while (cur_ts < write_meta->end_ts) {
// get filename
ret = csvGetFileFullname(write_meta, thread_meta, fullname, sizeof(fullname));
if (ret < 0) {
@@ -1083,7 +1137,7 @@ static void* csvGenStbThread(void* arg) {
thread_meta->output_header = stb->csv_output_header;
slice_cur_ts = cur_ts;
- slice_end_ts = MIN(cur_ts + write_meta->ts_step, write_meta->end_ts);
+ slice_end_ts = MIN(thread_meta->end_ts, write_meta->end_ts);
file_rows = 0;
pre_print_ts = toolsGetTimestampMs();
@@ -1129,6 +1183,7 @@ static void* csvGenStbThread(void* arg) {
}
csvClose(fhdl);
+ cur_ts = thread_meta->end_ts;
csvUpdateSliceRange(write_meta, thread_meta, slice_end_ts);
}
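
The alignment logic introduced above boils down to: clear every time field that `csv_ts_format` does not render, so each output file starts on a boundary of the file-name format, then convert the aligned seconds back to the database precision. The Python sketch below only models that behaviour; the function names and the sample timestamp are hypothetical and are not taosBenchmark code.

```python
import time

def align_seconds(seconds: int, ts_format: str) -> int:
    """Model of csvAlignTimestamp(): clear the fields absent from ts_format."""
    tm = list(time.localtime(seconds))   # [year, mon, mday, hour, min, sec, wday, yday, isdst]
    if "%S" not in ts_format: tm[5] = 0
    if "%M" not in ts_format: tm[4] = 0
    if "%H" not in ts_format: tm[3] = 0
    if "%d" not in ts_format: tm[2] = 1
    if "%m" not in ts_format: tm[1] = 1
    if "%Y" not in ts_format: tm[0] = 1900
    tm[8] = -1                           # let mktime resolve DST for the rebuilt local time
    return int(time.mktime(tuple(tm)))

def start_seconds(precision: str, start_ts: int, ts_format: str) -> int:
    """Model of csvGetStartSeconds(): scale start_ts down to seconds, then align it."""
    divisor = {"us": 1_000_000, "ns": 1_000_000_000}.get(precision, 1_000)
    return align_seconds(start_ts // divisor, ts_format)

if __name__ == "__main__":
    # A start timestamp taken mid-day, sliced by day ("%Y%m%d"), aligns back to local midnight.
    midday_ms = int(time.mktime((2023, 1, 1, 12, 34, 56, 0, 0, -1))) * 1000
    aligned = start_seconds("ms", midday_ms, "%Y%m%d")
    print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(aligned)))  # 2023-01-01 00:00:00
```
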
From 36bdb20078b80594c122b18b3e16537dd969ae65 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Wed, 5 Mar 2025 19:45:35 +0800
Subject: [PATCH 075/105] docs: minor changes
---
docs/zh/06-advanced/05-data-in/index.md | 4 +-
.../zh/14-reference/01-components/01-taosd.md | 344 +++++++++---------
.../14-reference/03-taos-sql/10-function.md | 50 +--
.../03-taos-sql/12-distinguished.md | 8 +-
docs/zh/14-reference/03-taos-sql/14-stream.md | 2 +-
5 files changed, 204 insertions(+), 204 deletions(-)
diff --git a/docs/zh/06-advanced/05-data-in/index.md b/docs/zh/06-advanced/05-data-in/index.md
index cf785eb128..9699455110 100644
--- a/docs/zh/06-advanced/05-data-in/index.md
+++ b/docs/zh/06-advanced/05-data-in/index.md
@@ -154,8 +154,8 @@ let v3 = data["voltage"].split(",");
如下图所示
-* 对字段`ts`使用 split 规则拆分成日期和时间。split 规则需要设置**分隔符**和**拆分数量**,拆分后的字段命名规则为`{原字段名}_{顺序号}`。
-* 对字段`voltage`使用正则表达式 `^(?[0-9]+)(?[a-zA-Z]+)$` 提取出电压值和电压单位,Regex 规则同解析过程中的一样,使用**命名捕获组**命名提取字段。
+* 对字段 `ts` 使用 split 规则拆分成日期和时间。split 规则需要设置 **分隔符** 和 **拆分数量**,拆分后的字段命名规则为 `{原字段名}_{顺序号}`。
+* 对字段 `voltage` 使用正则表达式 `^(?[0-9]+)(?[a-zA-Z]+)$` 提取出电压值和电压单位,Regex 规则同解析过程中的一样,使用 **命名捕获组** 命名提取字段。
* 对字段 `location` 使用 convert 转换,填写一个 JSON map 对象,其中 key 为字段 `current` 的值,`value` 为转换后的值。如图,`location` 字段的值 `"beijing.chaoyang.datun"` 被转换为 `"beijing.chaoyang.datunludong"`。

diff --git a/docs/zh/14-reference/01-components/01-taosd.md b/docs/zh/14-reference/01-components/01-taosd.md
index 85f0ccd822..5ae2640541 100644
--- a/docs/zh/14-reference/01-components/01-taosd.md
+++ b/docs/zh/14-reference/01-components/01-taosd.md
@@ -33,21 +33,21 @@ taosd 命令行参数如下
- 类型:endpoint
- 默认值:localhost:6030
- 动态修改:不支持
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### secondEp
- 说明:taosd 启动时,如果 firstEp 连接不上,尝试连接集群中第二个 dnode 的 endpoint
- 类型:endpoint
- 默认值:无
- 动态修改:不支持
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### fqdn
- 说明:taosd 监听的服务地址
- 类型:fqdn
- 默认值:所在服务器上配置的第一个 hostname
- 动态修改:不支持
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### serverPort
- 说明:taosd 监听的端口
@@ -56,7 +56,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:65056
- 动态修改:不支持
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### compressMsgSize
- 说明:是否对 RPC 消息进行压缩
@@ -65,7 +65,7 @@ taosd 命令行参数如下
- 最小值:-1
- 最大值:100000000
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### shellActivityTimer
- 说明:客户端向 mnode 发送心跳的时长
@@ -75,7 +75,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:120
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### numOfRpcSessions
- 说明:RPC 支持的最大连接数
@@ -84,7 +84,7 @@ taosd 命令行参数如下
- 最小值:100
- 最大值:100000
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfRpcThreads
- 说明:RPC 收发数据的线程数目
@@ -93,7 +93,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:50
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### numOfTaskQueueThreads
- 说明:RPC 处理消息的线程数目
@@ -102,7 +102,7 @@ taosd 命令行参数如下
- 最小值:4
- 最大值:16
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### rpcQueueMemoryAllowed
- 说明:dnode 已经收到并等待处理的 RPC 消息占用内存的最大值
@@ -112,7 +112,7 @@ taosd 命令行参数如下
- 最小值:104857600
- 最大值:INT64_MAX
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### resolveFQDNRetryTime
- 说明:FQDN 解析失败时的重试次数
@@ -121,7 +121,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:10240
- 动态修改:不支持
-- 支持版本:v3.3.4.0 版本之后取消
+- 支持版本:v3.3.4.0 之后取消
#### timeToGetAvailableConn
- 说明:获得可用连接的最长等待时间
@@ -131,7 +131,7 @@ taosd 命令行参数如下
- 最小值:20
- 最大值:1000000
- 动态修改:不支持
-- 支持版本:v3.3.4.0 版本之后取消
+- 支持版本:v3.3.4.0 之后取消
#### maxShellConns
- 说明:允许创建的最大连接数
@@ -140,7 +140,7 @@ taosd 命令行参数如下
- 最小值:10
- 最大值:50000000
- 动态修改:不支持
-- 支持版本:v3.3.4.0 版本之后取消
+- 支持版本:v3.3.4.0 之后取消
#### maxRetryWaitTime
- 说明:重连最大超时时间,从重试时候开始计算
@@ -150,7 +150,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:86400000
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.3.4.0 版本开始引入
+- 支持版本:v3.3.4.0 引入
#### shareConnLimit
- 说明:一个连接可以共享的请求的数目
@@ -159,7 +159,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:512
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.3.4.0 版本开始引入
+- 支持版本:v3.3.4.0 引入
#### readTimeout
- 说明:单个请求最小超时时间
@@ -169,7 +169,7 @@ taosd 命令行参数如下
- 最小值:64
- 最大值:604800
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.3.4.0 版本开始引入
+- 支持版本:v3.3.4.0 引入
### 监控相关
@@ -180,14 +180,14 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### monitorFqdn
- 说明:taosKeeper 服务所在服务器的地址
- 类型:fqdn
- 默认值:无
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### monitorPort
- 说明:taosKeeper 服务所监听的端口号
@@ -196,7 +196,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:65056
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### monitorInterval
- 说明:监控数据库记录系统参数(CPU/内存)的时间间隔
@@ -206,7 +206,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:200000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### monitorMaxLogs
- 说明:缓存的待上报日志条数
@@ -215,7 +215,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:1000000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### monitorComp
- 说明:是否采用压缩方式上报监控日志
@@ -224,7 +224,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,重启后生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### monitorLogProtocol
- 说明:是否打印监控日志
@@ -233,7 +233,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.0.0 版本开始引入
+- 支持版本:v3.3.0.0 引入
#### monitorForceV2
- 说明:是否使用 V2 版本协议上报日志
@@ -242,7 +242,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.0.0 版本开始引入
+- 支持版本:v3.3.0.0 引入
#### telemetryReporting
- 说明:是否上传 telemetry
@@ -251,14 +251,14 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### telemetryServer
- 说明:telemetry 服务器地址
- 类型:fqdn
- 默认值:telemetry.taosdata.com
- 动态修改:不支持
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### telemetryPort
- 说明:telemetry 服务器端口号
@@ -267,7 +267,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:65056
- 动态修改:不支持
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### telemetryInterval
- 说明:telemetry 上传时间间隔
@@ -277,7 +277,7 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:200000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### crashReporting
- 说明:是否上报 crash 信息
@@ -286,7 +286,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
### 查询相关
@@ -297,7 +297,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.0.0.0 版本开始引入
+- 支持版本:v3.0.0.0 引入
#### tagFilterCache
- 说明:是否缓存标签过滤结果
@@ -306,7 +306,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryBufferSize
- 说明:查询可用的缓存大小
@@ -325,7 +325,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryUseMemoryPool
- 说明:查询是否使用内存池管理内存
@@ -334,7 +334,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:不支持
-- 支持版本:从 v3.3.5.0 版本开始引入
+- 支持版本:v3.3.5.0 引入
#### minReservedMemorySize
- 说明:最小预留的系统可用内存数量,除预留外的内存都可以被用于查询
@@ -344,7 +344,7 @@ taosd 命令行参数如下
- 最小值:1024
- 最大值:1000000000
- 动态修改:不支持
-- 支持版本:从 v3.3.5.0 版本开始引入
+- 支持版本:v3.3.5.0 引入
#### singleQueryMaxMemorySize
- 说明:单个查询在单个节点(dnode)上可以使用的内存上限,超过该上限将返回错误
@@ -354,7 +354,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1000000000
- 动态修改:不支持
-- 支持版本:从 v3.3.5.0 版本开始引入
+- 支持版本:v3.3.5.0 引入
#### filterScalarMode
- 说明:强制使用标量过滤模式
@@ -363,7 +363,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryNoFetchTimeoutSec
- 说明:查询中当应用长时间不 FETCH 数据时的超时时间,从最后一次响应起计时,超时自动清除任务 `内部参数`
@@ -372,7 +372,7 @@ taosd 命令行参数如下
- 最小值:60
- 最大值:1000000000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryPlannerTrace
- 说明:查询计划是否输出详细日志 `内部参数`
@@ -381,7 +381,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryNodeChunkSize
- 说明:查询计划的块大小 `内部参数`
@@ -391,7 +391,7 @@ taosd 命令行参数如下
- 最小值:1024
- 最大值:128 * 1024
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryUseNodeAllocator
- 说明:查询计划的分配方法 `内部参数`
@@ -400,7 +400,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryMaxConcurrentTables
- 说明:查询计划的分配方法 `内部参数`
@@ -409,7 +409,7 @@ taosd 命令行参数如下
- 最小值:INT64_M
- 最大值:INT64_MAX
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### queryRsmaTolerance
- 说明:查询计划的分配方法 `内部参数`
@@ -418,7 +418,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:900000
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### enableQueryHb
- 说明:是否发送查询心跳消息 `内部参数`
@@ -427,7 +427,7 @@ taosd 命令行参数如下
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### pqSortMemThreshold
- 说明:排序使用的内存阈值 `内部参数`
@@ -437,26 +437,26 @@ taosd 命令行参数如下
- 最小值:1
- 最大值:10240
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
### 区域相关
#### timezone
- 说明:时区
- 默认值:从系统中动态获取当前的时区设置
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### locale
- 说明:系统区位信息及编码格式
- 默认值:从系统中获取
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### charset
- 说明:字符集编码
- 默认值:从系统中获取
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
:::info
#### 区域相关参数说明
@@ -541,7 +541,7 @@ charset 的有效值是 UTF-8。
- 类型:字符串
- 默认值:/var/lib/taos
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### diskIDCheckEnabled
- 说明:在重启 dnode 时增加了检查 dataDir 所在磁盘 id 是否发生改变
@@ -549,14 +549,14 @@ charset 的有效值是 UTF-8。
- 默认值:1
- 最小值:0
- 最大值:1
-- 支持版本:从 v3.3.5.0 版本开始引入
+- 支持版本:v3.3.5.0 引入
#### tempDir
- 说明:指定所有系统运行过程中的临时文件生成的目录
- 类型:字符串
- 默认值:/tmp
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### minimalDataDirGB
- 说明:dataDir 指定的时序数据存储目录所需要保留的最小空间
@@ -566,7 +566,7 @@ charset 的有效值是 UTF-8。
- 最小值:0.001f
- 最大值:10000000
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### minimalTmpDirGB
- 说明:tempDir 所指定的临时文件目录所需要保留的最小空间
@@ -576,7 +576,7 @@ charset 的有效值是 UTF-8。
- 最小值:0.001f
- 最大值:10000000
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### minDiskFreeSize
- 说明:当某块磁盘上的可用空间小于等于这个阈值时,该磁盘将不再被选择用于生成新的数据文件 `企业版参数`
@@ -586,7 +586,7 @@ charset 的有效值是 UTF-8。
- 最小值:52428800
- 最大值:1073741824
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### s3MigrateIntervalSec
- 说明:本地数据文件自动上传 S3 的触发周期 `企业版参数`
@@ -596,7 +596,7 @@ charset 的有效值是 UTF-8。
- 最小值:600
- 最大值:100000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### s3MigrateEnabled
- 说明:是否自动进行 S3 迁移 `企业版参数`
@@ -605,24 +605,24 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### s3Accesskey
- 说明:冒号分隔的用户 SecretId:SecretKey `企业版参数`
- 示例:AKIDsQmwsfKxTo2A6nGVXZN0UlofKn6JRRSJ:lIdoy99ygEacU7iHfogaN2Xq0yumSm1E
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### s3Endpoint
- 说明:用户所在地域的 COS 服务域名,支持 http 和 https,bucket 的区域需要与 endpoint 保持一致,否则无法访问 `企业版参数`
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### s3BucketName
- 说明:存储桶名称,减号后面是用户注册 COS 服务的 AppId,其中 AppId 是 COS 特有,AWS 和阿里云都没有,配置时需要作为 bucket name 的一部分,使用减号分隔;参数值均为字符串类型,但不需要引号 `企业版参数`
- 示例:test0711-1309024725
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### s3PageCacheSize
- 说明:S3 page cache 缓存页数目 `企业版参数`
@@ -633,7 +633,7 @@ charset 的有效值是 UTF-8。
- 最大值:1048576
- 示例:test0711-1309024725
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### s3UploadDelaySec
- 说明:data 文件持续多长时间不再变动后上传至 S3 `企业版参数`
@@ -643,7 +643,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:2592000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### cacheLazyLoadThreshold
- 说明:缓存的装载策略 `内部参数`
@@ -652,7 +652,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:100000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
### 集群相关
@@ -663,7 +663,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:4096
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfCommitThreads
- 说明:落盘线程的最大数量
@@ -672,7 +672,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfCompactThreads
- 说明:合并线程的最大数量
@@ -681,7 +681,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:16
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfMnodeReadThreads
- 说明:mnode 的 Read 线程数目
@@ -690,7 +690,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfVnodeQueryThreads
- 说明:vnode 的 Query 线程数目
@@ -699,7 +699,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfVnodeFetchThreads
- 说明:vnode 的 Fetch 线程数目
@@ -708,7 +708,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfVnodeRsmaThreads
- 说明:vnode 的 Rsma 线程数目
@@ -717,7 +717,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfQnodeQueryThreads
- 说明:qnode 的 Query 线程数目
@@ -726,7 +726,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfSnodeSharedThreads
- 说明:snode 的共享线程数目
@@ -735,7 +735,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfSnodeUniqueThreads
- 说明:snode 的独占线程数目
@@ -744,7 +744,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### ratioOfVnodeStreamThreads
- 说明:流计算使用 vnode 线程的比例
@@ -753,7 +753,7 @@ charset 的有效值是 UTF-8。
- 最小值:0.01
- 最大值:4
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### ttlUnit
- 说明:ttl 参数的单位
@@ -763,7 +763,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:31572500
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### ttlPushInterval
- 说明:ttl 检测超时频率
@@ -773,7 +773,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:100000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### ttlChangeOnWrite
- 说明:ttl 到期时间是否伴随表的修改操作改变
@@ -782,7 +782,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### ttlBatchDropNum
- 说明:ttl 一批删除子表的数目
@@ -791,7 +791,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:2147483647
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### retentionSpeedLimitMB
- 说明:数据在不同级别硬盘上迁移时的速度限制
@@ -801,7 +801,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1024
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### maxTsmaNum
- 说明:集群内可创建的TSMA个数
@@ -810,7 +810,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:3
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### tmqMaxTopicNum
- 说明:订阅最多可建立的 topic 数量
@@ -819,7 +819,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:10000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### tmqRowSize
- 说明:订阅数据块的最大记录条数
@@ -828,7 +828,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:1000000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### audit
- 说明:审计功能开关;`企业版参数`
@@ -837,7 +837,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### auditInterval
- 说明:审计数据上报的时间间隔;`企业版参数`
@@ -846,7 +846,7 @@ charset 的有效值是 UTF-8。
- 最小值:500
- 最大值:200000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### auditCreateTable
- 说明:是否针对创建子表开启审计功能;`企业版参数`
@@ -855,19 +855,19 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### encryptAlgorithm
- 说明:数据加密算法;`企业版参数`
- 类型:字符串
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### encryptScope
- 说明:加密范围;`企业版参数`
- 类型:字符串
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### enableWhiteList
- 说明:白名单功能开关;`企业版参数`
@@ -876,7 +876,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### syncLogBufferMemoryAllowed
- 说明:一个 dnode 允许的 sync 日志缓存消息占用的内存最大值
@@ -892,109 +892,109 @@ charset 的有效值是 UTF-8。
- 说明:用于同步模块调试,`内部参数`
- 类型:整数
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### syncHeartbeatInterval
- 说明:用于同步模块调试,`内部参数`
- 类型:整数
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### syncHeartbeatTimeout
- 说明:用于同步模块调试,`内部参数`
- 类型:整数
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### syncSnapReplMaxWaitN
- 说明:用于同步模块调试,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### arbHeartBeatIntervalSec
- 说明:用于同步模块调试,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### arbCheckSyncIntervalSec
- 说明:用于同步模块调试,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### arbSetAssignedTimeoutSec
- 说明:用于同步模块调试,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### arbSetAssignedTimeoutSec
- 说明:用于 mnode 模块调试,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### mndLogRetention
- 说明:用于 mnode 模块调试,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### skipGrant
- 说明:用于授权检查,`内部参数`
- 类型:整数
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### trimVDbIntervalSec
- 说明:用于删除过期数据,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### ttlFlushThreshold
- 说明:ttl 定时器的频率,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### compactPullupInterval
- 说明:数据重整定时器的频率,`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### walFsyncDataSizeLimit
- 说明:WAL 进行 FSYNC 的阈值`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### transPullupInterval
- 说明:mnode 执行事务的重试间隔 `内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### mqRebalanceInterval
- 说明:消费者再平衡的时间间隔`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### uptimeInterval
- 说明:用于记录系统启动时间`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### timeseriesThreshold
- 说明:用于统计用量`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### udf
- 说明:是否启动 UDF 服务
@@ -1003,19 +1003,19 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### udfdResFuncs
- 说明:用于统计用量`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### udfdLdLibPath
- 说明:用于统计用量`内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
### 流计算参数
@@ -1026,7 +1026,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### streamBufferSize
- 说明:控制内存中窗口状态缓存的大小
@@ -1036,37 +1036,37 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:9223372036854775807
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### streamAggCnt
- 说明:并发进行聚合计算的数目 `内部参数`
- 类型:整数
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### checkpointInterval
- 说明:checkpoint 同步间隔 `内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### concurrentCheckpoint
- 说明:是否并发检查 checkpoint `内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### maxStreamBackendCache
- 说明:流计算使用的最大缓存 `内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### streamSinkDataRate
- 说明:用于控制流计算结果的写入速度 `内部参数`
- 类型:整数
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### streamNotifyMessageSize
- 说明:用于控制事件通知的消息大小 `内部参数`
@@ -1076,7 +1076,7 @@ charset 的有效值是 UTF-8。
- 最小值:8
- 最大值:1048576
- 动态修改:不支持
-- 支持版本:从 v3.3.6.0 版本开始引入
+- 支持版本:v3.3.6.0 引入
#### streamNotifyFrameSize
- 说明:用于控制事件通知消息发送时底层的帧大小 `内部参数`
@@ -1086,7 +1086,7 @@ charset 的有效值是 UTF-8。
- 最小值:8
- 最大值:1048576
- 动态修改:不支持
-- 支持版本:从 v3.3.6.0 版本开始引入
+- 支持版本:v3.3.6.0 引入
### 日志相关
@@ -1095,7 +1095,7 @@ charset 的有效值是 UTF-8。
- 类型:字符串
- 默认值:/var/log/taos
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### minimalLogDirGB
- 说明:日志文件夹所在磁盘可用空间大小小于该值时,停止写日志
@@ -1105,7 +1105,7 @@ charset 的有效值是 UTF-8。
- 最小值:0.001f
- 最大值:10000000
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### numOfLogLines
- 说明:单个日志文件允许的最大行数
@@ -1114,7 +1114,7 @@ charset 的有效值是 UTF-8。
- 最小值:1000
- 最大值:2000000000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### asyncLog
- 说明:日志写入模式
@@ -1123,7 +1123,7 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### logKeepDays
- 说明:日志文件的最长保存时间,小于等于0意味着只有两个日志文件相互切换保存日志,超过两个文件保存数量的日志会被删除;当设置为大于 0 的值时,当日志文件大小达到设置的上限时会被重命名为 taosdlog.yyy,其中 yyy 为日志文件最后修改的时间戳,并滚动产生新的日志文件
@@ -1133,7 +1133,7 @@ charset 的有效值是 UTF-8。
- 最小值:-365000
- 最大值:365000
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### slowLogThreshold
- 说明:慢查询门限值,大于等于门限值认为是慢查询
@@ -1143,7 +1143,7 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:2147483647
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.0.0 版本开始引入
+- 支持版本:v3.3.0.0 引入
#### slowLogMaxLen
- 说明:慢查询日志最大长度
@@ -1152,19 +1152,19 @@ charset 的有效值是 UTF-8。
- 最小值:1
- 最大值:16384
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.0.0 版本开始引入
+- 支持版本:v3.3.0.0 引入
#### slowLogScope
- 说明:慢查询记录类型
- 取值范围:ALL/QUERY/INSERT/OTHERS/NONE
- 默认值:QUERY
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.0.0 版本开始引入
+- 支持版本:v3.3.0.0 引入
#### slowLogExceptDb
- 说明:指定的数据库不上报慢查询,仅支持配置一个数据库
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.0.0 版本开始引入
+- 支持版本:v3.3.0.0 引入
#### debugFlag
- 说明:运行日志开关,该参数的设置会影响所有模块的开关,后设置的参数起效
@@ -1172,7 +1172,7 @@ charset 的有效值是 UTF-8。
- 取值范围:131(输出错误和警告日志),135(输出错误、警告和调试日志),143(输出错误、警告、调试和跟踪日志)
- 默认值:131 或 135 (取决于不同模块)
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### tmrDebugFlag
- 说明:定时器模块的日志开关
@@ -1180,7 +1180,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### uDebugFlag
- 说明:共用功能模块的日志开关
@@ -1188,7 +1188,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### rpcDebugFlag
- 说明:rpc 模块的日志开关
@@ -1196,7 +1196,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### qDebugFlag
- 说明:query 模块的日志开关
@@ -1204,7 +1204,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### dDebugFlag
- 说明:dnode 模块的日志开关
@@ -1212,7 +1212,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### vDebugFlag
- 说明:vnode 模块的日志开关
@@ -1220,7 +1220,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### mDebugFlag
- 说明:mnode 模块的日志开关
@@ -1228,7 +1228,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### azDebugFlag
- 说明:S3 模块的日志开关
@@ -1236,7 +1236,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### sDebugFlag
- 说明:sync 模块的日志开关
@@ -1244,7 +1244,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### tsdbDebugFlag
- 说明:tsdb 模块的日志开关
@@ -1252,7 +1252,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### tqDebugFlag
- 说明:tq 模块的日志开关
@@ -1260,7 +1260,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### fsDebugFlag
@@ -1269,7 +1269,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### udfDebugFlag
- 说明:udf 模块的日志开关
@@ -1277,7 +1277,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### smaDebugFlag
- 说明:sma 模块的日志开关
@@ -1285,7 +1285,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### idxDebugFlag
- 说明:index 模块的日志开关
@@ -1293,7 +1293,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### tdbDebugFlag
- 说明:tdb 模块的日志开关
@@ -1301,7 +1301,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### metaDebugFlag
- 说明:meta 模块的日志开关
@@ -1309,7 +1309,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### stDebugFlag
- 说明:stream 模块的日志开关
@@ -1317,7 +1317,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### sndDebugFlag
- 说明:snode 模块的日志开关
@@ -1325,7 +1325,7 @@ charset 的有效值是 UTF-8。
- 取值范围:同上
- 默认值:131
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
### 调试相关
@@ -1337,13 +1337,13 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### configDir
- 说明:配置文件所在目录
- 类型:字符串
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### forceReadConfig
- 说明:是否强制使用 config 文件中配置的参数
@@ -1352,13 +1352,13 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:不支持
-- 支持版本:从 v3.3.5.0 版本开始引入
+- 支持版本:v3.3.5.0 引入
#### scriptDir
- 说明:测试工具的脚本目录 `内部参数`
- 类型:字符串
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### assert
- 说明:断言控制开关
@@ -1367,67 +1367,67 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### randErrorChance
- 说明:用于随机失败测试 `内部参数`
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### randErrorDivisor
- 说明:用于随机失败测试 `内部参数`
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### randErrorScope
- 说明:用于随机失败测试 `内部参数`
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### safetyCheckLevel
- 说明:用于随机失败测试 `内部参数`
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### experimental
- 说明:用于一些实验特性 `内部参数`
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### simdEnable
- 说明:用于测试 SIMD 加速 `内部参数`
- 动态修改:不支持
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### AVX512Enable
- 说明:用于测试 AVX512 加速 `内部参数`
- 动态修改:不支持
-- 支持版本:从 v3.3.4.3 版本开始引入
+- 支持版本:v3.3.4.3 引入
#### rsyncPort
- 说明:用于调试流计算 `内部参数`
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### snodeAddress
- 说明:用于调试流计算 `内部参数`
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### checkpointBackupDir
- 说明:用于恢复 snode 数据 `内部参数`
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### enableAuditDelete
- 说明:用于测试审计功能 `内部参数`
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### slowLogThresholdTest
- 说明:用于测试慢日志 `内部参数`
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### bypassFlag
- 说明:写入流程旁路开关,控制写入消息在处理流程中的哪个环节提前返回,用于测试
@@ -1435,7 +1435,7 @@ charset 的有效值是 UTF-8。
- 取值范围:0:正常写入,1:写入消息在 taos 客户端发送 RPC 消息前返回,2:写入消息在 taosd 服务端收到 RPC 消息后返回,4:写入消息在 taosd 服务端写入内存缓存前返回,8:写入消息在 taosd 服务端数据落盘前返回
- 默认值:0
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.3.4.5 版本开始引入
+- 支持版本:v3.3.4.5 引入
### 压缩参数
@@ -1446,7 +1446,7 @@ charset 的有效值是 UTF-8。
- 最小值:0.00000001
- 最大值:0.1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### dPrecision
- 说明:设置 double 类型浮点数压缩精度,小于此值的浮点数尾数部分将被截取
@@ -1455,14 +1455,14 @@ charset 的有效值是 UTF-8。
- 最小值:0.0000000000000001
- 最大值:0.1
- 动态修改:支持通过 SQL 修改,立即生效
-- 支持版本:从 v3.1.0.0 版本开始引入
+- 支持版本:v3.1.0.0 引入
#### lossyColumn
- 说明:对 float 和/或 double 类型启用 TSZ 有损压缩
- 取值范围:float/double/none
- 默认值:none,表示关闭有损压缩
- 动态修改:不支持
-- 支持版本:从 v3.1.0.0 版本引入,v3.3.0.0 以后废弃
+- 支持版本:v3.1.0.0 引入,v3.3.0.0 以后废弃
#### ifAdtFse
- 说明:在启用 TSZ 有损压缩时,使用 FSE 算法替换 HUFFMAN 算法,FSE 算法压缩速度更快,但解压稍慢,追求压缩速度可选用此算法
@@ -1471,26 +1471,26 @@ charset 的有效值是 UTF-8。
- 最小值:0
- 最大值:1
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本引入,v3.3.0.0 以后废弃
+- 支持版本:v3.1.0.0 引入,v3.3.0.0 以后废弃
#### maxRange
- 说明:用于有损压缩设置 `内部参数`
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本引入,v3.3.0.0 以后废弃
+- 支持版本:v3.1.0.0 引入,v3.3.0.0 以后废弃
#### curRange
- 说明:用于有损压缩设置 `内部参数`
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本引入,v3.3.0.0 以后废弃
+- 支持版本:v3.1.0.0 引入,v3.3.0.0 以后废弃
#### compressor
- 说明:用于有损压缩设置 `内部参数`
- 动态修改:支持通过 SQL 修改,重启生效
-- 支持版本:从 v3.1.0.0 版本引入,v3.3.0.0 以后废弃
+- 支持版本:v3.1.0.0 引入,v3.3.0.0 以后废弃
**补充说明**
-1. 在 3.3.5.0 之后,所有配置参数都将被持久化到本地存储,重启数据库服务后,将默认使用持久化的配置参数列表;如果您希望继续使用 config 文件中配置的参数,需设置 forceReadConfig 为 1。
-2. 在 3.2.0.0 ~ 3.3.0.0(不包含)版本生效,启用该参数后不能回退到升级前的版本
+1. 在 v3.3.5.0 之后,所有配置参数都将被持久化到本地存储,重启数据库服务后,将默认使用持久化的配置参数列表;如果您希望继续使用 config 文件中配置的参数,需设置 forceReadConfig 为 1。
+2. 在 v3.2.0.0 ~ v3.3.0.0(不包含)生效,启用该参数后不能回退到升级前的版本
3. TSZ 压缩算法是通过数据预测技术完成的压缩,所以更适合有规律变化的数据
4. TSZ 压缩时间会更长一些,如果您的服务器 CPU 空闲多,存储空间小的情况下适合选用
5. 示例:对 float 和 double 类型都启用有损压缩
diff --git a/docs/zh/14-reference/03-taos-sql/10-function.md b/docs/zh/14-reference/03-taos-sql/10-function.md
index 0ac0250a29..9c8e2ebb20 100644
--- a/docs/zh/14-reference/03-taos-sql/10-function.md
+++ b/docs/zh/14-reference/03-taos-sql/10-function.md
@@ -1091,9 +1091,9 @@ CAST(expr AS type_name)
- 对于不能支持的类型转换会直接报错。
- 对于类型支持但某些值无法正确转换的情况,对应的转换后的值以转换函数输出为准。目前可能遇到的几种情况:
- 1)字符串类型转换数值类型时可能出现的无效字符情况,例如 "a" 可能转为 0,但不会报错。
- 2)转换到数值类型时,数值大于 type_name 可表示的范围时,则会溢出,但不会报错。
- 3)转换到字符串类型时,如果转换后长度超过 type_name 中指定的长度,则会截断,但不会报错。
+ - 字符串类型转换数值类型时可能出现的无效字符情况,例如 "a" 可能转为 0,但不会报错。
+ - 转换到数值类型时,数值大于 type_name 可表示的范围时,则会溢出,但不会报错。
+ - 转换到字符串类型时,如果转换后长度超过 type_name 中指定的长度,则会截断,但不会报错。
#### TO_ISO8601
@@ -1230,7 +1230,7 @@ TO_TIMESTAMP(ts_str_literal, format_str_literal)
**功能说明**:将字符串按照指定格式转化为时间戳。
-**使用说明**:ver-3.2.2.0
+**使用说明**:v3.2.2.0
**返回结果数据类型**:TIMESTAMP。
@@ -1568,7 +1568,7 @@ algo_type: {
**适用于**:表和超级表。
**说明**:
-- p 值范围是 [0,100],当为 0 时等同 于MIN,为 100 时等同于 MAX。
+- p 值范围是 [0,100],当为 0 时等同于 MIN,为 100 时等同于 MAX。
- algo_type 取值为 "default" 或 "t-digest"。输入为 "default" 时函数使用基于直方图算法进行计算。输入为 "t-digest" 时使用 t-digest 算法计算分位数的近似结果。如果不指定 algo_type 则使用 "default" 算法。
- t-digest 算法的近似结果对于输入数据顺序敏感,对超级表查询时不同的输入排序结果可能会有微小的误差。
@@ -1884,12 +1884,12 @@ ignore_null_values: {
- INTERP 根据 FILL 字段来决定在每个符合输出条件的时刻如何进行插值。关于 FILL 子句如何使用请参考 [FILL 子句](../distinguished/#fill-子句)
- INTERP 可以在 RANGE 字段中只指定唯一的时间戳对单个时间点进行插值,在这种情况下,EVERY 字段可以省略。例如 SELECT INTERP(col) FROM tb RANGE('2023-01-01 00:00:00') FILL(linear)。
- INTERP 作用于超级表时,会将该超级表下的所有子表数据按照主键列排序后进行插值计算,也可以搭配 PARTITION BY tbname 使用,将结果强制规约到单个时间线。
-- INTERP 可以与伪列 _irowts 一起使用,返回插值点所对应的时间戳(3.0.2.0 版本以后支持)。
-- INTERP 可以与伪列 _isfilled 一起使用,显示返回结果是否为原始记录或插值算法产生的数据(3.0.3.0 版本以后支持)。
+- INTERP 可以与伪列 _irowts 一起使用,返回插值点所对应的时间戳(v3.0.2.0 以后支持)。
+- INTERP 可以与伪列 _isfilled 一起使用,显示返回结果是否为原始记录或插值算法产生的数据(v3.0.3.0 以后支持)。
- INTERP 对于带复合主键的表的查询,若存在相同时间戳的数据,则只有对应的复合主键最小的数据参与运算。
-- INTERP 查询支持 NEAR FILL 模式,即当需要 FILL 时,使用距离当前时间点最近的数据进行插值,当前后时间戳与当前时间断面一样近时,FILL 前一行的值。此模式在流计算中和窗口查询中不支持。例如 SELECT INTERP(col) FROM tb RANGE('2023-01-01 00:00:00', '2023-01-01 00:10:00') FILL(NEAR)(3.3.4.9 版本及以后支持)。
-- INTERP 只有在使用 FILL PREV/NEXT/NEAR 模式时才可以使用伪列 `_irowts_origin`。`_irowts_origin` 在 3.3.4.9 版本及以后支持。
-- INTERP `RANGE`子句从 3.3.4.9 版本开始支持时间范围的扩展,如 `RANGE('2023-01-01 00:00:00', 10s)` 表示只能使用时间点 '2023-01-01 00:00:00' 周边 10s 内的数据进行插值,FILL PREV/NEXT/NEAR 分别表示从时间点开始向前/向后/前后在时间范围内查找数据,若时间点周边在指定时间范围内没有数据,则使用 FILL 指定的默认值进行插值,因此此时 FILL 子句必须同时指定默认值。例如 SELECT INTERP(col) FROM tb RANGE('2023-01-01 00:00:00', 10s) FILL(PREV, 1)。从 3.3.6.0 版本开始支持时间区间和时间范围的组合,对于时间区间内的每个断面进行插值时都需要满足时间范围的要求,在此之前的版本仅支持时间点和时间范围的组合。时间范围的值域规则与 EVERY 类似,单位不能是年或月,值必须大于 0,不能带引号。使用该扩展时,不支持除 FILL PREV/NEXT/NEAR 外的其他 FILL 模式。
+- INTERP 查询支持 NEAR FILL 模式,即当需要 FILL 时,使用距离当前时间点最近的数据进行插值,当前后时间戳与当前时间断面一样近时,FILL 前一行的值。此模式在流计算中和窗口查询中不支持。例如 SELECT INTERP(col) FROM tb RANGE('2023-01-01 00:00:00', '2023-01-01 00:10:00') FILL(NEAR)(v3.3.4.9 及以后支持)。
+- INTERP 只有在使用 FILL PREV/NEXT/NEAR 模式时才可以使用伪列 `_irowts_origin`。`_irowts_origin` 在 v3.3.4.9 以后支持。
+- INTERP `RANGE`子句从 v3.3.4.9 开始支持时间范围的扩展,如 `RANGE('2023-01-01 00:00:00', 10s)` 表示只能使用时间点 '2023-01-01 00:00:00' 周边 10s 内的数据进行插值,FILL PREV/NEXT/NEAR 分别表示从时间点开始向前/向后/前后在时间范围内查找数据,若时间点周边在指定时间范围内没有数据,则使用 FILL 指定的默认值进行插值,因此此时 FILL 子句必须同时指定默认值。例如 SELECT INTERP(col) FROM tb RANGE('2023-01-01 00:00:00', 10s) FILL(PREV, 1)。从 v3.3.6.0 开始支持时间区间和时间范围的组合,对于时间区间内的每个断面进行插值时都需要满足时间范围的要求,在此之前的版本仅支持时间点和时间范围的组合。时间范围的值域规则与 EVERY 类似,单位不能是年或月,值必须大于 0,不能带引号。使用该扩展时,不支持除 `FILL PREV/NEXT/NEAR` 外的其他 FILL 模式。
### LAST
@@ -2032,8 +2032,8 @@ TOP(expr, k)
**使用说明**:
-- *k* 值取值范围 1≤*k*≤100;
-- 系统同时返回该记录关联的时间戳列;
+- *k* 值取值范围 1≤*k*≤100。
+- 系统同时返回该记录关联的时间戳列。
- 限制:TOP 函数不支持 FILL 子句。
### UNIQUE
@@ -2056,7 +2056,7 @@ UNIQUE(expr)
COLS(func(expr), output_expr1, [, output_expr2] ... )
```
-**功能说明**:在选择函数 func(expr) 执行结果所在数据行上,执行表达式 output_expr1, [, output_expr2],返回其结果,func(expr)结果不输出。
+**功能说明**:在选择函数 func(expr) 执行结果所在数据行上,执行表达式 output_expr1, [, output_expr2],返回其结果,func(expr) 结果不输出。
**返回数据类型**:返回多列数据,每列数据类型为对应表达式返回结果的类型。
@@ -2134,11 +2134,11 @@ ignore_option: {
}
```
-**功能说明**:统计表中特定列与之前行的当前列有效值之差。ignore_option 取值为 0|1|2|3,可以不填,默认值为 0.
-- `0` 表示不忽略(diff结果)负值不忽略 null 值
-- `1` 表示(diff结果)负值作为 null 值
-- `2` 表示不忽略(diff结果)负值但忽略 null 值
-- `3` 表示忽略(diff结果)负值且忽略 null 值
+**功能说明**:统计表中特定列与之前行的当前列有效值之差。ignore_option 取值为 0|1|2|3,可以不填,默认值为 0。
+- `0` 表示 diff 结果不忽略负值且不忽略 null 值
+- `1` 表示 diff 结果的负值作为 null 值
+- `2` 表示 diff 结果不忽略负值但忽略 null 值
+- `3` 表示 diff 结果忽略负值且忽略 null 值
- 对于存在复合主键的表的查询,若时间戳相同的数据存在多条,则只有对应的复合主键最小的数据参与运算。
**返回数据类型**:bool、时间戳及整型数值类型均返回 bigint,浮点类型返回 double,若 diff 结果溢出则返回溢出后的值。
@@ -2150,13 +2150,13 @@ ignore_option: {
**使用说明**:
- diff 是计算本行特定列与同列的前一个有效数据的差值,同列的前一个有效数据:指的是同一列中时间戳较小的最临近的非空值。
-- 数值类型 diff 结果为对应的算术差值;时间戳类型根据数据库的时间戳精度进行差值计算;bool 类型计算差值时 true 视为 1,false 视为 0
-- 如当前行数据为 null 或者没有找到同列前一个有效数据时,diff 结果为 null
-- 忽略负值时(ignore_option 设置为 1 或 3 ),如果 diff 结果为负值,则结果设置为 null,然后根据 null 值过滤规则进行过滤
-- 当 diff 结果发生溢出时,结果是否是 `应该忽略的负值` 取决于逻辑运算结果是正数还是负数,例如 9223372036854775800 - (-9223372036854775806) 的值超出 BIGINT 的范围,diff 结果会显示溢出值 -10,但并不会被作为负值忽略
-- 单个语句中可以使用单个或者多个 diff,并且每个 diff 可以指定相同或不同的 ignore_option,当单个语句中存在多个 diff 时当且仅当某行所有 diff 的结果都为 null,并且 ignore_option 都设置为忽略 null 值,该行才从结果集中剔除
+- 数值类型 diff 结果为对应的算术差值;时间戳类型根据数据库的时间戳精度进行差值计算;bool 类型计算差值时 true 视为 1,false 视为 0。
+- 如当前行数据为 null 或者没有找到同列前一个有效数据时,diff 结果为 null。
+- 忽略负值时(ignore_option 设置为 1 或 3 ),如果 diff 结果为负值,则结果设置为 null,然后根据 null 值过滤规则进行过滤。
+- 当 diff 结果发生溢出时,结果是否是 `应该忽略的负值` 取决于逻辑运算结果是正数还是负数,例如 9223372036854775800 - (-9223372036854775806) 的值超出 BIGINT 的范围,diff 结果会显示溢出值 -10,但并不会被作为负值忽略。
+- 单个语句中可以使用单个或者多个 diff,并且每个 diff 可以指定相同或不同的 ignore_option,当单个语句中存在多个 diff 时当且仅当某行所有 diff 的结果都为 null,并且 ignore_option 都设置为忽略 null 值,该行才从结果集中剔除。
- 可以选择与相关联的列一起使用。例如 `select _rowts, DIFF() from`。
-- 当没有复合主键时,如果不同的子表有相同时间戳的数据,会提示 "Duplicate timestamps not allowed"
+- 当没有复合主键时,如果不同的子表有相同时间戳的数据,会提示 "Duplicate timestamps not allowed"。
- 当使用复合主键时,不同子表的时间戳和主键组合可能相同,使用哪一行取决于先找到哪一行,这意味着在这种情况下多次运行 diff() 的结果可能会不同。
### IRATE
@@ -2232,7 +2232,7 @@ STATEDURATION(expr, oper, val, unit)
**参数范围**:
-- oper:`'LT'` (小于)、`'GT'`(大于)、`'LE'`(小于等于)、`'GE'`(大于等于)、`'NE'`(不等于)、`'EQ'`(等于),不区分大小写,但需要用`''`包括。
+- oper:`'LT'` (小于)、`'GT'`(大于)、`'LE'`(小于等于)、`'GE'`(大于等于)、`'NE'`(不等于)、`'EQ'`(等于),不区分大小写,但需要用 `''` 包括。
- val:数值型
- unit:时间长度的单位,可取值时间单位:1b(纳秒)、1u(微秒)、1a(毫秒)、1s(秒)、1m(分)、1h(小时)、1d(天)、1w(周)。如果省略,默认为当前数据库精度。
diff --git a/docs/zh/14-reference/03-taos-sql/12-distinguished.md b/docs/zh/14-reference/03-taos-sql/12-distinguished.md
index ffca617f7a..34959996c2 100644
--- a/docs/zh/14-reference/03-taos-sql/12-distinguished.md
+++ b/docs/zh/14-reference/03-taos-sql/12-distinguished.md
@@ -76,7 +76,7 @@ window_clause: {
FILL 语句指定某一窗口区间数据缺失的情况下的填充模式。填充模式包括以下几种:
1. 不进行填充:NONE(默认填充模式)。
-2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如 FILL(VALUE, 1.23)。这里需要注意,最终填充的值受由相应列的类型决定,如 FILL(VALUE, 1.23),相应列为 INT 类型,则填充值为 1,若查询列表中有多列需要 FILL,则需要给每一个 FILL 列指定 VALUE,如 `SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`,注意,SELECT 表达式中只有包含普通列时才需要指定 FILL VALUE,如 `_wstart`、`_wstart+1a`、`now`、`1+1` 以及使用 partition by 时的 partition key (如 tbname)都不需要指定 VALUE,如 `timediff(last(ts), _wstart)` 则需要指定 VALUE。
+2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如 `FILL(VALUE, 1.23)`。这里需要注意,最终填充的值由相应列的类型决定,如 `FILL(VALUE, 1.23)`,相应列为 INT 类型,则填充值为 1。若查询列表中有多列需要 FILL,则需要给每一个 FILL 列指定 VALUE,如 `SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`。注意,SELECT 表达式中只有在包含普通列时才需要指定 FILL VALUE,如 `_wstart`、`_wstart+1a`、`now`、`1+1` 以及使用 `partition by` 时的 `partition key` (如 tbname)都不需要指定 VALUE,而如 `timediff(last(ts), _wstart)` 则需要指定 VALUE。
3. PREV 填充:使用前一个非 NULL 值填充数据。例如 FILL(PREV)。
4. NULL 填充:使用 NULL 填充数据。例如 FILL(NULL)。
5. LINEAR 填充:根据前后距离最近的非 NULL 值做线性插值填充。例如 FILL(LINEAR)。
@@ -165,7 +165,7 @@ TDengine 还支持将 CASE 表达式用在状态量,可以表达某个状态
SELECT tbname, _wstart, CASE WHEN voltage >= 205 and voltage <= 235 THEN 1 ELSE 0 END status FROM meters PARTITION BY tbname STATE_WINDOW(CASE WHEN voltage >= 205 and voltage <= 235 THEN 1 ELSE 0 END);
```
-状态窗口支持使用 TRUE_FOR 参数来设定窗口的最小持续时长。如果某个状态窗口的宽度低于该设定值,则会自动舍弃,不返回任何计算结果。例如,设置最短持续时长为 3s:
+状态窗口支持使用 TRUE_FOR 参数来设定窗口的最小持续时长。如果某个状态窗口的宽度低于该设定值,则会自动舍弃,不返回任何计算结果。例如,设置最短持续时长为 3s。
```
SELECT COUNT(*), FIRST(ts), status FROM temp_tb_1 STATE_WINDOW(status) TRUE_FOR (3s);
@@ -202,7 +202,7 @@ select _wstart, _wend, count(*) from t event_window start with c1 > 0 end with c

-事件窗口支持使用 TRUE_FOR 参数来设定窗口的最小持续时长。如果某个事件窗口的宽度低于该设定值,则会自动舍弃,不返回任何计算结果。例如,设置最短持续时长为 3s:
+事件窗口支持使用 TRUE_FOR 参数来设定窗口的最小持续时长。如果某个事件窗口的宽度低于该设定值,则会自动舍弃,不返回任何计算结果。例如,设置最短持续时长为 3s。
```
select _wstart, _wend, count(*) from t event_window start with c1 > 0 end with c2 < 10 true_for (3s);
@@ -223,7 +223,7 @@ select _wstart, _wend, count(*) from t count_window(4);
### 时间戳伪列
-窗口聚合查询结果中,如果 SQL 语句中没有指定输出查询结果中的时间戳列,那么最终结果中不会自动包含窗口的时间列信息。如果需要在结果中输出聚合结果所对应的时间窗口信息,需要在 SELECT 子句中使用时间戳相关的伪列: 时间窗口起始时间 (\_WSTART), 时间窗口结束时间 (\_WEND), 时间窗口持续时间 (\_WDURATION), 以及查询整体窗口相关的伪列:查询窗口起始时间(\_QSTART) 和查询窗口结束时间(\_QEND)。需要注意的是时间窗口起始时间和结束时间均是闭区间,时间窗口持续时间是数据当前时间分辨率下的数值。例如,如果当前数据库的时间分辨率是毫秒,那么结果中 500 就表示当前时间窗口的持续时间是 500毫秒 (500 ms)。
+窗口聚合查询结果中,如果 SQL 语句中没有指定输出查询结果中的时间戳列,那么最终结果中不会自动包含窗口的时间列信息。如果需要在结果中输出聚合结果所对应的时间窗口信息,需要在 SELECT 子句中使用时间戳相关的伪列:时间窗口起始时间 (\_WSTART),时间窗口结束时间 (\_WEND),时间窗口持续时间 (\_WDURATION),以及查询整体窗口相关的伪列:查询窗口起始时间(\_QSTART) 和查询窗口结束时间(\_QEND)。需要注意的是时间窗口起始时间和结束时间均是闭区间,时间窗口持续时间是数据当前时间分辨率下的数值。例如,如果当前数据库的时间分辨率是毫秒,那么结果中 500 就表示当前时间窗口的持续时间是 500毫秒 (500 ms)。
### 示例
diff --git a/docs/zh/14-reference/03-taos-sql/14-stream.md b/docs/zh/14-reference/03-taos-sql/14-stream.md
index c8c490dd84..9359f85915 100644
--- a/docs/zh/14-reference/03-taos-sql/14-stream.md
+++ b/docs/zh/14-reference/03-taos-sql/14-stream.md
@@ -300,7 +300,7 @@ RESUME STREAM [IF EXISTS] [IGNORE UNTREATED] stream_name;
CREATE SNODE ON DNODE [id]
```
其中的 id 是集群中的 dnode 的序号。请注意选择的dnode,流计算的中间状态将自动在其上进行备份。
-从 3.3.4.0 版本开始,在多副本环境中创建流会进行 snode 的**存在性检查**,要求首先创建 snode。如果 snode 不存在,无法创建流。
+从 v3.3.4.0 开始,在多副本环境中创建流会进行 snode 的**存在性检查**,要求首先创建 snode。如果 snode 不存在,无法创建流。
## 流式计算的事件通知
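
The DIFF `ignore_option` rules documented in 10-function.md above are easier to see in code than in prose. The snippet below is a standalone Python model of those rules for a single DIFF column, written only to make the 0/1/2/3 combinations concrete; it is not TDengine code and the sample column values are made up.

```python
# Model of DIFF ignore_option semantics as documented; sample data is hypothetical.
def diff(values, ignore_option=0):
    ignore_negative = ignore_option in (1, 3)
    ignore_null     = ignore_option in (2, 3)
    prev, out = None, []
    for v in values:
        if v is None or prev is None:       # current row is NULL, or no earlier non-NULL value
            d = None
        else:
            d = v - prev
            if ignore_negative and d < 0:   # negative results become NULL when ignored
                d = None
        if v is not None:
            prev = v                        # the "previous valid value" skips NULL rows
        if d is None and ignore_null:       # rows whose diff is NULL are dropped from the result
            continue
        out.append(d)
    return out

print(diff([10, 7, None, 12], ignore_option=0))  # [None, -3, None, 5]
print(diff([10, 7, None, 12], ignore_option=3))  # [5]
```
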
From 59ef7cd3be5e1b07eefa241c2698786f7fd02b54 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 09:01:02 +0800
Subject: [PATCH 076/105] test: add csv interlace case
---
tests/army/tools/benchmark/basic/exportCsv.py | 193 ++++++++++++++----
1 file changed, 153 insertions(+), 40 deletions(-)
diff --git a/tests/army/tools/benchmark/basic/exportCsv.py b/tests/army/tools/benchmark/basic/exportCsv.py
index b8b3828ea6..fececcf7f2 100644
--- a/tests/army/tools/benchmark/basic/exportCsv.py
+++ b/tests/army/tools/benchmark/basic/exportCsv.py
@@ -13,6 +13,7 @@
import os
import json
import csv
+import datetime
import frame
import frame.etool
@@ -26,80 +27,192 @@ from frame import *
class TDTestCase(TBase):
def caseDescription(self):
"""
- [TD-11510] taosBenchmark test cases
- """
- # check correct
- def checkCorrect(self, csvFile, allRows, interlaceRows):
- # open as csv
- count = 0
- batch = 0
- name = ""
- with open(csvFile) as file:
+ [TS-5089] taosBenchmark supports exporting csv
+ """
+
+
+ def clear_directory(self, target_dir: str = 'csv'):
+ try:
+ if not os.path.exists(target_dir):
+ return
+ for entry in os.listdir(target_dir):
+ entry_path = os.path.join(target_dir, entry)
+ if os.path.isfile(entry_path) or os.path.islink(entry_path):
+ os.unlink(entry_path)
+ else:
+ shutil.rmtree(entry_path)
+
+ tdLog.debug("clear succ, dir: %s " % (target_dir))
+ except OSError as e:
+ tdLog.exit("clear fail, dir: %s " % (target_dir))
+
+
+ def convert_timestamp(self, ts, ts_format):
+ dt_object = datetime.datetime.fromtimestamp(ts / 1000)
+ formatted_time = dt_object.strftime(ts_format)
+ return formatted_time
+
+
+ def calc_time_slice_partitions(self, total_start_ts, total_end_ts, ts_step, childs, ts_format, ts_interval):
+ interval_days = int(ts_interval[:-1])
+ n_days_millis = interval_days * 24 * 60 * 60 * 1000
+
+ dt_start = datetime.datetime.fromtimestamp(total_start_ts / 1000.0)
+ formatted_str = dt_start.strftime(ts_format)
+ s0_dt = datetime.datetime.strptime(formatted_str, ts_format)
+ s0 = int(s0_dt.timestamp() * 1000)
+
+ partitions = []
+ current_s = s0
+
+ while current_s <= total_end_ts:
+ current_end = current_s + n_days_millis
+ start_actual = max(current_s, total_start_ts)
+ end_actual = min(current_end, total_end_ts)
+
+ if start_actual >= end_actual:
+ count = 0
+ else:
+ delta = end_actual - start_actual
+ delta_start = start_actual - total_start_ts
+ delta_end = end_actual - total_start_ts
+ if delta % ts_step:
+ count = delta // ts_step + 1
+ else:
+ count = delta // ts_step
+
+ count *= childs
+
+ partitions.append({
+ "start_ts": current_s,
+ "end_ts": current_end,
+ "start_time": self.convert_timestamp(current_s, ts_format),
+ "end_time": self.convert_timestamp(current_end, ts_format),
+ "count": count
+ })
+
+ current_s += n_days_millis
+
+ # partitions = [p for p in partitions if p['count'] > 0]
+ return partitions
+
+
+ def check_stb_csv_correct(self, csv_file_name, all_rows, interlace_rows):
+ # open as csv
+ tbname_idx = 14
+ count = 0
+ batch = 0
+ name = ""
+ header = True
+ with open(csv_file_name) as file:
rows = csv.reader(file)
for row in rows:
- # interlaceRows
+ if header:
+ header = False
+ continue
+
+ # interlace_rows
if name == "":
- name = row[0]
+ name = row[tbname_idx]
batch = 1
else:
- if name == row[0]:
+ if name == row[tbname_idx]:
batch += 1
else:
# switch to another child table
- if batch != interlaceRows:
- tdLog.exit(f"interlaceRows invalid. tbName={name} real={batch} expect={interlaceRows} i={count} csvFile={csvFile}")
+ if batch != interlace_rows:
+ tdLog.exit(f"interlace_rows invalid. tbName={name} actual={batch} expected={interlace_rows} i={count} csv_file_name={csv_file_name}")
batch = 1
- name = row[0]
+ name = row[tbname_idx]
# count ++
count += 1
# batch
- if batch != interlaceRows:
- tdLog.exit(f"interlaceRows invalid. tbName={name} real={batch} expect={interlaceRows} i={count} csvFile={csvFile}")
+ if batch != interlace_rows:
+ tdLog.exit(f"interlace_rows invalid. tbName={name} actual={batch} expected={interlace_rows} i={count} csv_file_name={csv_file_name}")
# check all rows
- if count != allRows:
- tdLog.exit(f"allRows invalid. real={count} expect={allRows} csvFile={csvFile}")
+ if count != all_rows:
+ tdLog.exit(f"all_rows invalid. actual={count} expected={all_rows} csv_file_name={csv_file_name}")
- tdLog.info(f"Check generate csv file successfully. csvFile={csvFile} count={count} interlaceRows={batch}")
+ tdLog.info(f"Check generate csv file successfully. csv_file_name={csv_file_name} count={count} interlace_rows={batch}")
+
+
+ # check correct
+ def check_stb_correct(self, data, db, stb):
+ filepath = data["output_path"]
+ stbName = stb["name"]
+ childs = stb["childtable_to"] - stb["childtable_from"]
+ insert_rows = stb["insert_rows"]
+ interlace_rows = stb["interlace_rows"]
+ csv_file_prefix = stb["csv_file_prefix"]
+ csv_ts_format = stb.get("csv_ts_format", None)
+ csv_ts_interval = stb.get("csv_ts_interval", None)
+
+ ts_step = stb["timestamp_step"]
+ total_start_ts = stb["start_timestamp"]
+ total_end_ts = total_start_ts + ts_step * insert_rows
+
+
+ all_rows = childs * insert_rows
+ if interlace_rows > 0:
+ # interlace
+
+ if not csv_ts_format:
+ # normal
+ csv_file_name = f"{filepath}{csv_file_prefix}.csv"
+ self.check_stb_csv_correct(csv_file_name, all_rows, interlace_rows)
+ else:
+ # time slice
+ partitions = self.calc_time_slice_partitions(total_start_ts, total_end_ts, ts_step, childs, csv_ts_format, csv_ts_interval)
+ for part in partitions:
+ csv_file_name = f"{filepath}{csv_file_prefix}_{part['start_time']}_{part['end_time']}.csv"
+ self.check_stb_csv_correct(csv_file_name, part['count'], interlace_rows)
+ else:
+ # batch
+ interlace_rows = insert_rows
+ if not csv_ts_format:
+ # normal
+ pass
+ else:
+ # time slice
+ pass
+
# check result
- def checResult(self, jsonFile):
+ def check_result(self, jsonFile):
# csv
with open(jsonFile) as file:
data = json.load(file)
- # read json
+ # read json
database = data["databases"][0]
- out = data["csvPath"]
- dbName = database["dbinfo"]["name"]
stables = database["super_tables"]
- for stable in stables:
- stbName = stable["name"]
- childs = stable["childtable_count"]
- insertRows = stable["insert_rows"]
- interlaceRows = stable["interlace_rows"]
- csvFile = f"{out}{dbName}-{stbName}.csv"
- rows = childs * insertRows
- if interlaceRows == 0:
- interlaceRows = insertRows
- # check csv context correct
- self.checkCorrect(csvFile, rows, interlaceRows)
- def checkExportCsv(self, benchmark, jsonFile, options=""):
+ for stable in stables:
+ # check csv context correct
+ self.check_stb_correct(data, database, stable)
+
+
+ def check_export_csv(self, benchmark, jsonFile, options=""):
+ # clear
+ self.clear_directory()
+
# exec
cmd = f"{benchmark} {options} -f {jsonFile}"
os.system(cmd)
# check result
- self.checResult(jsonFile)
+ self.check_result(jsonFile)
+
def run(self):
# path
benchmark = etool.benchMarkFile()
- # do check
- json = "tools/benchmark/basic/json/exportCsv.json"
- self.checkExportCsv(benchmark, json)
+ # do check interlace normal
+ json = "tools/benchmark/basic/json/csv-interlace-normal.json"
+ self.check_export_csv(benchmark, json)
def stop(self):
tdSql.close()
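
For reference, the per-slice row count that `calc_time_slice_partitions()` derives can be reproduced with a few lines of standalone Python. The numbers below are hypothetical; they only illustrate how a day-sized `csv_ts_interval` splits a data set whose start timestamp is not aligned to the slice boundary.

```python
# Standalone sketch of the per-slice row count: each slice holds
# ceil(covered_millis / timestamp_step) rows per child table.
def rows_in_slice(slice_start, slice_end, total_start, total_end, ts_step):
    start = max(slice_start, total_start)
    end = min(slice_end, total_end)
    if start >= end:
        return 0
    delta = end - start
    return delta // ts_step + (1 if delta % ts_step else 0)

# 1-day slices (86_400_000 ms), one row per minute, data starting at 08:00 local time.
day = 86_400_000
total_start = 8 * 3_600_000                 # 08:00 on day 0, relative milliseconds for simplicity
total_end = total_start + 2_000 * 60_000    # 2000 rows at 1 row/min
print(rows_in_slice(0, day, total_start, total_end, 60_000))        # 960 rows land in day 1
print(rows_in_slice(day, 2 * day, total_start, total_end, 60_000))  # 1040 rows land in day 2
```
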
From 2c29c1a3fd4aa60389063a04b6d154361e607c43 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 09:55:57 +0800
Subject: [PATCH 077/105] test: add csv exporting batch case
---
.../basic/{exportCsv.py => csv-export.py} | 37 +++++++++++++------
tests/parallel_test/cases.task | 3 +-
2 files changed, 27 insertions(+), 13 deletions(-)
rename tests/army/tools/benchmark/basic/{exportCsv.py => csv-export.py} (82%)
diff --git a/tests/army/tools/benchmark/basic/exportCsv.py b/tests/army/tools/benchmark/basic/csv-export.py
similarity index 82%
rename from tests/army/tools/benchmark/basic/exportCsv.py
rename to tests/army/tools/benchmark/basic/csv-export.py
index fececcf7f2..6b71d2c238 100644
--- a/tests/army/tools/benchmark/basic/exportCsv.py
+++ b/tests/army/tools/benchmark/basic/csv-export.py
@@ -53,7 +53,7 @@ class TDTestCase(TBase):
return formatted_time
- def calc_time_slice_partitions(self, total_start_ts, total_end_ts, ts_step, childs, ts_format, ts_interval):
+ def calc_time_slice_partitions(self, total_start_ts, total_end_ts, ts_step, ts_format, ts_interval):
interval_days = int(ts_interval[:-1])
n_days_millis = interval_days * 24 * 60 * 60 * 1000
@@ -82,8 +82,6 @@ class TDTestCase(TBase):
else:
count = delta // ts_step
- count *= childs
-
partitions.append({
"start_ts": current_s,
"end_ts": current_end,
@@ -142,7 +140,7 @@ class TDTestCase(TBase):
def check_stb_correct(self, data, db, stb):
filepath = data["output_path"]
stbName = stb["name"]
- childs = stb["childtable_to"] - stb["childtable_from"]
+ child_count = stb["childtable_to"] - stb["childtable_from"]
insert_rows = stb["insert_rows"]
interlace_rows = stb["interlace_rows"]
csv_file_prefix = stb["csv_file_prefix"]
@@ -154,7 +152,7 @@ class TDTestCase(TBase):
total_end_ts = total_start_ts + ts_step * insert_rows
- all_rows = childs * insert_rows
+ all_rows = child_count * insert_rows
if interlace_rows > 0:
# interlace
@@ -164,20 +162,35 @@ class TDTestCase(TBase):
self.check_stb_csv_correct(csv_file_name, all_rows, interlace_rows)
else:
# time slice
- partitions = self.calc_time_slice_partitions(total_start_ts, total_end_ts, ts_step, childs, csv_ts_format, csv_ts_interval)
+ partitions = self.calc_time_slice_partitions(total_start_ts, total_end_ts, ts_step, csv_ts_format, csv_ts_interval)
for part in partitions:
csv_file_name = f"{filepath}{csv_file_prefix}_{part['start_time']}_{part['end_time']}.csv"
- self.check_stb_csv_correct(csv_file_name, part['count'], interlace_rows)
+ self.check_stb_csv_correct(csv_file_name, part['count'] * child_count, interlace_rows)
else:
# batch
- interlace_rows = insert_rows
+ thread_count = stb["thread_count"]
+ interlace_rows = insert_rows
if not csv_ts_format:
# normal
- pass
+ for i in range(thread_count):
+ csv_file_name = f"{filepath}{csv_file_prefix}_{i + 1}.csv"
+ if i < child_count % thread_count:
+ self.check_stb_csv_correct(csv_file_name, insert_rows * (child_count // thread_count + 1), interlace_rows)
+ else:
+ self.check_stb_csv_correct(csv_file_name, insert_rows * (child_count // thread_count), interlace_rows)
else:
# time slice
- pass
-
+ for i in range(thread_count):
+ partitions = self.calc_time_slice_partitions(total_start_ts, total_end_ts, ts_step, csv_ts_format, csv_ts_interval)
+ for part in partitions:
+ csv_file_name = f"{filepath}{csv_file_prefix}_{i + 1}_{part['start_time']}_{part['end_time']}.csv"
+ if i < child_count % thread_count:
+ slice_rows = part['count'] * (child_count // thread_count + 1)
+ else:
+ slice_rows = part['count'] * (child_count // thread_count)
+
+ self.check_stb_csv_correct(csv_file_name, slice_rows, part['count'])
+
# check result
def check_result(self, jsonFile):
@@ -211,7 +224,7 @@ class TDTestCase(TBase):
benchmark = etool.benchMarkFile()
# do check interlace normal
- json = "tools/benchmark/basic/json/csv-interlace-normal.json"
+ json = "tools/benchmark/basic/json/csv-export.json"
self.check_export_csv(benchmark, json)
def stop(self):
diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task
index 8f986ad445..29d8a1b147 100644
--- a/tests/parallel_test/cases.task
+++ b/tests/parallel_test/cases.task
@@ -91,7 +91,8 @@
,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/default_json.py
,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/demo.py
-,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/exportCsv.py
+,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/csv-export.py
+# ,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/csv-import.py
,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/from-to.py
,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/from-to-continue.py
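
The expectation encoded in the batch branch above is the usual round-robin split: the first `child_count % thread_count` threads own one extra child table, so their CSV files carry `insert_rows` more rows than the rest. A tiny, hypothetical Python illustration:

```python
# Illustration only: expected row count per per-thread CSV file in batch mode.
def rows_per_thread(child_count: int, thread_count: int, insert_rows: int):
    base, extra = divmod(child_count, thread_count)
    return [insert_rows * (base + 1 if i < extra else base) for i in range(thread_count)]

print(rows_per_thread(child_count=10, thread_count=4, insert_rows=100))
# -> [300, 300, 200, 200]  (10 child tables over 4 threads: 3, 3, 2, 2)
```
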
From aff63d0799a67987cdabed9a89132bd55db62f03 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 13:40:57 +0800
Subject: [PATCH 078/105] refactor: cmake files
---
CMakeLists.txt | 25 +++++++++----------------
1 file changed, 9 insertions(+), 16 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index db5b89db3d..2be056ec4e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -11,36 +11,29 @@ if(NOT DEFINED TD_SOURCE_DIR)
endif()
SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
-
set(TD_SUPPORT_DIR "${TD_SOURCE_DIR}/cmake")
set(TD_CONTRIB_DIR "${TD_SOURCE_DIR}/contrib")
include(${TD_SUPPORT_DIR}/cmake.platform)
-include(${TD_SUPPORT_DIR}/cmake.define)
include(${TD_SUPPORT_DIR}/cmake.options)
+include(${TD_SUPPORT_DIR}/cmake.define)
include(${TD_SUPPORT_DIR}/cmake.version)
-
-# contrib
-add_subdirectory(contrib)
+include(${TD_SUPPORT_DIR}/cmake.install)
set_property(GLOBAL PROPERTY GLOBAL_DEPENDS_NO_CYCLES OFF)
-# api
add_library(api INTERFACE)
target_include_directories(api INTERFACE "include/client")
-# src
-if(${BUILD_TEST})
- include(CTest)
- enable_testing()
-endif(${BUILD_TEST})
-
+add_subdirectory(contrib)
add_subdirectory(source)
add_subdirectory(tools)
add_subdirectory(utils)
-add_subdirectory(examples/c)
add_subdirectory(tests)
-include(${TD_SUPPORT_DIR}/cmake.install)
-
-# docs
add_subdirectory(docs/doxgen)
+
+if(${BUILD_TEST})
+ include(CTest)
+ enable_testing()
+ add_subdirectory(examples/c)
+endif(${BUILD_TEST})
\ No newline at end of file
From e59db69c240b72aa59ba4d39d753740e0dccbef3 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 13:46:15 +0800
Subject: [PATCH 079/105] refactor: cmake files
---
utils/test/c/CMakeLists.txt | 11 ++++++++++-
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/utils/test/c/CMakeLists.txt b/utils/test/c/CMakeLists.txt
index b68476add8..f9e2938f56 100644
--- a/utils/test/c/CMakeLists.txt
+++ b/utils/test/c/CMakeLists.txt
@@ -1,5 +1,4 @@
add_executable(tmq_demo tmqDemo.c)
-add_dependencies(tmq_demo ${TAOS_LIB})
add_executable(tmq_sim tmqSim.c)
add_executable(create_table createTable.c)
add_executable(tmq_taosx_ci tmq_taosx_ci.c)
@@ -48,6 +47,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_demo
PUBLIC ${TAOS_LIB}
@@ -55,6 +55,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_sim
PUBLIC ${TAOS_LIB_PLATFORM_SPEC}
@@ -62,6 +63,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_ts5466
PUBLIC ${TAOS_LIB}
@@ -69,6 +71,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32187
PUBLIC ${TAOS_LIB}
@@ -76,6 +79,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td32471
PUBLIC ${TAOS_LIB}
@@ -83,6 +87,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_td33798
PUBLIC ${TAOS_LIB}
@@ -90,6 +95,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_poll_test
PUBLIC ${TAOS_LIB}
@@ -104,6 +110,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_ts5776
PUBLIC ${TAOS_LIB}
@@ -111,6 +118,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_taosx_ci
PUBLIC ${TAOS_LIB}
@@ -118,6 +126,7 @@ target_link_libraries(
PUBLIC common
PUBLIC os
)
+
target_link_libraries(
tmq_offset_test
PUBLIC ${TAOS_LIB}
From e321a3be12ad5d960f02a1b79323c7ee6dab1d55 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 13:49:31 +0800
Subject: [PATCH 080/105] refactor: cmake files
---
source/libs/function/test/runUdf.c | 5 +----
tests/script/tsim/sync/3Replica1VgElect.sim | 17 +++++++----------
utils/CMakeLists.txt | 5 ++++-
3 files changed, 12 insertions(+), 15 deletions(-)
diff --git a/source/libs/function/test/runUdf.c b/source/libs/function/test/runUdf.c
index f28b44d1b8..39f547cc06 100644
--- a/source/libs/function/test/runUdf.c
+++ b/source/libs/function/test/runUdf.c
@@ -1,6 +1,3 @@
-#include
-#include
-#include
#include "uv.h"
#include "fnLog.h"
@@ -218,4 +215,4 @@ int main(int argc, char *argv[]) {
fnError("failed to close udfc");
return -1;
}
-}
+}
\ No newline at end of file
diff --git a/tests/script/tsim/sync/3Replica1VgElect.sim b/tests/script/tsim/sync/3Replica1VgElect.sim
index 6ebee885a8..353ec2c792 100644
--- a/tests/script/tsim/sync/3Replica1VgElect.sim
+++ b/tests/script/tsim/sync/3Replica1VgElect.sim
@@ -91,19 +91,19 @@ endi
if $data[0][4] == leader then
if $data[0][7] == follower then
if $data[0][10] == follower then
- print ---- vgroup $data[0][0] leader locate on dnode $data[0][3]
+ print ---- vgroup $data[0][0] leader locate on dnode $data[0][4]
endi
endi
-elif $data[0][6] == leader then
- if $data[0][7] == follower then
- if $data[0][10] == follower then
- print ---- vgroup $data[0][0] leader locate on dnode $data[0][5]
+elif $data[0][7] == leader then
+ if $data[0][10] == follower then
+ if $data[0][4] == follower then
+ print ---- vgroup $data[0][0] leader locate on dnode $data[0][7]
endi
endi
elif $data[0][10] == leader then
if $data[0][4] == follower then
if $data[0][7] == follower then
- print ---- vgroup $data[0][0] leader locate on dnode $data[0][7]
+ print ---- vgroup $data[0][0] leader locate on dnode $data[0][10]
endi
endi
else
@@ -461,7 +461,4 @@ endi
system sh/exec.sh -n dnode1 -s stop -x SIGINT
system sh/exec.sh -n dnode2 -s stop -x SIGINT
system sh/exec.sh -n dnode3 -s stop -x SIGINT
-system sh/exec.sh -n dnode4 -s stop -x SIGINT
-
-
-
+system sh/exec.sh -n dnode4 -s stop -x SIGINT
\ No newline at end of file
diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt
index 9872a9dc55..721da32bfc 100644
--- a/utils/CMakeLists.txt
+++ b/utils/CMakeLists.txt
@@ -1,6 +1,9 @@
# ADD_SUBDIRECTORY(examples/c)
ADD_SUBDIRECTORY(tsim)
-ADD_SUBDIRECTORY(test/c)
+
+IF(${BUILD_TEST})
+ ADD_SUBDIRECTORY(test/c)
+ENDIF(${BUILD_TEST})
# ADD_SUBDIRECTORY(comparisonTest/tdengine)
IF(NOT "${TSZ_ENABLED}" MATCHES "false")
From cc220ca3aee7192125df15f8e0a633a54e5bdcea Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 14:10:56 +0800
Subject: [PATCH 081/105] feat: change the output path parameter to output_dir
---
tests/army/tools/benchmark/basic/csv-export.py | 6 +++---
tools/taos-tools/src/benchCsv.c | 8 ++++----
tools/taos-tools/src/benchJsonOpt.c | 2 +-
3 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/tests/army/tools/benchmark/basic/csv-export.py b/tests/army/tools/benchmark/basic/csv-export.py
index 6b71d2c238..702490d6ed 100644
--- a/tests/army/tools/benchmark/basic/csv-export.py
+++ b/tests/army/tools/benchmark/basic/csv-export.py
@@ -122,7 +122,7 @@ class TDTestCase(TBase):
if batch != interlace_rows:
tdLog.exit(f"interlace_rows invalid. tbName={name} actual={batch} expected={interlace_rows} i={count} csv_file_name={csv_file_name}")
batch = 1
- name = row[tbname_idx]
+ name = row[tbname_idx]
# count ++
count += 1
# batch
@@ -136,9 +136,9 @@ class TDTestCase(TBase):
tdLog.info(f"Check generate csv file successfully. csv_file_name={csv_file_name} count={count} interlace_rows={batch}")
- # check correct
+ # check correct
def check_stb_correct(self, data, db, stb):
- filepath = data["output_path"]
+ filepath = data["output_dir"]
stbName = stb["name"]
child_count = stb["childtable_to"] - stb["childtable_from"]
insert_rows = stb["insert_rows"]
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index b498214468..0bb47b0888 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -371,7 +371,7 @@ int csvGenCreateDbSql(SDataBase* db, char* buf, int size) {
static int csvExportCreateDbSql(CsvWriteMeta* write_meta, FILE* fp) {
- char buf[LARGE_BUFF_LEN] = {};
+ char buf[LARGE_BUFF_LEN] = {0};
int ret = 0;
int length = 0;
@@ -547,7 +547,7 @@ int csvGenCreateStbSql(SDataBase* db, SSuperTable* stb, char* buf, int size) {
static int csvExportCreateStbSql(CsvWriteMeta* write_meta, FILE* fp) {
- char buf[4096] = {};
+ char buf[4096] = {0};
int ret = 0;
int length = 0;
@@ -572,7 +572,7 @@ static int csvExportCreateStbSql(CsvWriteMeta* write_meta, FILE* fp) {
static int csvExportCreateSql(CsvWriteMeta* write_meta) {
- char fullname[MAX_PATH_LEN] = {};
+ char fullname[MAX_PATH_LEN] = {0};
int ret = 0;
int length = 0;
FILE* fp = NULL;
@@ -1078,7 +1078,7 @@ static void* csvGenStbThread(void* arg) {
uint64_t ctb_idx = 0;
int ret = 0;
CsvFileHandle* fhdl = NULL;
- char fullname[MAX_PATH_LEN] = {};
+ char fullname[MAX_PATH_LEN] = {0};
uint64_t total_rows = 0;
uint64_t pre_total_rows = 0;
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index 83edc5c6ef..49b5a6529d 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1646,7 +1646,7 @@ static int getMetaFromCommonJsonFile(tools_cJSON *json) {
}
// output dir
- tools_cJSON* opp = tools_cJSON_GetObjectItem(json, "output_path");
+ tools_cJSON* opp = tools_cJSON_GetObjectItem(json, "output_dir");
if (opp && opp->type == tools_cJSON_String && opp->valuestring != NULL) {
g_arguments->output_path = opp->valuestring;
} else {
From 39a4444124c164180bee3e56f3d8d854415ee5df Mon Sep 17 00:00:00 2001
From: haoranchen
Date: Thu, 6 Mar 2025 14:29:58 +0800
Subject: [PATCH 082/105] Update 12-multi.md
---
docs/zh/08-operation/12-multi.md | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/docs/zh/08-operation/12-multi.md b/docs/zh/08-operation/12-multi.md
index 7a8a429783..6afc7f59b4 100644
--- a/docs/zh/08-operation/12-multi.md
+++ b/docs/zh/08-operation/12-multi.md
@@ -124,8 +124,8 @@ s3migrate database ;
当 TSDB 时序数据超过 `s3_keeplocal` 参数指定的时间,相关的数据文件会被切分成多个文件块,每个文件块的默认大小是 512M 字节 (`s3_chunkpages * tsdb_pagesize`)。除了最后一个文件块保留在本地文件系统外,其余的文件块会被上传到对象存储服务。
-```math
-\text{上传次数} = \frac{\text{数据文件大小}}{\text{s3\_chunkpages} \times \text{tsdb\_pagesize}} - 1
+```text
+上传次数 = 数据文件大小 / (s3_chunkpages * tsdb_pagesize) - 1
```
在创建数据库时,可以通过 `s3_chunkpages` 参数调整每个文件块的大小,从而控制每个数据文件的上传次数。
@@ -138,8 +138,8 @@ s3migrate database ;
相邻的多个数据页会作为一个数据块从对象存储下载一次,以减少从对象存储下载的次数。每个数据页的大小,在创建数据库时,通过 `tsdb_pagesize` 参数指定,默认 4K 字节。
-```math
-\text{下载次数} = \text{查询需要的数据块数量} - \text{已缓存的数据块数量}
+```text
+下载次数 = 查询需要的数据块数量 - 已缓存的数据块数量
```
页缓存是内存缓存,节点重启后,再次查询需要重新下载数据。缓存采用 LRU (Least Recently Used) 策略,当缓存空间不足时,最近最少使用的数据将被淘汰。缓存的大小可以通过 `s3PageCacheSize` 参数进行调整,通常来说,缓存越大,下载次数越少。
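A quick worked example of the two formulas above may help; the file size, chunk size, and block counts below are illustrative assumptions only.

```python
# Illustrative arithmetic for the two formulas above; all numbers are assumptions.
data_file_size_mb = 2048          # assumed data file size (MB)
chunk_size_mb = 512               # s3_chunkpages * tsdb_pagesize, default ~512 MB

# upload count = data file size / (s3_chunkpages * tsdb_pagesize) - 1
upload_count = data_file_size_mb // chunk_size_mb - 1
print("upload count:", upload_count)      # 2048 / 512 - 1 = 3

# download count = data blocks needed by the query - data blocks already cached
blocks_needed = 100               # assumed blocks required by the query
blocks_cached = 40                # assumed blocks already in the page cache
print("download count:", blocks_needed - blocks_cached)   # 60
```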
From 952053598cd03c5e8abfa775e4211a3e5386e28e Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 6 Mar 2025 14:47:44 +0800
Subject: [PATCH 083/105] enh: TD-33960-improve-sync-log
---
source/dnode/vnode/src/vnd/vnodeSync.c | 9 ++++++---
source/libs/sync/src/syncPipeline.c | 6 ++++--
2 files changed, 10 insertions(+), 5 deletions(-)
diff --git a/source/dnode/vnode/src/vnd/vnodeSync.c b/source/dnode/vnode/src/vnd/vnodeSync.c
index a7e8a43fae..2a06686a31 100644
--- a/source/dnode/vnode/src/vnd/vnodeSync.c
+++ b/source/dnode/vnode/src/vnd/vnodeSync.c
@@ -169,8 +169,9 @@ void vnodeProposeCommitOnNeed(SVnode *pVnode, bool atExit) {
rpcFreeCont(rpcMsg.pCont);
rpcMsg.pCont = NULL;
} else {
- if (tmsgPutToQueue(&pVnode->msgCb, WRITE_QUEUE, &rpcMsg) < 0) {
- vTrace("vgId:%d, failed to put vnode commit to queue since %s", pVnode->config.vgId, terrstr());
+ int32_t code = 0;
+ if ((code = tmsgPutToQueue(&pVnode->msgCb, WRITE_QUEUE, &rpcMsg)) < 0) {
+ vError("vgId:%d, failed to put vnode commit to write_queue since %s", pVnode->config.vgId, tstrerror(code));
}
}
}
@@ -449,7 +450,9 @@ static int32_t vnodeSyncApplyMsg(const SSyncFSM *pFsm, SRpcMsg *pMsg, const SFsm
pVnode->config.vgId, pFsm, pMeta->index, pMeta->term, pMsg->info.conn.applyIndex, pMeta->isWeak, pMeta->code,
pMeta->state, syncStr(pMeta->state), TMSG_INFO(pMsg->msgType), pMsg->code);
- return tmsgPutToQueue(&pVnode->msgCb, APPLY_QUEUE, pMsg);
+ int32_t code = tmsgPutToQueue(&pVnode->msgCb, APPLY_QUEUE, pMsg);
+ if (code < 0) vError("vgId:%d, failed to put into apply_queue since %s", pVnode->config.vgId, tstrerror(code));
+ return code;
}
static int32_t vnodeSyncCommitMsg(const SSyncFSM *pFsm, SRpcMsg *pMsg, SFsmCbMeta *pMeta) {
diff --git a/source/libs/sync/src/syncPipeline.c b/source/libs/sync/src/syncPipeline.c
index 8d81a03344..34f48d5960 100644
--- a/source/libs/sync/src/syncPipeline.c
+++ b/source/libs/sync/src/syncPipeline.c
@@ -733,9 +733,11 @@ int32_t syncFsmExecute(SSyncNode* pNode, SSyncFSM* pFsm, ESyncState role, SyncTe
if (retry) {
taosMsleep(10);
if (code == TSDB_CODE_OUT_OF_RPC_MEMORY_QUEUE) {
- sError("vgId:%d, failed to execute fsm since %s. index:%" PRId64, pNode->vgId, terrstr(), pEntry->index);
+ sError("vgId:%d, will retry to execute fsm after 10ms, last error is %s. index:%" PRId64, pNode->vgId,
+ tstrerror(code), pEntry->index);
} else {
- sDebug("vgId:%d, retry on fsm commit since %s. index:%" PRId64, pNode->vgId, terrstr(), pEntry->index);
+ sDebug("vgId:%d, will retry to execute fsm after 10ms, last error is %s. index:%" PRId64, pNode->vgId,
+ tstrerror(code), pEntry->index);
}
}
} while (retry);
From 31376ba71494e4a5bedcf4a7d4a7508db797fa14 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 15:17:43 +0800
Subject: [PATCH 084/105] docs: csv-related parameters descriptions
---
.../14-reference/02-tools/10-taosbenchmark.md | 26 +++++++++++++++-
.../14-reference/02-tools/10-taosbenchmark.md | 31 +++++++++++++++++--
2 files changed, 54 insertions(+), 3 deletions(-)
diff --git a/docs/en/14-reference/02-tools/10-taosbenchmark.md b/docs/en/14-reference/02-tools/10-taosbenchmark.md
index cfc92b4e0b..3c1401de68 100644
--- a/docs/en/14-reference/02-tools/10-taosbenchmark.md
+++ b/docs/en/14-reference/02-tools/10-taosbenchmark.md
@@ -188,9 +188,12 @@ taosBenchmark -A INT,DOUBLE,NCHAR,BINARY\(16\)
The parameters listed in this section apply to all functional modes.
-- **filetype**: The function to test, possible values are `insert`, `query`, and `subscribe`. Corresponding to insert, query, and subscribe functions. Only one can be specified in each configuration file.
+- **filetype**: The function to test, possible values are `insert`, `query`, `subscribe`, and `csvfile`, corresponding to the insert, query, subscribe, and csv file generation functions. Only one can be specified in each configuration file.
+
- **cfgdir**: Directory where the TDengine client configuration file is located, default path is /etc/taos.
+- **output_dir**: The directory for output files. When the filetype is `csvfile`, it is the directory where the generated csv files are saved. The default value is ./output/.
+
- **host**: Specifies the FQDN of the TDengine server to connect to, default value is localhost.
- **port**: The port number of the TDengine server to connect to, default value is 6030.
@@ -283,6 +286,27 @@ Parameters related to supertable creation are configured in the `super_tables` s
- **repeat_ts_max** : Numeric type, when composite primary key is enabled, specifies the maximum number of records with the same timestamp to be generated
- **sqls** : Array of strings type, specifies the array of sql to be executed after the supertable is successfully created, the table name specified in sql must be prefixed with the database name, otherwise an unspecified database error will occur
+- **csv_file_prefix**: String type, sets the prefix for the names of the generated csv files. Default value is "data".
+
+- **csv_ts_format**: String type, sets the format of the time string in the names of the generated csv files, following the `strftime` format standard. If not set, files will not be split by time intervals. Supported patterns include:
+ - %Y: Year as a four-digit number (e.g., 2025)
+ - %m: Month as a two-digit number (01 to 12)
+ - %d: Day of the month as a two-digit number (01 to 31)
+ - %H: Hour in 24-hour format as a two-digit number (00 to 23)
+ - %M: Minute as a two-digit number (00 to 59)
+ - %S: Second as a two-digit number (00 to 59)
+
+- **csv_ts_interval**: String type, sets the time interval for splitting generated csv file names. Supports daily, hourly, minute, and second intervals such as 1d/2h/30m/40s. The default value is "1d".
+
+- **csv_output_header**: String type, sets whether the generated csv files should contain column header descriptions. The default value is "true".
+
+- **csv_tbname_alias**: String type, sets the alias for the tbname field in the column header descriptions of csv files. The default value is "device_id".
+
+- **csv_compress_level**: String type, sets the compression level when generating csv files and automatically compressing them into gzip format. Possible values are:
+ - none: No compression
+ - fast: gzip level 1 compression
+ - balance: gzip level 6 compression
+ - best: gzip level 9 compression
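As a rough illustration of how `csv_file_prefix`, `csv_ts_format`, and `csv_ts_interval` interact, the sketch below derives a time-sliced file name in Python; the exact naming scheme used by taosBenchmark may differ, and the concrete values are assumptions.

```python
# Hypothetical sketch of deriving a time-sliced csv file name from the
# parameters documented above; not taosBenchmark's actual implementation.
from datetime import datetime, timedelta

csv_file_prefix = "data"        # csv_file_prefix (default)
csv_ts_format = "%Y%m%d"        # csv_ts_format, a strftime pattern
csv_ts_interval = "1d"          # csv_ts_interval: 1d / 2h / 30m / 40s

def interval_to_timedelta(interval: str) -> timedelta:
    """Convert values such as 1d/2h/30m/40s into a timedelta."""
    units = {"d": "days", "h": "hours", "m": "minutes", "s": "seconds"}
    return timedelta(**{units[interval[-1]]: int(interval[:-1])})

slice_start = datetime(2025, 3, 6)
slice_end = slice_start + interval_to_timedelta(csv_ts_interval)

# e.g. data_20250306.csv for the slice starting on 2025-03-06
file_name = f"{csv_file_prefix}_{slice_start.strftime(csv_ts_format)}.csv"
print(file_name, "covers", slice_start, "to", slice_end)
```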
#### Tag and Data Columns
diff --git a/docs/zh/14-reference/02-tools/10-taosbenchmark.md b/docs/zh/14-reference/02-tools/10-taosbenchmark.md
index 56f9e5b122..9902fa56c9 100644
--- a/docs/zh/14-reference/02-tools/10-taosbenchmark.md
+++ b/docs/zh/14-reference/02-tools/10-taosbenchmark.md
@@ -93,14 +93,17 @@ taosBenchmark -f
本节所列参数适用于所有功能模式。
-- **filetype**:功能分类,可选值为 `insert`、`query` 和 `subscribe`。分别对应插入、查询和订阅功能。每个配置文件中只能指定其中之一。
+- **filetype**:功能分类,可选值为 `insert`、`query`、`subscribe` 和 `csvfile`。分别对应插入、查询、订阅和生成 csv 文件功能。每个配置文件中只能指定其中之一。
+
- **cfgdir**:TDengine 客户端配置文件所在的目录,默认路径是 /etc/taos 。
+- **output_dir**:指定输出文件的目录,当功能分类是 `csvfile` 时,指生成的 csv 文件的保存目录,默认值为 ./output/ 。
+
- **host**:指定要连接的 TDengine 服务端的 FQDN,默认值为 localhost 。
- **port**:要连接的 TDengine 服务器的端口号,默认值为 6030 。
-- **user**:用于连接 TDengine 服务端的用户名,默认为 root 。
+- **user**:用于连接 TDengine 服务端的用户名,默认值为 root 。
- **password**:用于连接 TDengine 服务端的密码,默认值为 taosdata。
@@ -184,10 +187,34 @@ taosBenchmark -f
- **tags_file**:仅当 insert_mode 为 taosc,rest 的模式下生效。最终的 tag 的数值与 childtable_count 有关,如果 csv 文件内的 tag 数据行小于给定的子表数量,那么会循环读取 csv 文件数据直到生成 childtable_count 指定的子表数量;否则则只会读取 childtable_count 行 tag 数据。也即最终生成的子表数量为二者取小。
- **primary_key**:指定超级表是否有复合主键,取值 1 和 0,复合主键列只能是超级表的第二列,指定生成复合主键后要确保第二列符合复合主键的数据类型,否则会报错。
+
- **repeat_ts_min**:数值类型,复合主键开启情况下指定生成相同时间戳记录的最小个数,生成相同时间戳记录的个数是在范围[repeat_ts_min, repeat_ts_max] 内的随机值,最小值等于最大值时为固定个数。
+
- **repeat_ts_max**:数值类型,复合主键开启情况下指定生成相同时间戳记录的最大个数。
+
- **sqls**:字符串数组类型,指定超级表创建成功后要执行的 sql 数组,sql 中指定表名前面要带数据库名,否则会报未指定数据库错误。
+- **csv_file_prefix**:字符串类型,设置生成的 csv 文件名称的前缀,默认值为 data 。
+
+- **csv_ts_format**:字符串类型,设置生成的 csv 文件名称中时间字符串的格式,格式遵循 `strftime` 格式标准,如果没有设置表示不按照时间段切分文件。支持的模式有:
+ - %Y: 年份,四位数表示(例如:2025)
+ - %m: 月份,两位数表示(01到12)
+ - %d: 一个月中的日子,两位数表示(01到31)
+ - %H: 小时,24小时制,两位数表示(00到23)
+ - %M: 分钟,两位数表示(00到59)
+ - %S: 秒,两位数表示(00到59)
+
+- **csv_ts_interval**:字符串类型,设置生成的 csv 文件名称中时间段间隔,支持天、小时、分钟、秒级间隔,如 1d/2h/30m/40s,默认值为 1d 。
+
+- **csv_output_header**:字符串类型,设置生成的 csv 文件是否包含列头描述,默认值为 true 。
+
+- **csv_tbname_alias**:字符串类型,设置 csv 文件列头描述中 tbname 字段的别名,默认值为 device_id 。
+
+- **csv_compress_level**:字符串类型,设置生成 csv 并自动压缩成 gzip 格式文件的压缩等级。可选值为:
+ - none:不压缩
+ - fast:gzip 1级压缩
+ - balance:gzip 6级压缩
+ - best:gzip 9级压缩
#### 标签列与数据列
From 2e64d8578e2889523f5e24c8bafdde10e027f849 Mon Sep 17 00:00:00 2001
From: factosea <285808407@qq.com>
Date: Thu, 6 Mar 2025 15:59:58 +0800
Subject: [PATCH 085/105] feat: doc desc
---
docs/en/14-reference/03-taos-sql/24-show.md | 9 +++++----
docs/zh/14-reference/03-taos-sql/24-show.md | 9 +++++----
2 files changed, 10 insertions(+), 8 deletions(-)
diff --git a/docs/en/14-reference/03-taos-sql/24-show.md b/docs/en/14-reference/03-taos-sql/24-show.md
index b46fb41fa0..589caab39d 100644
--- a/docs/en/14-reference/03-taos-sql/24-show.md
+++ b/docs/en/14-reference/03-taos-sql/24-show.md
@@ -127,10 +127,11 @@ Displays created indexes.
## SHOW LOCAL VARIABLES
```sql
-SHOW LOCAL VARIABLES;
+SHOW LOCAL VARIABLES [like pattern];
```
Displays the runtime values of configuration parameters for the current client.
+You can use the like pattern to filter by name.
## SHOW MNODES
@@ -320,11 +321,11 @@ Displays information about all users in the current system, including user-defin
## SHOW CLUSTER VARIABLES (before version 3.0.1.6 it was SHOW VARIABLES)
```sql
-SHOW CLUSTER VARIABLES;
-SHOW DNODE dnode_id VARIABLES;
+SHOW CLUSTER VARIABLES [like pattern];
+SHOW DNODE dnode_id VARIABLES [like pattern];
```
-Displays the runtime values of configuration parameters that need to be the same across nodes in the current system, or you can specify a DNODE to view its configuration parameters.
+Displays the runtime values of configuration parameters that need to be the same across nodes in the current system, or you can specify a DNODE to view its configuration parameters. You can also use the like pattern to filter by name.
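For example, the like filter could be exercised from Python through the taospy client as sketched below; the connection parameters and the pattern are placeholders, not values from this document.

```python
# Sketch of filtering variables by name with a like pattern via taospy.
# Host, credentials, and the pattern are placeholders.
import taos

conn = taos.connect(host="localhost", user="root", password="taosdata")
cursor = conn.cursor()

# Only variables whose name matches the pattern are returned.
cursor.execute("SHOW CLUSTER VARIABLES LIKE '%time%'")
for row in cursor.fetchall():
    print(row)

cursor.close()
conn.close()
```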
## SHOW VGROUPS
diff --git a/docs/zh/14-reference/03-taos-sql/24-show.md b/docs/zh/14-reference/03-taos-sql/24-show.md
index 3898920e65..622cc544b2 100644
--- a/docs/zh/14-reference/03-taos-sql/24-show.md
+++ b/docs/zh/14-reference/03-taos-sql/24-show.md
@@ -128,10 +128,10 @@ SHOW INDEXES FROM [db_name.]tbl_name;
## SHOW LOCAL VARIABLES
```sql
-SHOW LOCAL VARIABLES;
+SHOW LOCAL VARIABLES [like pattern];
```
-显示当前客户端配置参数的运行值。
+显示当前客户端配置参数的运行值,可使用 like pattern 根据 name 进行过滤。
## SHOW MNODES
@@ -322,11 +322,12 @@ SHOW USERS;
## SHOW CLUSTER VARIABLES(3.0.1.6 之前为 SHOW VARIABLES)
```sql
-SHOW CLUSTER VARIABLES;
-SHOW DNODE dnode_id VARIABLES;
+SHOW CLUSTER VARIABLES [like pattern];
+SHOW DNODE dnode_id VARIABLES [like pattern];
```
显示当前系统中各节点需要相同的配置参数的运行值,也可以指定 DNODE 来查看其的配置参数。
+可使用 like pattern 根据 name 进行过滤。
## SHOW VGROUPS
From 77a4b818fb6a3adf0afacbd0fe9f3e554da45cea Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 16:06:11 +0800
Subject: [PATCH 086/105] test: add csv test config file
---
.../benchmark/basic/json/csv-export.json | 172 ++++++++++++++++++
.../tools/benchmark/basic/json/exportCsv.json | 78 --------
2 files changed, 172 insertions(+), 78 deletions(-)
create mode 100644 tests/army/tools/benchmark/basic/json/csv-export.json
delete mode 100644 tests/army/tools/benchmark/basic/json/exportCsv.json
diff --git a/tests/army/tools/benchmark/basic/json/csv-export.json b/tests/army/tools/benchmark/basic/json/csv-export.json
new file mode 100644
index 0000000000..2dbe2300a8
--- /dev/null
+++ b/tests/army/tools/benchmark/basic/json/csv-export.json
@@ -0,0 +1,172 @@
+{
+ "filetype": "csvfile",
+ "output_path": "./csv/",
+ "databases": [
+ {
+ "dbinfo": {
+ "name": "csvdb",
+ "precision": "ms"
+ },
+ "super_tables": [
+ {
+ "name": "interlace-normal",
+ "childtable_count": 1010,
+ "insert_rows": 1000,
+ "interlace_rows": 1,
+ "childtable_prefix": "d",
+ "timestamp_step": 1000000,
+ "start_timestamp":1700000000000,
+ "childtable_from": 1000,
+ "childtable_to": 1010,
+ "csv_file_prefix": "data",
+ "csv_output_header": "true",
+ "csv_tbname_alias": "device_id",
+ "csv_compress_level": "none",
+ "columns": [
+ { "type": "bool", "name": "bc"},
+ { "type": "float", "name": "fc", "min": 1},
+ { "type": "double", "name": "dc", "min":10, "max":10},
+ { "type": "tinyint", "name": "ti"},
+ { "type": "smallint", "name": "si"},
+ { "type": "int", "name": "ic", "fillNull":"false"},
+ { "type": "bigint", "name": "bi"},
+ { "type": "utinyint", "name": "uti"},
+ { "type": "usmallint", "name": "usi", "min":100, "max":120},
+ { "type": "uint", "name": "ui"},
+ { "type": "ubigint", "name": "ubi"},
+ { "type": "binary", "name": "bin", "len": 16},
+ { "type": "nchar", "name": "nch", "len": 16}
+ ],
+ "tags": [
+ {"type": "tinyint", "name": "groupid","max": 10,"min": 1},
+ {"type": "binary", "name": "location", "len": 16,
+ "values": ["San Francisco", "Los Angles", "San Diego",
+ "San Jose", "Palo Alto", "Campbell", "Mountain View",
+ "Sunnyvale", "Santa Clara", "Cupertino"]
+ }
+ ]
+ },
+ {
+ "name": "interlace-timeslice",
+ "childtable_count": 1010,
+ "insert_rows": 1000,
+ "interlace_rows": 1,
+ "childtable_prefix": "d",
+ "timestamp_step": 1000000,
+ "start_timestamp":1700000000000,
+ "childtable_from": 1000,
+ "childtable_to": 1010,
+ "csv_file_prefix": "data",
+ "csv_ts_format": "%Y%m%d",
+ "csv_ts_interval": "1d",
+ "csv_output_header": "true",
+ "csv_tbname_alias": "device_id",
+ "csv_compress_level": "none",
+ "columns": [
+ { "type": "bool", "name": "bc"},
+ { "type": "float", "name": "fc", "min": 1},
+ { "type": "double", "name": "dc", "min":10, "max":10},
+ { "type": "tinyint", "name": "ti"},
+ { "type": "smallint", "name": "si"},
+ { "type": "int", "name": "ic", "fillNull":"false"},
+ { "type": "bigint", "name": "bi"},
+ { "type": "utinyint", "name": "uti"},
+ { "type": "usmallint", "name": "usi", "min":100, "max":120},
+ { "type": "uint", "name": "ui"},
+ { "type": "ubigint", "name": "ubi"},
+ { "type": "binary", "name": "bin", "len": 16},
+ { "type": "nchar", "name": "nch", "len": 16}
+ ],
+ "tags": [
+ {"type": "tinyint", "name": "groupid","max": 10,"min": 1},
+ {"type": "binary", "name": "location", "len": 16,
+ "values": ["San Francisco", "Los Angles", "San Diego",
+ "San Jose", "Palo Alto", "Campbell", "Mountain View",
+ "Sunnyvale", "Santa Clara", "Cupertino"]
+ }
+ ]
+ },
+ {
+ "name": "batch-normal",
+ "childtable_count": 1010,
+ "insert_rows": 1000,
+ "interlace_rows": 0,
+ "thread_count": 8,
+ "childtable_prefix": "d",
+ "timestamp_step": 1000000,
+ "start_timestamp":1700000000000,
+ "childtable_from": 1000,
+ "childtable_to": 1010,
+ "csv_file_prefix": "data",
+ "csv_output_header": "true",
+ "csv_tbname_alias": "device_id",
+ "csv_compress_level": "none",
+ "columns": [
+ { "type": "bool", "name": "bc"},
+ { "type": "float", "name": "fc", "min": 1},
+ { "type": "double", "name": "dc", "min":10, "max":10},
+ { "type": "tinyint", "name": "ti"},
+ { "type": "smallint", "name": "si"},
+ { "type": "int", "name": "ic", "fillNull":"false"},
+ { "type": "bigint", "name": "bi"},
+ { "type": "utinyint", "name": "uti"},
+ { "type": "usmallint", "name": "usi", "min":100, "max":120},
+ { "type": "uint", "name": "ui"},
+ { "type": "ubigint", "name": "ubi"},
+ { "type": "binary", "name": "bin", "len": 16},
+ { "type": "nchar", "name": "nch", "len": 16}
+ ],
+ "tags": [
+ {"type": "tinyint", "name": "groupid","max": 10,"min": 1},
+ {"type": "binary", "name": "location", "len": 16,
+ "values": ["San Francisco", "Los Angles", "San Diego",
+ "San Jose", "Palo Alto", "Campbell", "Mountain View",
+ "Sunnyvale", "Santa Clara", "Cupertino"]
+ }
+ ]
+ },
+ {
+ "name": "batch-timeslice",
+ "childtable_count": 1010,
+ "insert_rows": 1000,
+ "interlace_rows": 0,
+ "thread_count": 8,
+ "childtable_prefix": "d",
+ "timestamp_step": 1000000,
+ "start_timestamp":1700000000000,
+ "childtable_from": 1000,
+ "childtable_to": 1010,
+ "csv_file_prefix": "data",
+ "csv_ts_format": "%Y%m%d",
+ "csv_ts_interval": "1d",
+ "csv_output_header": "true",
+ "csv_tbname_alias": "device_id",
+ "csv_compress_level": "none",
+ "columns": [
+ { "type": "bool", "name": "bc"},
+ { "type": "float", "name": "fc", "min": 1},
+ { "type": "double", "name": "dc", "min":10, "max":10},
+ { "type": "tinyint", "name": "ti"},
+ { "type": "smallint", "name": "si"},
+ { "type": "int", "name": "ic", "fillNull":"false"},
+ { "type": "bigint", "name": "bi"},
+ { "type": "utinyint", "name": "uti"},
+ { "type": "usmallint", "name": "usi", "min":100, "max":120},
+ { "type": "uint", "name": "ui"},
+ { "type": "ubigint", "name": "ubi"},
+ { "type": "binary", "name": "bin", "len": 16},
+ { "type": "nchar", "name": "nch", "len": 16}
+ ],
+ "tags": [
+ {"type": "tinyint", "name": "groupid","max": 10,"min": 1},
+ {"type": "binary", "name": "location", "len": 16,
+ "values": ["San Francisco", "Los Angles", "San Diego",
+ "San Jose", "Palo Alto", "Campbell", "Mountain View",
+ "Sunnyvale", "Santa Clara", "Cupertino"]
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
diff --git a/tests/army/tools/benchmark/basic/json/exportCsv.json b/tests/army/tools/benchmark/basic/json/exportCsv.json
deleted file mode 100644
index 05a7341eb6..0000000000
--- a/tests/army/tools/benchmark/basic/json/exportCsv.json
+++ /dev/null
@@ -1,78 +0,0 @@
-{
- "filetype": "csvfile",
- "csvPath": "./csv/",
- "num_of_records_per_req": 10000,
- "databases": [
- {
- "dbinfo": {
- "name": "csvdb"
- },
- "super_tables": [
- {
- "name": "batchTable",
- "childtable_count": 5,
- "insert_rows": 100,
- "interlace_rows": 0,
- "childtable_prefix": "d",
- "timestamp_step": 10,
- "start_timestamp":1600000000000,
- "columns": [
- { "type": "bool", "name": "bc"},
- { "type": "float", "name": "fc", "min": 1},
- { "type": "double", "name": "dc", "min":10, "max":10},
- { "type": "tinyint", "name": "ti"},
- { "type": "smallint", "name": "si"},
- { "type": "int", "name": "ic", "fillNull":"false"},
- { "type": "bigint", "name": "bi"},
- { "type": "utinyint", "name": "uti"},
- { "type": "usmallint", "name": "usi", "min":100, "max":120},
- { "type": "uint", "name": "ui"},
- { "type": "ubigint", "name": "ubi"},
- { "type": "binary", "name": "bin", "len": 16},
- { "type": "nchar", "name": "nch", "len": 16}
- ],
- "tags": [
- {"type": "tinyint", "name": "groupid","max": 10,"min": 1},
- {"type": "binary", "name": "location", "len": 16,
- "values": ["San Francisco", "Los Angles", "San Diego",
- "San Jose", "Palo Alto", "Campbell", "Mountain View",
- "Sunnyvale", "Santa Clara", "Cupertino"]
- }
- ]
- },
- {
- "name": "interlaceTable",
- "childtable_count": 5,
- "insert_rows": 100,
- "interlace_rows": 10,
- "childtable_prefix": "d",
- "timestamp_step": 1000,
- "start_timestamp":1700000000000,
- "columns": [
- { "type": "bool", "name": "bc"},
- { "type": "float", "name": "fc", "min":16},
- { "type": "double", "name": "dc", "min":16},
- { "type": "tinyint", "name": "ti"},
- { "type": "smallint", "name": "si"},
- { "type": "int", "name": "ic", "fillNull":"false"},
- { "type": "bigint", "name": "bi"},
- { "type": "utinyint", "name": "uti"},
- { "type": "usmallint", "name": "usi"},
- { "type": "uint", "name": "ui"},
- { "type": "ubigint", "name": "ubi"},
- { "type": "binary", "name": "bin", "len": 32},
- { "type": "nchar", "name": "nch", "len": 64}
- ],
- "tags": [
- {"type": "tinyint", "name": "groupid","max": 10,"min": 1},
- {"type": "binary", "name": "location", "len": 16,
- "values": ["San Francisco", "Los Angles", "San Diego",
- "San Jose", "Palo Alto", "Campbell", "Mountain View",
- "Sunnyvale", "Santa Clara", "Cupertino"]
- }
- ]
- }
- ]
- }
- ]
-}
From beb8d55337c7b39a246d709c2f2caee6b18ac0e9 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 16:29:05 +0800
Subject: [PATCH 087/105] fix: add missing header file
---
tools/taos-tools/inc/benchLog.h | 1 +
1 file changed, 1 insertion(+)
diff --git a/tools/taos-tools/inc/benchLog.h b/tools/taos-tools/inc/benchLog.h
index 961a037e3c..ab74aaff75 100644
--- a/tools/taos-tools/inc/benchLog.h
+++ b/tools/taos-tools/inc/benchLog.h
@@ -17,6 +17,7 @@
#define INC_BENCHLOG_H_
#include
+#include
//
// suport thread safe log module
From 95116370b57cbb07e5b6d6c259f5e793a5396778 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 16:45:46 +0800
Subject: [PATCH 088/105] refactor: minor changes
---
README-CN.md | 6 +-
README.md | 2 +-
cmake/cmake.define | 5 +-
source/libs/function/test/CMakeLists.txt | 91 +++++++++++++++++++-----
tests/ci/container_build_newmachine.sh | 2 +-
tests/parallel_test/container_build.sh | 6 +-
6 files changed, 84 insertions(+), 28 deletions(-)
diff --git a/README-CN.md b/README-CN.md
index 40e97de2ba..ad622b3896 100644
--- a/README-CN.md
+++ b/README-CN.md
@@ -162,10 +162,10 @@ make
可以使用Jemalloc作为内存分配器,而不是使用glibc:
```bash
-cmake .. -DJEMALLOC_ENABLED=true
+cmake .. -DJEMALLOC_ENABLED=ON
```
-TDengine构建脚本可以自动检测x86、x86-64、arm64平台上主机的体系结构。
-您也可以通过CPUTYPE选项手动指定架构:
+TDengine构建脚本可以自动检测 x86、x86-64、arm64 平台上主机的体系结构。
+您也可以通过 CPUTYPE 选项手动指定架构:
```bash
cmake .. -DCPUTYPE=aarch64 && cmake --build .
diff --git a/README.md b/README.md
index f7db2a7ea2..9bc6c5dd83 100644
--- a/README.md
+++ b/README.md
@@ -177,7 +177,7 @@ If you want to compile taosKeeper, you need to add the `--DBUILD_KEEPER=true` op
You can use Jemalloc as memory allocator instead of glibc:
```bash
-cmake .. -DJEMALLOC_ENABLED=true
+cmake .. -DJEMALLOC_ENABLED=ON
```
TDengine build script can auto-detect the host machine's architecture on x86, x86-64, arm64 platform.
diff --git a/cmake/cmake.define b/cmake/cmake.define
index dae8020dcc..72c9e84f78 100644
--- a/cmake/cmake.define
+++ b/cmake/cmake.define
@@ -116,7 +116,6 @@ ELSE()
set(VAR_TSZ "TSZ" CACHE INTERNAL "global variant tsz")
ENDIF()
-# force set all platform to JEMALLOC_ENABLED = false
# SET(JEMALLOC_ENABLED OFF)
IF(TD_WINDOWS)
@@ -262,10 +261,10 @@ ENDIF()
IF(TD_LINUX)
IF(${JEMALLOC_ENABLED})
- MESSAGE(STATUS "JEMALLOC_ENABLED Enabled")
+ MESSAGE(STATUS "JEMALLOC Enabled")
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=attributes")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=attributes")
ELSE()
- MESSAGE(STATUS "JEMALLOC_ENABLED Disabled")
+ MESSAGE(STATUS "JEMALLOC Disabled")
ENDIF()
ENDIF()
\ No newline at end of file
diff --git a/source/libs/function/test/CMakeLists.txt b/source/libs/function/test/CMakeLists.txt
index d805adff3a..43107a952a 100644
--- a/source/libs/function/test/CMakeLists.txt
+++ b/source/libs/function/test/CMakeLists.txt
@@ -1,33 +1,78 @@
-set(TD_UDF_INC
+
+
+add_executable(runUdf runUdf.c)
+target_include_directories(
+ runUdf
+ PUBLIC
"${TD_SOURCE_DIR}/include/libs/function"
"${TD_SOURCE_DIR}/contrib/libuv/include"
"${TD_SOURCE_DIR}/include/util"
"${TD_SOURCE_DIR}/include/common"
"${TD_SOURCE_DIR}/include/client"
"${TD_SOURCE_DIR}/include/os"
- "${CMAKE_CURRENT_SOURCE_DIR}/../inc"
+ PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
)
-set(TD_UDF_LIB uv_a PRIVATE os util common nodes function)
-
-add_executable(runUdf runUdf.c)
-target_include_directories(runUdf PUBLIC ${TD_UDF_INC})
-target_link_libraries(runUdf PUBLIC ${TD_UDF_LIB})
+target_link_libraries(
+ runUdf
+ PUBLIC uv_a
+ PRIVATE os util common nodes function
+)
add_library(udf1 STATIC MODULE udf1.c)
-target_include_directories(udf1 PUBLIC ${TD_UDF_INC})
+target_include_directories(
+ udf1
+ PUBLIC
+ "${TD_SOURCE_DIR}/include/libs/function"
+ "${TD_SOURCE_DIR}/include/util"
+ "${TD_SOURCE_DIR}/include/common"
+ "${TD_SOURCE_DIR}/include/client"
+ "${TD_SOURCE_DIR}/include/os"
+ PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
+)
+
target_link_libraries(udf1 PUBLIC os)
-add_library(udf2 STATIC MODULE udf2.c)
-target_include_directories(udf2 PUBLIC ${TD_UDF_INC})
-target_link_libraries(udf2 PUBLIC os)
-
add_library(udf1_dup STATIC MODULE udf1_dup.c)
-target_include_directories(udf1_dup PUBLIC ${TD_UDF_INC})
+target_include_directories(
+ udf1_dup
+ PUBLIC
+ "${TD_SOURCE_DIR}/include/libs/function"
+ "${TD_SOURCE_DIR}/include/util"
+ "${TD_SOURCE_DIR}/include/common"
+ "${TD_SOURCE_DIR}/include/client"
+ "${TD_SOURCE_DIR}/include/os"
+ PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
+)
+
target_link_libraries(udf1_dup PUBLIC os)
+add_library(udf2 STATIC MODULE udf2.c)
+target_include_directories(
+ udf2
+ PUBLIC
+ "${TD_SOURCE_DIR}/include/libs/function"
+ "${TD_SOURCE_DIR}/include/util"
+ "${TD_SOURCE_DIR}/include/common"
+ "${TD_SOURCE_DIR}/include/client"
+ "${TD_SOURCE_DIR}/include/os"
+ PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
+)
+
+target_link_libraries(udf2 PUBLIC os)
+
add_library(udf2_dup STATIC MODULE udf2_dup.c)
-target_include_directories(udf2_dup PUBLIC ${TD_UDF_INC})
+target_include_directories(
+ udf2_dup
+ PUBLIC
+ "${TD_SOURCE_DIR}/include/libs/function"
+ "${TD_SOURCE_DIR}/include/util"
+ "${TD_SOURCE_DIR}/include/common"
+ "${TD_SOURCE_DIR}/include/client"
+ "${TD_SOURCE_DIR}/include/os"
+ PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
+)
+
target_link_libraries(udf2_dup PUBLIC os)
set(TARGET_NAMES
@@ -53,8 +98,20 @@ set(COMPILE_DEFINITIONS
foreach(index RANGE 0 6)
list(GET TARGET_NAMES ${index} target_name)
list(GET COMPILE_DEFINITIONS ${index} compile_def)
+
add_library(${target_name} STATIC MODULE change_udf.c)
- target_include_directories(${target_name} PUBLIC ${TD_UDF_INC})
+ target_include_directories(
+ ${target_name}
+ PUBLIC
+ "${TD_SOURCE_DIR}/include/libs/function"
+ "${TD_SOURCE_DIR}/include/util"
+ "${TD_SOURCE_DIR}/include/common"
+ "${TD_SOURCE_DIR}/include/client"
+ "${TD_SOURCE_DIR}/include/os"
+ PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
+ )
target_compile_definitions(${target_name} PRIVATE ${compile_def})
- target_link_libraries(${target_name} PUBLIC os)
-endforeach()
\ No newline at end of file
+ target_link_libraries(
+ ${target_name} PUBLIC os
+ )
+endforeach()
diff --git a/tests/ci/container_build_newmachine.sh b/tests/ci/container_build_newmachine.sh
index 369429b99f..f0d69e6171 100755
--- a/tests/ci/container_build_newmachine.sh
+++ b/tests/ci/container_build_newmachine.sh
@@ -61,7 +61,7 @@ docker run \
-v /root/go/pkg/mod:/root/go/pkg/mod \
-v /root/.cache/go-build:/root/.cache/go-build \
-v /root/.cos-local.1:/root/.cos-local.2 \
- --rm --ulimit core=-1 taos_test:v1.0 sh -c "pip uninstall taospy -y;pip3 install taospy==2.7.2;cd $REP_DIR;rm -rf debug;mkdir -p debug;cd debug;cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_SANITIZER=1 -DTOOLS_SANITIZE=true $CMAKE_BUILD_TYPE -DTOOLS_BUILD_TYPE=Debug -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0;make -j 10|| exit 1 "
+ --rm --ulimit core=-1 taos_test:v1.0 sh -c "pip uninstall taospy -y; pip3 install taospy==2.7.2; cd $REP_DIR; rm -rf debug ;mkdir -p debug; cd debug; cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=ON -DWEBSOCKET=true -DBUILD_SANITIZER=1 -DTOOLS_SANITIZE=true $CMAKE_BUILD_TYPE -DTOOLS_BUILD_TYPE=Debug -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=OFF; make -j 10|| exit 1 "
# -v ${REP_REAL_PATH}/community/contrib/jemalloc/:${REP_DIR}/community/contrib/jemalloc \
if [[ -d ${WORKDIR}/debugNoSan ]] ;then
diff --git a/tests/parallel_test/container_build.sh b/tests/parallel_test/container_build.sh
index abff489397..35388fc339 100755
--- a/tests/parallel_test/container_build.sh
+++ b/tests/parallel_test/container_build.sh
@@ -81,7 +81,7 @@ docker run \
-v ${REP_REAL_PATH}/community/contrib/pcre2/:${REP_DIR}/community/contrib/pcre2 \
-v ${REP_REAL_PATH}/community/contrib/zlib/:${REP_DIR}/community/contrib/zlib \
-v ${REP_REAL_PATH}/community/contrib/zstd/:${REP_DIR}/community/contrib/zstd \
- --rm --ulimit core=-1 taos_test:v1.0 sh -c "cd $REP_DIR;rm -rf debug;mkdir -p debug;cd debug;cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0 -DCMAKE_EXPORT_COMPILE_COMMANDS=1 ;make -j|| exit 1"
+ --rm --ulimit core=-1 taos_test:v1.0 sh -c "cd $REP_DIR; rm -rf debug; mkdir -p debug; cd debug; cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=ON -DWEBSOCKET=true -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=OFF -DCMAKE_EXPORT_COMPILE_COMMANDS=1 ;make -j|| exit 1"
# -v ${REP_REAL_PATH}/community/contrib/jemalloc/:${REP_DIR}/community/contrib/jemalloc \
if [[ -d ${WORKDIR}/debugNoSan ]] ;then
@@ -133,7 +133,7 @@ docker run \
-v ${REP_REAL_PATH}/community/contrib/pcre2/:${REP_DIR}/community/contrib/pcre2 \
-v ${REP_REAL_PATH}/community/contrib/zlib/:${REP_DIR}/community/contrib/zlib \
-v ${REP_REAL_PATH}/community/contrib/zstd/:${REP_DIR}/community/contrib/zstd \
- --rm --ulimit core=-1 taos_test:v1.0 sh -c "cd $REP_DIR;rm -rf debug;mkdir -p debug;cd debug;cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=false -DWEBSOCKET=true -DBUILD_SANITIZER=1 -DTOOLS_SANITIZE=true -DCMAKE_BUILD_TYPE=Debug -DTOOLS_BUILD_TYPE=Debug -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0;make -j|| exit 1 "
+ --rm --ulimit core=-1 taos_test:v1.0 sh -c "cd $REP_DIR; rm -rf debug; mkdir -p debug; cd debug; cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=ON -DWEBSOCKET=true -DBUILD_SANITIZER=1 -DTOOLS_SANITIZE=true -DCMAKE_BUILD_TYPE=Debug -DTOOLS_BUILD_TYPE=Debug -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=OFF; make -j|| exit 1 "
mv ${REP_REAL_PATH}/debug ${WORKDIR}/debugSan
@@ -168,7 +168,7 @@ date
# -v ${REP_REAL_PATH}/community/contrib/xml2/:${REP_DIR}/community/contrib/xml2 \
# -v ${REP_REAL_PATH}/community/contrib/zlib/:${REP_DIR}/community/contrib/zlib \
# -v ${REP_REAL_PATH}/community/contrib/zstd/:${REP_DIR}/community/contrib/zstd \
-# --rm --ulimit core=-1 taos_test:v1.0 sh -c "pip uninstall taospy -y;pip3 install taospy==2.7.2;cd $REP_DIR;rm -rf debug;mkdir -p debug;cd debug;cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=false -DWEBSOCKET=true -DCMAKE_BUILD_TYPE=Release -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0;make -j || exit 1 "
+# --rm --ulimit core=-1 taos_test:v1.0 sh -c "pip uninstall taospy -y; pip3 install taospy==2.7.2; cd $REP_DIR; rm -rf debug; mkdir -p debug; cd debug; cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=ON -DWEBSOCKET=true -DCMAKE_BUILD_TYPE=Release -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=OFF; make -j || exit 1 "
# mv ${REP_REAL_PATH}/debug ${WORKDIR}/debugRelease
From 2a0584114d44799a749892a48d8f86630d20bd8d Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 17:01:50 +0800
Subject: [PATCH 089/105] fix: compile errors
---
include/common/tcommon.h | 2 +-
source/libs/function/test/CMakeLists.txt | 87 ++++--------------------
2 files changed, 13 insertions(+), 76 deletions(-)
diff --git a/include/common/tcommon.h b/include/common/tcommon.h
index bd5bdb927d..45f3869b1b 100644
--- a/include/common/tcommon.h
+++ b/include/common/tcommon.h
@@ -124,7 +124,7 @@ enum {
TMQ_MSG_TYPE__POLL_RAW_DATA_RSP,
};
-static char* tmqMsgTypeStr[] = {
+static const char* tmqMsgTypeStr[] = {
"data", "meta", "ask ep", "meta data", "wal info", "batch meta", "raw data"
};
diff --git a/source/libs/function/test/CMakeLists.txt b/source/libs/function/test/CMakeLists.txt
index 43107a952a..aa350b12c2 100644
--- a/source/libs/function/test/CMakeLists.txt
+++ b/source/libs/function/test/CMakeLists.txt
@@ -1,18 +1,12 @@
-
+include_directories("${TD_SOURCE_DIR}/include/libs/function")
+include_directories("${TD_SOURCE_DIR}/contrib/libuv/include")
+include_directories("${TD_SOURCE_DIR}/include/util")
+include_directories("${TD_SOURCE_DIR}/include/common")
+include_directories("${TD_SOURCE_DIR}/include/client")
+include_directories("${TD_SOURCE_DIR}/include/os")
+include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../inc")
add_executable(runUdf runUdf.c)
-target_include_directories(
- runUdf
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/contrib/libuv/include"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
target_link_libraries(
runUdf
PUBLIC uv_a
@@ -20,59 +14,15 @@ target_link_libraries(
)
add_library(udf1 STATIC MODULE udf1.c)
-target_include_directories(
- udf1
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
target_link_libraries(udf1 PUBLIC os)
-add_library(udf1_dup STATIC MODULE udf1_dup.c)
-target_include_directories(
- udf1_dup
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
-target_link_libraries(udf1_dup PUBLIC os)
-
add_library(udf2 STATIC MODULE udf2.c)
-target_include_directories(
- udf2
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
-
target_link_libraries(udf2 PUBLIC os)
-add_library(udf2_dup STATIC MODULE udf2_dup.c)
-target_include_directories(
- udf2_dup
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
-)
+add_library(udf1_dup STATIC MODULE udf1_dup.c)
+target_link_libraries(udf1_dup PUBLIC os)
+add_library(udf2_dup STATIC MODULE udf2_dup.c)
target_link_libraries(udf2_dup PUBLIC os)
set(TARGET_NAMES
@@ -98,20 +48,7 @@ set(COMPILE_DEFINITIONS
foreach(index RANGE 0 6)
list(GET TARGET_NAMES ${index} target_name)
list(GET COMPILE_DEFINITIONS ${index} compile_def)
-
add_library(${target_name} STATIC MODULE change_udf.c)
- target_include_directories(
- ${target_name}
- PUBLIC
- "${TD_SOURCE_DIR}/include/libs/function"
- "${TD_SOURCE_DIR}/include/util"
- "${TD_SOURCE_DIR}/include/common"
- "${TD_SOURCE_DIR}/include/client"
- "${TD_SOURCE_DIR}/include/os"
- PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
- )
target_compile_definitions(${target_name} PRIVATE ${compile_def})
- target_link_libraries(
- ${target_name} PUBLIC os
- )
-endforeach()
+ target_link_libraries(${target_name} PUBLIC os)
+endforeach()
\ No newline at end of file
From f0276b6644977632b8f5e3714c70bd33725da1d8 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 17:29:54 +0800
Subject: [PATCH 090/105] docs: format
---
docs/zh/08-operation/14-user.md | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/docs/zh/08-operation/14-user.md b/docs/zh/08-operation/14-user.md
index 467b1c3742..0b97ff61a4 100644
--- a/docs/zh/08-operation/14-user.md
+++ b/docs/zh/08-operation/14-user.md
@@ -17,14 +17,14 @@ create user user_name pass'password' [sysinfo {1|0}] [createdb {1|0}]
相关参数说明如下。
- user_name:用户名最长不超过 23 个字节。
-- password:密码长度必须为 8 到 255 。密码要符合一个要求:至少包含大写字母、小写字母、数字、特殊字符中的三类。特殊字符包括 `! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? | ~ , .`(始自 3.3.5.0 版本),可以通过在taos.cfg中添加参数enableStrongPassword 0关闭这个强制要求,或者通过如下SQL关闭这个强制要求(始自 3.3.6.0 版本)。
+- password:密码长度必须为 8 到 255 个字节。密码至少包含大写字母、小写字母、数字、特殊字符中的三类。特殊字符包括 `! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? | ~ , .`(始自 v3.3.5.0),可以通过在 taos.cfg 中添加参数 `enableStrongPassword 0` 关闭此强制要求,或者通过如下 SQL 关闭(始自 v3.3.6.0)。
```sql
alter all dnode 'EnableStrongPassword' '0'
```
- sysinfo :用户是否可以查看系统信息。1 表示可以查看,0 表示不可以查看。系统信息包括服务端配置信息、服务端各种节点信息,如 dnode、查询节点(qnode)等,以及与存储相关的信息等。默认为可以查看系统信息。
-- createdb:用户是否可以创建数据库。1 表示可以创建,0 表示不可以创建。缺省值为 0。// 从 TDengine 企业版 3.3.2.0 开始支持
+- createdb:用户是否可以创建数据库。1 表示可以创建,0 表示不可以创建。缺省值为 0。从企业版 v3.3.2.0 开始支持。
-如下 SQL 可以创建密码为 abc123!@# 且可以查看系统信息的用户 test。
+如下 SQL 可以创建密码为 `abc123!@#` 且可以查看系统信息的用户 test。
```sql
create user test pass 'abc123!@#' sysinfo 1
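As an editorial illustration of the rule described above (a password of 8 to 255 bytes containing at least three of the four character classes), the standalone checker below mirrors the documented policy; it is not code taken from TDengine, and the special-character list is copied from the paragraph above.

```python
# Editorial sketch of the documented password rule: 8-255 bytes and at least
# three of the four classes (uppercase, lowercase, digit, special character).
SPECIALS = set("!@#$%^&*()-_+=[]{}:;><?|~,.")

def is_strong(password: str) -> bool:
    if not 8 <= len(password.encode("utf-8")) <= 255:
        return False
    classes = [
        any(c.isupper() for c in password),
        any(c.islower() for c in password),
        any(c.isdigit() for c in password),
        any(c in SPECIALS for c in password),
    ]
    return sum(classes) >= 3

print(is_strong("abc123!@#"))   # True: lowercase + digit + special
print(is_strong("abcdefgh"))    # False: only one character class
```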
@@ -59,7 +59,7 @@ alter_user_clause: {
- pass:修改用户密码。
- enable:是否启用用户。1 表示启用此用户,0 表示禁用此用户。
- sysinfo :用户是否可查看系统信息。1 表示可以查看系统信息,0 表示不可以查看系统信息
-- createdb:用户是否可创建数据库。1 表示可以创建数据库,0 表示不可以创建数据库。从 TDengine 企业版 3.3.2.0 开始支持。
+- createdb:用户是否可创建数据库。1 表示可以创建数据库,0 表示不可以创建数据库。从企业版 v3.3.2.0 开始支持。
如下 SQL 禁用 test 用户。
```sql
From 840ce50300982e7a0ce697e90f6bff7884b12820 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 17:36:50 +0800
Subject: [PATCH 091/105] docs: format
---
docs/zh/08-operation/14-user.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/zh/08-operation/14-user.md b/docs/zh/08-operation/14-user.md
index 0b97ff61a4..43444c1ff9 100644
--- a/docs/zh/08-operation/14-user.md
+++ b/docs/zh/08-operation/14-user.md
@@ -21,7 +21,7 @@ create user user_name pass'password' [sysinfo {1|0}] [createdb {1|0}]
```sql
alter all dnode 'EnableStrongPassword' '0'
```
-- sysinfo :用户是否可以查看系统信息。1 表示可以查看,0 表示不可以查看。系统信息包括服务端配置信息、服务端各种节点信息,如 dnode、查询节点(qnode)等,以及与存储相关的信息等。默认为可以查看系统信息。
+- sysinfo:用户是否可以查看系统信息。1 表示可以查看,0 表示不可以查看。系统信息包括服务端配置信息、服务端各种节点信息,如 dnode、查询节点(qnode)等,以及与存储相关的信息等。默认为可以查看系统信息。
- createdb:用户是否可以创建数据库。1 表示可以创建,0 表示不可以创建。缺省值为 0。从企业版 v3.3.2.0 开始支持。
如下 SQL 可以创建密码为 `abc123!@#` 且可以查看系统信息的用户 test。
@@ -58,7 +58,7 @@ alter_user_clause: {
相关参数说明如下。
- pass:修改用户密码。
- enable:是否启用用户。1 表示启用此用户,0 表示禁用此用户。
-- sysinfo :用户是否可查看系统信息。1 表示可以查看系统信息,0 表示不可以查看系统信息
+- sysinfo:用户是否可查看系统信息。1 表示可以查看系统信息,0 表示不可以查看系统信息
- createdb:用户是否可创建数据库。1 表示可以创建数据库,0 表示不可以创建数据库。从企业版 v3.3.2.0 开始支持。
如下 SQL 禁用 test 用户。
From 22be5e642e9aa7d9d558455fd7d4a649fc837086 Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 6 Mar 2025 17:52:15 +0800
Subject: [PATCH 092/105] feat: TS-5927-fix-review
---
source/common/src/msg/tmsg.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/source/common/src/msg/tmsg.c b/source/common/src/msg/tmsg.c
index bda197e93d..d4989bb27d 100644
--- a/source/common/src/msg/tmsg.c
+++ b/source/common/src/msg/tmsg.c
@@ -627,7 +627,7 @@ int32_t tDeserializeSClientHbBatchRsp(void *buf, int32_t bufLen, SClientHbBatchR
if (!tDecodeIsEnd(&decoder)) {
TAOS_CHECK_EXIT(tDecodeI8(&decoder, &pBatchRsp->enableStrongPass));
} else {
- pBatchRsp->enableStrongPass = 1;
+ pBatchRsp->enableStrongPass = 0;
}
tEndDecode(&decoder);
From 5821a1c5e1469ab2211a0fa2326b6facfdde97ce Mon Sep 17 00:00:00 2001
From: Haojun Liao
Date: Thu, 6 Mar 2025 14:53:11 +0800
Subject: [PATCH 093/105] chore(analytics): add tdgpt into TDengine repo.
---
tools/tdgpt/README.md | 135 ++++
tools/tdgpt/cfg/taosanode.ini | 81 ++
tools/tdgpt/cfg/taosanoded.service | 22 +
tools/tdgpt/script/install.sh | 748 ++++++++++++++++++
tools/tdgpt/script/release.sh | 100 +++
tools/tdgpt/script/start.sh | 4 +
tools/tdgpt/script/stop.sh | 4 +
tools/tdgpt/script/uninstall.sh | 220 ++++++
tools/tdgpt/taosanalytics/__init__.py | 0
tools/tdgpt/taosanalytics/algo/__init__.py | 0
tools/tdgpt/taosanalytics/algo/ad/__init__.py | 0
.../taosanalytics/algo/ad/autoencoder.py | 117 +++
tools/tdgpt/taosanalytics/algo/ad/grubbs.py | 42 +
tools/tdgpt/taosanalytics/algo/ad/iqr.py | 29 +
tools/tdgpt/taosanalytics/algo/ad/ksigma.py | 47 ++
tools/tdgpt/taosanalytics/algo/ad/lof.py | 43 +
tools/tdgpt/taosanalytics/algo/ad/shesd.py | 44 ++
tools/tdgpt/taosanalytics/algo/anomaly.py | 49 ++
tools/tdgpt/taosanalytics/algo/fc/__init__.py | 0
tools/tdgpt/taosanalytics/algo/fc/arima.py | 114 +++
.../taosanalytics/algo/fc/holtwinters.py | 79 ++
tools/tdgpt/taosanalytics/algo/forecast.py | 110 +++
tools/tdgpt/taosanalytics/app.py | 163 ++++
tools/tdgpt/taosanalytics/conf.py | 105 +++
tools/tdgpt/taosanalytics/misc/__init__.py | 0
tools/tdgpt/taosanalytics/model.py | 22 +
tools/tdgpt/taosanalytics/service.py | 110 +++
tools/tdgpt/taosanalytics/servicemgmt.py | 120 +++
tools/tdgpt/taosanalytics/test/__init__.py | 0
.../tdgpt/taosanalytics/test/anomaly_test.py | 170 ++++
.../tdgpt/taosanalytics/test/forecast_test.py | 115 +++
.../tdgpt/taosanalytics/test/install_test.py | 27 +
.../taosanalytics/test/restful_api_test.py | 259 ++++++
tools/tdgpt/taosanalytics/test/unit_test.py | 106 +++
tools/tdgpt/taosanalytics/util.py | 126 +++
35 files changed, 3311 insertions(+)
create mode 100644 tools/tdgpt/README.md
create mode 100755 tools/tdgpt/cfg/taosanode.ini
create mode 100755 tools/tdgpt/cfg/taosanoded.service
create mode 100755 tools/tdgpt/script/install.sh
create mode 100755 tools/tdgpt/script/release.sh
create mode 100755 tools/tdgpt/script/start.sh
create mode 100755 tools/tdgpt/script/stop.sh
create mode 100755 tools/tdgpt/script/uninstall.sh
create mode 100644 tools/tdgpt/taosanalytics/__init__.py
create mode 100644 tools/tdgpt/taosanalytics/algo/__init__.py
create mode 100644 tools/tdgpt/taosanalytics/algo/ad/__init__.py
create mode 100644 tools/tdgpt/taosanalytics/algo/ad/autoencoder.py
create mode 100644 tools/tdgpt/taosanalytics/algo/ad/grubbs.py
create mode 100644 tools/tdgpt/taosanalytics/algo/ad/iqr.py
create mode 100644 tools/tdgpt/taosanalytics/algo/ad/ksigma.py
create mode 100644 tools/tdgpt/taosanalytics/algo/ad/lof.py
create mode 100644 tools/tdgpt/taosanalytics/algo/ad/shesd.py
create mode 100644 tools/tdgpt/taosanalytics/algo/anomaly.py
create mode 100644 tools/tdgpt/taosanalytics/algo/fc/__init__.py
create mode 100644 tools/tdgpt/taosanalytics/algo/fc/arima.py
create mode 100644 tools/tdgpt/taosanalytics/algo/fc/holtwinters.py
create mode 100644 tools/tdgpt/taosanalytics/algo/forecast.py
create mode 100644 tools/tdgpt/taosanalytics/app.py
create mode 100644 tools/tdgpt/taosanalytics/conf.py
create mode 100644 tools/tdgpt/taosanalytics/misc/__init__.py
create mode 100644 tools/tdgpt/taosanalytics/model.py
create mode 100644 tools/tdgpt/taosanalytics/service.py
create mode 100644 tools/tdgpt/taosanalytics/servicemgmt.py
create mode 100644 tools/tdgpt/taosanalytics/test/__init__.py
create mode 100644 tools/tdgpt/taosanalytics/test/anomaly_test.py
create mode 100644 tools/tdgpt/taosanalytics/test/forecast_test.py
create mode 100644 tools/tdgpt/taosanalytics/test/install_test.py
create mode 100644 tools/tdgpt/taosanalytics/test/restful_api_test.py
create mode 100644 tools/tdgpt/taosanalytics/test/unit_test.py
create mode 100644 tools/tdgpt/taosanalytics/util.py
diff --git a/tools/tdgpt/README.md b/tools/tdgpt/README.md
new file mode 100644
index 0000000000..0b61f8bcef
--- /dev/null
+++ b/tools/tdgpt/README.md
@@ -0,0 +1,135 @@
+# Table of Contents
+
+1. [Introduction](#1-introduction)
+1. [Documentation](#2-documentation)
+1. [Prerequisites](#3-prerequisites)
+1. [Building](#4-building)
+1. [Packaging](#5-packaging)
+1. [Installation](#6-installing)
+1. [Running](#7-running)
+1. [Testing](#8-testing)
+1. [Releasing](#9-releasing)
+1. [CI/CD](#10-cicd)
+1. [Coverage](#11-coverage)
+1. [Contributing](#12-contributing)
+
+# 1. Introduction
+tdanalytics: an analytics platform for TDengine
+
+# 2. Documentation
+
+For user manual, system design and architecture, please refer to [TDengine Documentation](https://docs.tdengine.com/next) ([TDengine 文档](https://docs.taosdata.com/next)).
+
+# 3. Prerequisites
+
+List the software and tools required to work on the project.
+
+- python 3.10.12+ (for test)
+
+Step-by-step instructions to set up the prerequisite software.
+
+## 3.1 Install Python3.10
+Make sure Python 3.10 or above is available before installing the anode on your system.
+
+On Ubuntu, use the following instructions to install Python 3.10.
+
+```
+sudo apt-get install software-properties-common
+sudo add-apt-repository ppa:deadsnakes/ppa
+sudo apt update
+sudo apt install python3.10
+sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.10 2
+sudo update-alternatives --config python3
+sudo apt install python3.10-venv
+sudo apt install python3.10-dev
+```
+
+Install pip for Python 3.10:
+
+```bash
+curl -sS https://bootstrap.pypa.io/get-pip.py | python3.10
+```
+
+Add ``~/.local/bin`` to your ``PATH`` in ``~/.bashrc`` or ``~/.bash_profile``:
+
+```bash
+export PATH=$PATH:~/.local/bin
+```
+
+# 4. Building
+There is no need to build the taosanode, since it is implemented in Python, which is an interpreted language.
+
+
+# 5. Packaging
+In the base directory, use the following command to build a tarball.
+
+```bash
+cd script && ./release.sh
+```
+
+After the packaging is completed, you will find the tarball in the `release` directory.
+
+```bash
+ls -lht /root/tdanalytics/release
+
+-rw-rw-r-- 1 root root 74K Feb 21 17:04 TDengine-enterprise-anode-1.0.1.tar.gz
+```
+
+# 6. Installing
+
+## 6.1 Install taosanode
+
+Please use the following command to install taosanode in your system.
+
+```bash
+./install.sh
+```
+
+During installation, a Python virtual environment is created in `/var/lib/taos/taosanode/venv` by default, together with the required libraries.
+The taosanode is installed as a system service, but it is not started automatically after installation. You need to start the service manually with the following command:
+
+```bash
+systemctl start taosanoded
+```
+
+
+## 6.2 Configure the Service
+taosanode provides a RESTful service powered by `uWSGI`. You can configure the options to tune its
+performance by editing the default configuration file `taosanode.ini` located in `/etc/taos`, which is also the configuration directory for the `taosd` service.
+
+```ini
+# taosanode service ip:port
+http = 127.0.0.1:6090
+```
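As a quick sanity check after editing the file, you can verify that the configured address is reachable; the snippet below is an illustrative sketch that assumes the default `127.0.0.1:6090` shown above.

```python
# Illustrative check that the REST port configured in taosanode.ini is listening.
import socket

host, port = "127.0.0.1", 6090   # matches the http setting above

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.settimeout(2)
    result = s.connect_ex((host, port))

print("taosanode is listening" if result == 0 else f"connect failed, errno={result}")
```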
+
+# 7. Running
+## 7.1 Start/Stop Service
+`systemctl start/stop/restart taosanoded.service` will start/stop/restart the taosanode service.
+
+
+## 7.2 Uninstall
+The command `rmtaosanode` removes the installed taosanode from your system. Note that the Python environment is not removed by this script; you need to remove it manually.
+
+# 8. Testing
+We use GitHub Actions to run the test suite. Please refer to the file [.github/workflows/python-package.yml](https://github.com/taosdata/tdanalytics/.github/workflows/python-package.yml) for more details.
+
+
+# 9. Releasing
+For the complete list of taosanode releases, please see Releases.
+
+# 10. CI/CD
+
+We use GitHub Actions for CI/CD workflow configuration. Please refer to the file .github/workflows/python-package.yml for more details.
+
+
+# 11. Coverage
+
+
+# 12. Contributing
+
+Guidelines for contributing to the project:
+
+- Fork the repository
+- Create a feature branch
+- Submit a pull request
+
diff --git a/tools/tdgpt/cfg/taosanode.ini b/tools/tdgpt/cfg/taosanode.ini
new file mode 100755
index 0000000000..51782bccd6
--- /dev/null
+++ b/tools/tdgpt/cfg/taosanode.ini
@@ -0,0 +1,81 @@
+#uwsgi --ini taosanode.ini
+#uwsgi --reload taosanode.pid
+#uwsgi --stop taosanode.pid
+
+[uwsgi]
+# charset
+env = LC_ALL = en_US.UTF-8
+
+# ip:port
+http = 127.0.0.1:6090
+
+# the local unix socket file that communicates with Nginx
+#socket = 127.0.0.1:8001
+#socket-timeout = 10
+
+# base directory
+chdir = /usr/local/taos/taosanode/lib
+
+# initialize python file
+wsgi-file = /usr/local/taos/taosanode/lib/taosanalytics/app.py
+
+# call module of uWSGI
+callable = app
+
+# automatically remove the unix socket and pid file when stopping
+vacuum = true
+
+# socket file permission mode
+#chmod-socket = 664
+
+# uWSGI uid
+uid = root
+
+# uWSGI gid
+gid = root
+
+# main process
+master = true
+
+# the number of worker processes
+processes = 2
+
+# pid file
+pidfile = /usr/local/taos/taosanode/taosanode.pid
+
+# enable threads
+enable-threads = true
+
+# the number of threads for each process
+threads = 4
+
+# memory usage report
+memory-report = true
+
+# smooth restart
+reload-mercy = 10
+
+# conflict with systemctl, so do NOT uncomment this
+# daemonize = /var/log/taos/taosanode/taosanode.log
+
+# log directory
+logto = /var/log/taos/taosanode/taosanode.log
+
+# uWSGI monitor port
+stats = 127.0.0.1:8387
+
+# python virtual environment directory
+virtualenv = /usr/local/taos/taosanode/venv/
+
+[taosanode]
+# default app log file
+app-log = /var/log/taos/taosanode/taosanode.app.log
+
+# model storage directory
+model-dir = /usr/local/taos/taosanode/model/
+
+# default log level
+log-level = DEBUG
+
+# draw the query results
+draw-result = 1
diff --git a/tools/tdgpt/cfg/taosanoded.service b/tools/tdgpt/cfg/taosanoded.service
new file mode 100755
index 0000000000..a8d86cabe7
--- /dev/null
+++ b/tools/tdgpt/cfg/taosanoded.service
@@ -0,0 +1,22 @@
+[Unit]
+Description=TaosANode Service
+After=network-online.target
+Wants=network-online.target
+
+[Service]
+Type=simple
+Environment=PATH=/usr/lib/taos/venv/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ExecStart=/usr/local/taos/taosanode/bin/start.sh
+ExecStop=/usr/local/taos/taosanode/bin/stop.sh
+TimeoutStartSec=0
+TimeoutStopSec=120s
+LimitNOFILE=1048576
+LimitNPROC=infinity
+LimitCORE=infinity
+StandardOutput=null
+Restart=always
+StartLimitBurst=6
+StartLimitInterval=60s
+
+[Install]
+WantedBy=multi-user.target
\ No newline at end of file
diff --git a/tools/tdgpt/script/install.sh b/tools/tdgpt/script/install.sh
new file mode 100755
index 0000000000..9308b37cfc
--- /dev/null
+++ b/tools/tdgpt/script/install.sh
@@ -0,0 +1,748 @@
+#!/bin/bash
+#
+# This file is used to install the analysis platform on Linux systems. The operating system
+# is required to use systemd to manage services at boot
+
+set -e
+
+iplist=""
+serverFqdn=""
+
+# -----------------------Variables definition---------------------
+script_dir=$(dirname $(readlink -f "$0"))
+echo -e "${script_dir}"
+
+# Dynamic directory
+PREFIX="taos"
+PRODUCTPREFIX="taosanode"
+serverName="${PRODUCTPREFIX}d"
+configFile="taosanode.ini"
+productName="TDengine Anode"
+emailName="taosdata.com"
+tarName="package.tar.gz"
+logDir="/var/log/${PREFIX}/${PRODUCTPREFIX}"
+moduleDir="/var/lib/${PREFIX}/${PRODUCTPREFIX}/model"
+venvDir="/var/lib/${PREFIX}/${PRODUCTPREFIX}/venv"
+global_conf_dir="/etc/${PREFIX}"
+installDir="/usr/local/${PREFIX}/${PRODUCTPREFIX}"
+
+python_minor_ver=0 #check the python version
+bin_link_dir="/usr/bin"
+
+#install main path
+install_main_dir=${installDir}
+
+service_config_dir="/etc/systemd/system"
+
+# Color setting
+RED='\033[0;31m'
+GREEN='\033[1;32m'
+GREEN_DARK='\033[0;32m'
+GREEN_UNDERLINE='\033[4;32m'
+NC='\033[0m'
+
+csudo=""
+if command -v sudo >/dev/null; then
+ csudo="sudo "
+fi
+
+update_flag=0
+prompt_force=0
+
+initd_mod=0
+service_mod=2
+if ps aux | grep -v grep | grep systemd &>/dev/null; then
+ service_mod=0
+elif $(which service &>/dev/null); then
+ service_mod=1
+ service_config_dir="/etc/init.d"
+ if $(which chkconfig &>/dev/null); then
+ initd_mod=1
+ elif $(which insserv &>/dev/null); then
+ initd_mod=2
+ elif $(which update-rc.d &>/dev/null); then
+ initd_mod=3
+ else
+ service_mod=2
+ fi
+else
+ service_mod=2
+fi
+
+# get the operating system type for using the corresponding init file
+# ubuntu/debian(deb), centos/fedora(rpm), others: opensuse, redhat, ..., no verification
+#osinfo=$(awk -F= '/^NAME/{print $2}' /etc/os-release)
+if [[ -e /etc/os-release ]]; then
+ osinfo=$(cat /etc/os-release | grep "NAME" | cut -d '"' -f2) || :
+else
+ osinfo=""
+fi
+#echo "osinfo: ${osinfo}"
+os_type=0
+if echo $osinfo | grep -qwi "ubuntu"; then
+ # echo "This is ubuntu system"
+ os_type=1
+elif echo $osinfo | grep -qwi "debian"; then
+ # echo "This is debian system"
+ os_type=1
+elif echo $osinfo | grep -qwi "Kylin"; then
+ # echo "This is Kylin system"
+ os_type=1
+elif echo $osinfo | grep -qwi "centos"; then
+ # echo "This is centos system"
+ os_type=2
+elif echo $osinfo | grep -qwi "fedora"; then
+ # echo "This is fedora system"
+ os_type=2
+elif echo $osinfo | grep -qwi "Linux"; then
+ # echo "This is Linux system"
+ os_type=1
+ service_mod=0
+ initd_mod=0
+ service_config_dir="/etc/systemd/system"
+else
+ echo " osinfo: ${osinfo}"
+ echo " This is an officially unverified linux system,"
+ echo " if there are any problems with the installation and operation, "
+ echo " please feel free to contact ${emailName} for support."
+ os_type=1
+fi
+
+# ============================= get input parameters =================================================
+
+# install.sh -v [server | client] -e [yes | no] -i [systemd | service | ...]
+
+# set parameters by default value
+interactiveFqdn=yes # [yes | no]
+verType=server # [server | client]
+initType=systemd # [systemd | service | ...]
+
+while getopts "hv:e:" arg; do
+ case $arg in
+ e)
+ #echo "interactiveFqdn=$OPTARG"
+ interactiveFqdn=$(echo $OPTARG)
+ ;;
+ v)
+ #echo "verType=$OPTARG"
+ verType=$(echo $OPTARG)
+ ;;
+ h)
+ echo "Usage: $(basename $0) -v [server | client] -e [yes | no]"
+ exit 0
+ ;;
+  ?) #unknown option
+ echo "unknown argument"
+ exit 1
+ ;;
+ esac
+done
+
+#echo "verType=${verType} interactiveFqdn=${interactiveFqdn}"
+
+services=(${serverName})
+
+function install_services() {
+ for service in "${services[@]}"; do
+ install_service ${service}
+ done
+}
+
+function kill_process() {
+ pid=$(ps -ef | grep "$1" | grep -v "grep" | awk '{print $2}')
+ if [ -n "$pid" ]; then
+ ${csudo}kill -9 "$pid" || :
+ fi
+}
+
+function install_main_path() {
+ #create install main dir and all sub dir
+ if [ ! -z "${install_main_dir}" ]; then
+ ${csudo}rm -rf ${install_main_dir} || :
+ fi
+
+ ${csudo}mkdir -p ${install_main_dir}
+ ${csudo}mkdir -p ${install_main_dir}/cfg
+ ${csudo}mkdir -p ${install_main_dir}/bin
+ ${csudo}mkdir -p ${install_main_dir}/lib
+ ${csudo}mkdir -p ${global_conf_dir}
+}
+
+function install_bin_and_lib() {
+ ${csudo}cp -r ${script_dir}/bin/* ${install_main_dir}/bin
+ ${csudo}cp -r ${script_dir}/lib/* ${install_main_dir}/lib/
+
+ if [[ ! -e "${bin_link_dir}/rmtaosanode" ]]; then
+ ${csudo}ln -s ${install_main_dir}/bin/uninstall.sh ${bin_link_dir}/rmtaosanode
+ fi
+}
+
+function add_newHostname_to_hosts() {
+ localIp="127.0.0.1"
+ OLD_IFS="$IFS"
+ IFS=" "
+ iphost=$(cat /etc/hosts | grep $1 | awk '{print $1}')
+ arr=($iphost)
+ IFS="$OLD_IFS"
+ for s in "${arr[@]}"; do
+ if [[ "$s" == "$localIp" ]]; then
+ return
+ fi
+ done
+
+ if grep -q "127.0.0.1 $1" /etc/hosts; then
+ return
+ else
+ ${csudo}chmod 666 /etc/hosts
+ ${csudo}echo "127.0.0.1 $1" >>/etc/hosts
+ fi
+}
+
+function set_hostname() {
+ echo -e -n "${GREEN}Host name or IP (assigned to this machine) which can be accessed by your tools or apps (must not be 'localhost')${NC}"
+ read -e -p " : " -i "$(hostname)" newHostname
+ while true; do
+ if [ -z "$newHostname" ]; then
+ newHostname=$(hostname)
+ break
+ elif [ "$newHostname" != "localhost" ]; then
+ break
+ else
+ echo -e -n "${GREEN}Host name or IP (assigned to this machine) which can be accessed by your tools or apps (must not be 'localhost')${NC}"
+ read -e -p " : " -i "$(hostname)" newHostname
+ fi
+ done
+
+ if [ -f ${global_conf_dir}/${configFile} ]; then
+ ${csudo}sed -i -r "s/#*\s*(fqdn\s*).*/\1$newHostname/" ${global_conf_dir}/${configFile}
+ else
+ ${csudo}sed -i -r "s/#*\s*(fqdn\s*).*/\1$newHostname/" ${script_dir}/cfg/${configFile}
+ fi
+ serverFqdn=$newHostname
+
+ if [[ -e /etc/hosts ]] && [[ ! $newHostname =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
+ add_newHostname_to_hosts $newHostname
+ fi
+}
+
+function is_correct_ipaddr() {
+ newIp=$1
+ OLD_IFS="$IFS"
+ IFS=" "
+ arr=($iplist)
+ IFS="$OLD_IFS"
+ for s in "${arr[@]}"; do
+ if [[ "$s" == "$newIp" ]]; then
+ return 0
+ fi
+ done
+
+ return 1
+}
+
+function set_ipAsFqdn() {
+ iplist=$(ip address | grep inet | grep -v inet6 | grep -v 127.0.0.1 | awk '{print $2}' | awk -F "/" '{print $1}') || :
+ if [ -z "$iplist" ]; then
+ iplist=$(ifconfig | grep inet | grep -v inet6 | grep -v 127.0.0.1 | awk '{print $2}' | awk -F ":" '{print $2}') || :
+ fi
+
+ if [ -z "$iplist" ]; then
+ echo
+ echo -e -n "${GREEN}Unable to get local ip, use 127.0.0.1${NC}"
+ localFqdn="127.0.0.1"
+ # Write the local FQDN to configuration file
+
+ if [ -f ${global_conf_dir}/${configFile} ]; then
+ ${csudo}sed -i -r "s/#*\s*(fqdn\s*).*/\1$localFqdn/" ${global_conf_dir}/${configFile}
+ else
+ ${csudo}sed -i -r "s/#*\s*(fqdn\s*).*/\1$localFqdn/" ${script_dir}/cfg/${configFile}
+ fi
+ serverFqdn=$localFqdn
+ echo
+ return
+ fi
+
+ echo -e -n "${GREEN}Please choose an IP from local IP list${NC}:"
+ echo
+ echo -e -n "${GREEN}$iplist${NC}"
+ echo
+ echo
+ echo -e -n "${GREEN}Notes: if IP is used as the node name, data can NOT be migrated to other machine directly${NC}:"
+ read localFqdn
+ while true; do
+ if [ ! -z "$localFqdn" ]; then
+ # Check if correct ip address
+ is_correct_ipaddr $localFqdn
+ retval=$(echo $?)
+ if [[ $retval != 0 ]]; then
+ read -p "Please choose an IP from local IP list:" localFqdn
+ else
+ # Write the local FQDN to configuration file
+ if [ -f ${global_conf_dir}/${configFile} ]; then
+ ${csudo}sed -i -r "s/#*\s*(fqdn\s*).*/\1$localFqdn/" ${global_conf_dir}/${configFile}
+ else
+ ${csudo}sed -i -r "s/#*\s*(fqdn\s*).*/\1$localFqdn/" ${script_dir}/cfg/${configFile}
+ fi
+ serverFqdn=$localFqdn
+ break
+ fi
+ else
+ read -p "Please choose an IP from local IP list:" localFqdn
+ fi
+ done
+}
+
+function local_fqdn_check() {
+ #serverFqdn=$(hostname)
+ echo
+ echo -e -n "System hostname is: ${GREEN}$serverFqdn${NC}"
+ echo
+ set_hostname
+}
+
+function install_anode_config() {
+ fileName="${script_dir}/cfg/${configFile}"
+ echo -e $fileName
+
+ if [ -f ${fileName} ]; then
+ ${csudo}sed -i -r "s/#*\s*(fqdn\s*).*/\1$serverFqdn/" ${script_dir}/cfg/${configFile}
+
+ if [ -f "${global_conf_dir}/${configFile}" ]; then
+ ${csudo}cp ${fileName} ${global_conf_dir}/${configFile}.new
+ else
+ ${csudo}cp ${fileName} ${global_conf_dir}/${configFile}
+ fi
+ fi
+
+ ${csudo}ln -sf ${global_conf_dir}/${configFile} ${install_main_dir}/cfg
+}
+
+function install_config() {
+
+ [ ! -z $1 ] && return 0 || : # only install client
+
+ if ((${update_flag} == 1)); then
+ install_taosd_config
+ return 0
+ fi
+
+ if [ "$interactiveFqdn" == "no" ]; then
+ install_taosd_config
+ return 0
+ fi
+
+ local_fqdn_check
+ install_anode_config
+
+ echo
+ echo -e -n "${GREEN}Enter FQDN:port (like h1.${emailName}:6030) of an existing ${productName} cluster node to join${NC}"
+ echo
+ echo -e -n "${GREEN}OR leave it blank to build one${NC}:"
+ read firstEp
+ while true; do
+ if [ ! -z "$firstEp" ]; then
+ if [ -f ${global_conf_dir}/${configFile} ]; then
+ ${csudo}sed -i -r "s/#*\s*(firstEp\s*).*/\1$firstEp/" ${global_conf_dir}/${configFile}
+ else
+ ${csudo}sed -i -r "s/#*\s*(firstEp\s*).*/\1$firstEp/" ${script_dir}/cfg/${configFile}
+ fi
+ break
+ else
+ break
+ fi
+ done
+
+ echo
+ echo -e -n "${GREEN}Enter your email address for priority support or enter empty to skip${NC}: "
+ read emailAddr
+ while true; do
+ if [ ! -z "$emailAddr" ]; then
+ email_file="${install_main_dir}/email"
+ ${csudo}bash -c "echo $emailAddr > ${email_file}"
+ break
+ else
+ break
+ fi
+ done
+}
+
+function install_log() {
+ ${csudo}mkdir -p ${logDir} && ${csudo}chmod 777 ${logDir}
+ ${csudo}ln -sf ${logDir} ${install_main_dir}/log
+}
+
+function install_module() {
+ ${csudo}mkdir -p ${moduleDir} && ${csudo}chmod 777 ${moduleDir}
+ ${csudo}ln -sf ${moduleDir} ${install_main_dir}/model
+}
+
+function install_anode_venv() {
+ ${csudo}mkdir -p ${venvDir} && ${csudo}chmod 777 ${venvDir}
+ ${csudo}ln -sf ${venvDir} ${install_main_dir}/venv
+
+ # build venv
+ ${csudo}python3.${python_minor_ver} -m venv ${venvDir}
+
+ echo -e "active Python3 virtual env: ${venvDir}"
+ source ${venvDir}/bin/activate
+
+ echo -e "install the required packages by pip3, this may take a while depending on the network condition"
+ ${csudo}${venvDir}/bin/pip3 install numpy==1.26.4
+ ${csudo}${venvDir}/bin/pip3 install pandas==1.5.0
+
+ ${csudo}${venvDir}/bin/pip3 install scikit-learn
+ ${csudo}${venvDir}/bin/pip3 install outlier_utils
+ ${csudo}${venvDir}/bin/pip3 install statsmodels
+ ${csudo}${venvDir}/bin/pip3 install pyculiarity
+ ${csudo}${venvDir}/bin/pip3 install pmdarima
+ ${csudo}${venvDir}/bin/pip3 install flask
+ ${csudo}${venvDir}/bin/pip3 install matplotlib
+ ${csudo}${venvDir}/bin/pip3 install uwsgi
+ ${csudo}${venvDir}/bin/pip3 install torch --index-url https://download.pytorch.org/whl/cpu
+ ${csudo}${venvDir}/bin/pip3 install --upgrade keras
+
+ echo -e "Install python library for venv completed!"
+}
+
+function clean_service_on_sysvinit() {
+ if ps aux | grep -v grep | grep $1 &>/dev/null; then
+ ${csudo}service $1 stop || :
+ fi
+
+ if ((${initd_mod} == 1)); then
+ if [ -e ${service_config_dir}/$1 ]; then
+ ${csudo}chkconfig --del $1 || :
+ fi
+ elif ((${initd_mod} == 2)); then
+ if [ -e ${service_config_dir}/$1 ]; then
+ ${csudo}insserv -r $1 || :
+ fi
+ elif ((${initd_mod} == 3)); then
+ if [ -e ${service_config_dir}/$1 ]; then
+ ${csudo}update-rc.d -f $1 remove || :
+ fi
+ fi
+
+ ${csudo}rm -f ${service_config_dir}/$1 || :
+
+ if $(which init &>/dev/null); then
+ ${csudo}init q || :
+ fi
+}
+
+function install_service_on_sysvinit() {
+ if [ "$1" != "${serverName}" ]; then
+ return
+ fi
+
+ clean_service_on_sysvinit $1
+ sleep 1
+
+ if ((${os_type} == 1)); then
+ ${csudo}cp ${script_dir}/init.d/${serverName}.deb ${service_config_dir}/${serverName} && ${csudo}chmod a+x ${service_config_dir}/${serverName}
+ elif ((${os_type} == 2)); then
+ ${csudo}cp ${script_dir}/init.d/${serverName}.rpm ${service_config_dir}/${serverName} && ${csudo}chmod a+x ${service_config_dir}/${serverName}
+ fi
+
+ if ((${initd_mod} == 1)); then
+ ${csudo}chkconfig --add $1 || :
+ ${csudo}chkconfig --level 2345 $1 on || :
+ elif ((${initd_mod} == 2)); then
+    ${csudo}insserv $1 || :
+ ${csudo}insserv -d $1 || :
+ elif ((${initd_mod} == 3)); then
+ ${csudo}update-rc.d $1 defaults || :
+ fi
+}
+
+function clean_service_on_systemd() {
+ service_config="${service_config_dir}/$1.service"
+
+ if systemctl is-active --quiet $1; then
+ echo "$1 is running, stopping it..."
+ ${csudo}systemctl stop $1 &>/dev/null || echo &>/dev/null
+ fi
+ ${csudo}systemctl disable $1 &>/dev/null || echo &>/dev/null
+ ${csudo}rm -f ${service_config}
+}
+
+function install_service_on_systemd() {
+ clean_service_on_systemd $1
+
+ cfg_source_dir=${script_dir}/cfg
+ if [[ "$1" == "${xname}" || "$1" == "${explorerName}" ]]; then
+ cfg_source_dir=${script_dir}/cfg
+ fi
+
+ if [ -f ${cfg_source_dir}/$1.service ]; then
+ ${csudo}cp ${cfg_source_dir}/$1.service ${service_config_dir}/ || :
+ fi
+
+ ${csudo}systemctl enable $1
+ ${csudo}systemctl daemon-reload
+}
+
+function install_service() {
+ if ((${service_mod} == 0)); then
+ install_service_on_systemd $1
+ elif ((${service_mod} == 1)); then
+ install_service_on_sysvinit $1
+ else
+ kill_process $1
+ fi
+}
+
+vercomp() {
+ if [[ $1 == $2 ]]; then
+ return 0
+ fi
+ local IFS=.
+ local i ver1=($1) ver2=($2)
+ # fill empty fields in ver1 with zeros
+ for ((i = ${#ver1[@]}; i < ${#ver2[@]}; i++)); do
+ ver1[i]=0
+ done
+
+ for ((i = 0; i < ${#ver1[@]}; i++)); do
+ if [[ -z ${ver2[i]} ]]; then
+ # fill empty fields in ver2 with zeros
+ ver2[i]=0
+ fi
+ if ((10#${ver1[i]} > 10#${ver2[i]})); then
+ return 1
+ fi
+ if ((10#${ver1[i]} < 10#${ver2[i]})); then
+ return 2
+ fi
+ done
+ return 0
+}
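+
+# Usage sketch (illustrative only): vercomp compares two dotted version strings field
+# by field and returns 0 (equal), 1 (the first is newer) or 2 (the first is older), e.g.
+#   vercomp "3.3.2.0" "3.3.10.0"   # returns 2, because 2 < 10 numerically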
+
+function is_version_compatible() {
+
+ curr_version=$(ls ${script_dir}/driver/libtaos.so* | awk -F 'libtaos.so.' '{print $2}')
+
+ if [ -f ${script_dir}/driver/vercomp.txt ]; then
+ min_compatible_version=$(cat ${script_dir}/driver/vercomp.txt)
+ else
+ min_compatible_version=$(${script_dir}/bin/${serverName} -V | grep version | head -1 | cut -d ' ' -f 5)
+ fi
+
+ exist_version=$(${installDir}/bin/${serverName} -V | grep version | head -1 | cut -d ' ' -f 3)
+ vercomp $exist_version "3.0.0.0"
+ case $? in
+ 2)
+ prompt_force=1
+ ;;
+ esac
+
+  vercomp $curr_version $min_compatible_version
+  compat_result=$? # capture the return value before any other command overwrites $?
+
+  case $compat_result in
+ 0) return 0 ;;
+ 1) return 0 ;;
+ 2) return 1 ;;
+ esac
+}
+
+deb_erase() {
+ confirm=""
+ while [ "" == "${confirm}" ]; do
+ echo -e -n "${RED}Existing TDengine deb is detected, do you want to remove it? [yes|no] ${NC}:"
+ read confirm
+ if [ "yes" == "$confirm" ]; then
+ ${csudo}dpkg --remove tdengine || :
+ break
+ elif [ "no" == "$confirm" ]; then
+ break
+ fi
+ done
+}
+
+rpm_erase() {
+ confirm=""
+ while [ "" == "${confirm}" ]; do
+ echo -e -n "${RED}Existing TDengine rpm is detected, do you want to remove it? [yes|no] ${NC}:"
+ read confirm
+ if [ "yes" == "$confirm" ]; then
+ ${csudo}rpm -e tdengine || :
+ break
+ elif [ "no" == "$confirm" ]; then
+ break
+ fi
+ done
+}
+
+function updateProduct() {
+ # Check if version compatible
+ if ! is_version_compatible; then
+ echo -e "${RED}Version incompatible${NC}"
+ return 1
+ fi
+
+ # Start to update
+ if [ ! -e ${tarName} ]; then
+ echo "File ${tarName} does not exist"
+ exit 1
+ fi
+
+ if echo $osinfo | grep -qwi "centos"; then
+ rpm -q tdengine 2>&1 >/dev/null && rpm_erase tdengine || :
+ elif echo $osinfo | grep -qwi "ubuntu"; then
+ dpkg -l tdengine 2>&1 | grep ii >/dev/null && deb_erase tdengine || :
+ fi
+
+ tar -zxf ${tarName}
+
+ echo "Start to update ${productName}..."
+ # Stop the service if running
+ if ps aux | grep -v grep | grep ${serverName} &>/dev/null; then
+ if ((${service_mod} == 0)); then
+ ${csudo}systemctl stop ${serverName} || :
+ elif ((${service_mod} == 1)); then
+ ${csudo}service ${serverName} stop || :
+ else
+ kill_process ${serverName}
+ fi
+ sleep 1
+ fi
+
+ install_main_path
+ install_log
+ install_module
+ install_config
+
+ if [ -z $1 ]; then
+ install_bin
+ install_services
+
+ echo
+ echo -e "${GREEN_DARK}To configure ${productName} ${NC}\t\t: edit ${global_conf_dir}/${configFile}"
+ [ -f ${global_conf_dir}/${adapterName}.toml ] && [ -f ${installDir}/bin/${adapterName} ] &&
+ echo -e "${GREEN_DARK}To configure ${adapterName} ${NC}\t: edit ${global_conf_dir}/${adapterName}.toml"
+ echo -e "${GREEN_DARK}To configure ${explorerName} ${NC}\t: edit ${global_conf_dir}/explorer.toml"
+ if ((${service_mod} == 0)); then
+ echo -e "${GREEN_DARK}To start ${productName} server ${NC}\t: ${csudo}systemctl start ${serverName}${NC}"
+ elif ((${service_mod} == 1)); then
+ echo -e "${GREEN_DARK}To start ${productName} server ${NC}\t: ${csudo}service ${serverName} start${NC}"
+ else
+ echo -e "${GREEN_DARK}To start ${productName} server ${NC}\t: ./${serverName}${NC}"
+ fi
+
+ echo
+ echo "${productName} is updated successfully!"
+ echo
+
+ else
+ install_bin
+ fi
+
+ cd $script_dir
+ rm -rf $(tar -tf ${tarName} | grep -Ev "^\./$|^\/")
+}
+
+function installProduct() {
+ # Start to install
+ if [ ! -e ${tarName} ]; then
+ echo "File ${tarName} does not exist"
+ exit 1
+ fi
+
+ tar -zxf ${tarName}
+
+ echo "Start to install ${productName}..."
+
+ install_main_path
+ install_log
+ install_anode_config
+ install_module
+
+ install_bin_and_lib
+ install_services
+
+ echo
+ echo -e "\033[44;32;1m${productName} is installed successfully!${NC}"
+
+ echo
+ echo -e "\033[44;32;1mStart to create virtual python env in ${venvDir}${NC}"
+ install_anode_venv
+}
+
+# check the python version; only 3.10/3.11 are supported
+check_python3_env() {
+ if ! command -v python3 &> /dev/null
+ then
+ echo -e "\033[31mWarning: Python3 command not found. Version 3.10/3.11 is required.\033[0m"
+ exit 1
+ fi
+
+ python3_version=$(python3 --version 2>&1 | awk -F' ' '{print $2}')
+
+ python3_version_ok=false
+ python_minor_ver=$(echo "$python3_version" | cut -d"." -f2)
+ if [[ $(echo "$python3_version" | cut -d"." -f1) -eq 3 && $(echo "$python3_version" | cut -d"." -f2) -ge 10 ]]; then
+ python3_version_ok=true
+ fi
+
+ if $python3_version_ok; then
+ echo -e "\033[32mPython3 ${python3_version} has been found.\033[0m"
+ else
+ if command -v python3.10 &> /dev/null
+ then
+ echo -e "\033[32mPython3.10 has been found.\033[0m"
+ python_minor_ver=10
+ elif command -v python3.11 &> /dev/null
+ then
+ python_minor_ver=11
+ echo -e "\033[32mPython3.11 has been found.\033[0m"
+ else
+ echo -e "\033[31mWarning: Python3.10/3.11 is required, only found python${python3_version}.\033[0m"
+ exit 1
+ fi
+ fi
+
+# echo -e "Python3 minor version is:${python_minor_ver}"
+
+  # check the existence of pip3.10/pip3.11
+ if ! command -v pip3 &> /dev/null
+ then
+ echo -e "\033[31mWarning: Pip3 command not found. Version 3.10/3.11 is required.\033[0m"
+ exit 1
+ fi
+
+ pip3_version=$(pip3 --version 2>&1 | awk -F' ' '{print $6}' | cut -d")" -f1)
+ major_ver=$(echo "${pip3_version}" | cut -d"." -f1)
+ minor_ver=$(echo "${pip3_version}" | cut -d"." -f2)
+
+ pip3_version_ok=false;
+ if [[ ${major_ver} -eq 3 && ${minor_ver} -ge 10 ]]; then
+ pip3_version_ok=true
+ fi
+
+ if $pip3_version_ok; then
+ echo -e "\033[32mpip3 ${pip3_version} has been found.\033[0m"
+ else
+ if command -v pip3.${python_minor_ver} &> /dev/null
+ then
+ echo -e "\033[32mpip3.${python_minor_ver} has been found.\033[0m"
+ else
+ echo -e "\033[31mWarning: pip3.10/3.11 is required, only found pip${pip3_version}.\033[0m"
+ exit 1
+ fi
+ fi
+
+# if ! command -v python3.${python_minor_ver}-venv &> /dev/null
+# then
+# echo -e "\033[31mWarning: python3.${python_minor_ver}-venv command not found.\033[0m"
+# exit 1
+# fi
+}
+
+## ==============================Main program starts from here============================
+serverFqdn=$(hostname)
+
+if [ "$verType" == "server" ]; then
+ check_python3_env
+ installProduct
+fi
diff --git a/tools/tdgpt/script/release.sh b/tools/tdgpt/script/release.sh
new file mode 100755
index 0000000000..c143357eb1
--- /dev/null
+++ b/tools/tdgpt/script/release.sh
@@ -0,0 +1,100 @@
+#!/bin/bash
+# Generate the install package for all OS systems
+
+set -e
+# set -x
+
+curr_dir=$(pwd)
+compile_dir=$1
+version="1.0.1"
+osType=
+pagMode=
+productName="TDengine-enterprise-anode"
+
+script_dir="$(dirname $(readlink -f $0))"
+top_dir="$(readlink -f ${script_dir}/..)"
+
+echo -e ${top_dir}
+
+serverName="taosanoded"
+configFile="taosanode.ini"
+tarName="package.tar.gz"
+
+# create compressed install file.
+build_dir="${compile_dir}/build"
+release_dir="${top_dir}/release"
+
+#package_name='linux'
+install_dir="${release_dir}/${productName}-${version}"
+
+if [ "$pagMode" == "lite" ]; then
+ strip ${build_dir}/bin/${serverName}
+fi
+
+cfg_dir="${top_dir}/cfg"
+install_files="${script_dir}/install.sh"
+
+# make directories.
+mkdir -p ${install_dir}
+mkdir -p ${install_dir}/cfg && cp ${cfg_dir}/${configFile} ${install_dir}/cfg/${configFile}
+
+if [ -f "${cfg_dir}/${serverName}.service" ]; then
+ cp ${cfg_dir}/${serverName}.service ${install_dir}/cfg || :
+fi
+
+# python files
+mkdir -p ${install_dir}/bin && mkdir -p ${install_dir}/lib
+
+# script to control start/stop/uninstall process
+rm -r ${top_dir}/taosanalytics/*.pyc || :
+cp -r ${top_dir}/taosanalytics/ ${install_dir}/lib/ && chmod a+x ${install_dir}/lib/ || :
+cp -r ${top_dir}/script/st*.sh ${install_dir}/bin/ && chmod a+x ${install_dir}/bin/* || :
+cp -r ${top_dir}/script/uninstall.sh ${install_dir}/bin/ && chmod a+x ${install_dir}/bin/* || :
+
+cd ${install_dir}
+
+#if [ "$osType" != "Darwin" ]; then
+# tar -zcv -f ${tarName} ./bin/* || :
+# rm -rf ${install_dir}/bin || :
+#else
+tar -zcv -f ${tarName} ./lib/* || :
+
+if [ ! -z "${install_dir}" ]; then
+ # shellcheck disable=SC2115
+ rm -rf "${install_dir}"/lib || :
+fi
+
+exitcode=$?
+if [ "$exitcode" != "0" ]; then
+ echo "tar ${tarName} error !!!"
+ exit $exitcode
+fi
+
+cd ${curr_dir}
+cp ${install_files} ${install_dir}
+
+chmod a+x ${install_dir}/install.sh
+
+# Copy release note
+# cp ${script_dir}/release_note ${install_dir}
+
+# exit 1
+cd ${release_dir}
+
+pkg_name=${install_dir}
+echo -e "pkg_name is: ${pkg_name}"
+
+if [ "$osType" != "Darwin" ]; then
+ tar -zcv -f "$(basename ${pkg_name}).tar.gz" "$(basename ${install_dir})" --remove-files || :
+else
+ tar -zcv -f "$(basename ${pkg_name}).tar.gz" "$(basename ${install_dir})" || :
+ rm -rf "${install_dir}" ||:
+fi
+
+exitcode=$?
+if [ "$exitcode" != "0" ]; then
+ echo "tar ${pkg_name}.tar.gz error !!!"
+ exit $exitcode
+fi
+
+cd ${curr_dir}
diff --git a/tools/tdgpt/script/start.sh b/tools/tdgpt/script/start.sh
new file mode 100755
index 0000000000..144ee08392
--- /dev/null
+++ b/tools/tdgpt/script/start.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# start the flask service by using uwsgi
+/usr/local/taos/taosanode/venv/bin/uwsgi /usr/local/taos/taosanode/cfg/taosanode.ini
\ No newline at end of file
diff --git a/tools/tdgpt/script/stop.sh b/tools/tdgpt/script/stop.sh
new file mode 100755
index 0000000000..c6f81d05cc
--- /dev/null
+++ b/tools/tdgpt/script/stop.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# stop the uwsgi server
+/usr/local/taos/taosanode/venv/bin/uwsgi --stop /usr/local/taos/taosanode/taosanode.pid
\ No newline at end of file
diff --git a/tools/tdgpt/script/uninstall.sh b/tools/tdgpt/script/uninstall.sh
new file mode 100755
index 0000000000..29c62f9782
--- /dev/null
+++ b/tools/tdgpt/script/uninstall.sh
@@ -0,0 +1,220 @@
+#!/bin/bash
+# uninstall the deployed app; the python virtual environment is not removed
+
+set -e
+#set -x
+
+osType=`uname`
+
+RED='\033[0;31m'
+GREEN='\033[1;32m'
+NC='\033[0m'
+
+MAIN_NAME="taosanode"
+installDir="/usr/local/taos/taosanode"
+venv_dir="/usr/local/taos/taosanode/venv"
+serverName="${MAIN_NAME}d"
+uninstallName="rmtaosanode"
+productName="TDengine Enterprise ANode"
+
+if [ "$osType" != "Darwin" ]; then
+ bin_link_dir="/usr/bin"
+else
+ bin_link_dir="/usr/local/bin"
+fi
+
+#install main path
+bin_dir=${installDir}/bin
+lib_dir=${installDir}/lib
+local_log_dir=${installDir}/log
+local_conf_dir=${installDir}/cfg
+local_model_dir=${installDir}/model
+
+global_log_dir="/var/log/taos/${MAIN_NAME}"
+global_conf_dir="/etc/taos/"
+
+service_config_dir="/etc/systemd/system"
+
+services=(${serverName} ${uninstallName})
+
+csudo=""
+if command -v sudo >/dev/null; then
+ csudo="sudo "
+fi
+
+initd_mod=0
+service_mod=2
+
+if ps aux | grep -v grep | grep systemd &>/dev/null; then
+ service_mod=0
+elif $(which service &>/dev/null); then
+ service_mod=1
+ service_config_dir="/etc/init.d"
+ if $(which chkconfig &>/dev/null); then
+ initd_mod=1
+ elif $(which insserv &>/dev/null); then
+ initd_mod=2
+ elif $(which update-rc.d &>/dev/null); then
+ initd_mod=3
+ else
+ service_mod=2
+ fi
+else
+ service_mod=2
+fi
+
+kill_service_of() {
+ _service=$1
+ pid=$(ps -ef | grep $_service | grep -v grep | awk '{print $2}')
+ if [ -n "$pid" ]; then
+ "${csudo}"${installDir}/bin/stop.sh:
+ fi
+}
+
+clean_service_on_systemd_of() {
+ _service=$1
+ _service_config="${service_config_dir}/${_service}.service"
+ if systemctl is-active --quiet ${_service}; then
+ echo "${_service} is running, stopping it..."
+ "${csudo}"systemctl stop ${_service} &>/dev/null || echo &>/dev/null
+ fi
+
+ "${csudo}"systemctl disable ${_service} &>/dev/null || echo &>/dev/null
+
+ if [[ ! -z "${_service_config}" && -f "${_service_config}" ]]; then
+ ${csudo}rm ${_service_config}
+ fi
+}
+
+clean_service_on_sysvinit_of() {
+ _service=$1
+ if pidof ${_service} &>/dev/null; then
+ echo "${_service} is running, stopping it..."
+ "${csudo}"service ${_service} stop || :
+ fi
+ if ((${initd_mod} == 1)); then
+ if [ -e ${service_config_dir}/${_service} ]; then
+ # shellcheck disable=SC2086
+ ${csudo}chkconfig --del ${_service} || :
+ fi
+ elif ((${initd_mod} == 2)); then
+ if [ -e ${service_config_dir}/${_service} ]; then
+ ${csudo}insserv -r ${_service} || :
+ fi
+ elif ((${initd_mod} == 3)); then
+ if [ -e ${service_config_dir}/${_service} ]; then
+ ${csudo}update-rc.d -f ${_service} remove || :
+ fi
+ fi
+
+ # shellcheck disable=SC2236
+ if [ ! -z "${service_config_dir}" ]; then
+ echo -e "rm ${service_config_dir}/${_service}"
+ fi
+
+ #${csudo}rm ${service_config_dir}/${_service} || :
+
+ if $(which init &>/dev/null); then
+ ${csudo}init q || :
+ fi
+}
+
+clean_service_of() {
+ if ((${service_mod} == 0)); then
+ clean_service_on_systemd_of $_service
+ elif ((${service_mod} == 1)); then
+ clean_service_on_sysvinit_of $_service
+ else
+ kill_service_of $_service
+ fi
+}
+
+remove_service_of() {
+ _service=$1
+ clean_service_of ${_service}
+ if [[ -e "${bin_link_dir}/${_service}" ]]; then
+ ${csudo}rm "${bin_link_dir}"/${_service}
+ echo "${_service} is removed successfully!"
+ fi
+}
+
+remove_service() {
+ for _service in "${services[@]}"; do
+ remove_service_of "${_service}"
+ done
+}
+
+function clean_venv() {
+  # do not remove the python virtual environment; only print the command for manual cleanup
+ #${csudo}rm ${venv_dir}/* || :
+ if [ ! -z "${venv_dir}" ]; then
+ echo -e "${csudo}rm -rf ${venv_dir}/*"
+ fi
+}
+
+function clean_module() {
+ if [ ! -z "${local_model_dir}" ]; then
+ ${csudo}unlink ${local_model_dir} || :
+ fi
+}
+
+function clean_config() {
+ # Remove config file
+ if [ ! -z "${global_conf_dir}" ]; then
+ ${csudo}rm -f ${global_conf_dir}/taosanode.ini || :
+ fi
+
+ if [ ! -z "${local_conf_dir}" ]; then
+ # shellcheck disable=SC2086
+ ${csudo}rm -rf ${local_conf_dir} || :
+ fi
+}
+
+function clean_log() {
+ # Remove log files
+ if [ ! -z "${global_log_dir}" ]; then
+ ${csudo}rm -rf ${global_log_dir} || :
+ fi
+
+ if [ ! -z "${local_log_dir}" ]; then
+ ${csudo}rm -rf ${local_log_dir} || :
+ fi
+}
+
+function remove_deploy_binary() {
+ if [ ! -z "${bin_dir}" ]; then
+ ${csudo}rm -rf ${bin_dir} || :
+ fi
+
+ if [ ! -z "${lib_dir}" ]; then
+ ${csudo}rm -rf ${lib_dir}
+ fi
+}
+
+remove_service
+clean_log # Remove link log directory
+clean_config # Remove link configuration file
+remove_deploy_binary
+clean_venv
+
+if [[ -e /etc/os-release ]]; then
+ osinfo=$(awk -F= '/^NAME/{print $2}' /etc/os-release)
+else
+ osinfo=""
+fi
+
+if echo $osinfo | grep -qwi "ubuntu"; then
+ # echo "this is ubuntu system"
+ ${csudo}dpkg --force-all -P tdengine >/dev/null 2>&1 || :
+elif echo $osinfo | grep -qwi "debian"; then
+ # echo "this is debian system"
+ ${csudo}dpkg --force-all -P tdengine >/dev/null 2>&1 || :
+elif echo $osinfo | grep -qwi "centos"; then
+ # echo "this is centos system"
+ ${csudo}rpm -e --noscripts tdengine >/dev/null 2>&1 || :
+fi
+
+command -v systemctl >/dev/null 2>&1 && ${csudo}systemctl daemon-reload >/dev/null 2>&1 || true
+echo
+echo "${productName} is uninstalled successfully!"
+echo
diff --git a/tools/tdgpt/taosanalytics/__init__.py b/tools/tdgpt/taosanalytics/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/tdgpt/taosanalytics/algo/__init__.py b/tools/tdgpt/taosanalytics/algo/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/tdgpt/taosanalytics/algo/ad/__init__.py b/tools/tdgpt/taosanalytics/algo/ad/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/tdgpt/taosanalytics/algo/ad/autoencoder.py b/tools/tdgpt/taosanalytics/algo/ad/autoencoder.py
new file mode 100644
index 0000000000..0d3bb21faa
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/ad/autoencoder.py
@@ -0,0 +1,117 @@
+# encoding:utf-8
+# pylint: disable=c0103
+""" auto encoder algorithms to detect anomaly for time series data"""
+import os.path
+
+import joblib
+import numpy as np
+import pandas as pd
+
+from taosanalytics.conf import app_logger, conf
+from taosanalytics.misc.train_model import create_sequences
+from taosanalytics.service import AbstractAnomalyDetectionService
+
+
+class _AutoEncoderDetectionService(AbstractAnomalyDetectionService):
+ name = 'ad_encoder'
+ desc = "anomaly detection based on auto encoder"
+
+ def __init__(self):
+ super().__init__()
+
+ self.table_name = None
+ self.mean = None
+ self.std = None
+ self.threshold = None
+ self.time_interval = None
+ self.model = None
+ self.dir = 'ad_autoencoder'
+
+ self.root_path = conf.get_model_directory()
+
+ self.root_path = self.root_path + f'/{self.dir}/'
+
+ if not os.path.exists(self.root_path):
+ app_logger.log_inst.error(
+ "%s ad algorithm failed to locate default module directory:"
+ "%s, not active", self.__class__.__name__, self.root_path)
+ else:
+ app_logger.log_inst.info("%s ad algorithm root path is: %s", self.__class__.__name__,
+ self.root_path)
+
+ def execute(self):
+ if self.input_is_empty():
+ return []
+
+ if self.model is None:
+ raise FileNotFoundError("not load autoencoder model yet, or load model failed")
+
+ array_2d = np.reshape(self.list, (len(self.list), 1))
+ df = pd.DataFrame(array_2d)
+
+ # normalize input data using z-score
+ normalized_list = (df - self.mean.value) / self.std.value
+ seq = create_sequences(normalized_list.values, self.time_interval)
+
+ # Get test MAE loss.
+ pred_list = self.model.predict(seq)
+ mae_loss = np.mean(np.abs(pred_list - seq), axis=1)
+ mae = mae_loss.reshape((-1))
+
+ # Detect all the samples which are anomalies.
+ anomalies = mae > self.threshold
+
+ # syslogger.log_inst(
+ # "Number of anomaly samples: %f, Indices of anomaly samples:{}".
+ # format(np.sum(anomalies), np.where(anomalies))
+ # )
+
+ # data i is an anomaly if samples [(i - timesteps + 1) to (i)] are anomalies
+ ad_indices = []
+ for data_idx in range(self.time_interval - 1,
+ len(normalized_list) - self.time_interval + 1):
+ if np.all(anomalies[data_idx - self.time_interval + 1: data_idx]):
+ ad_indices.append(data_idx)
+
+ return [-1 if i in ad_indices else 1 for i in range(len(self.list))]
+
+ def set_params(self, params):
+
+ if "model" not in params:
+ raise ValueError("model needs to be specified")
+
+ name = params['model']
+
+ module_file_path = f'{self.root_path}/{name}.dat'
+ module_info_path = f'{self.root_path}/{name}.info'
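+        # a trained model is expected to be stored as two files under the model directory:
+        #   <name>.dat  - the serialized model itself, loaded via joblib
+        #   <name>.info - a dict holding "mean", "std", "threshold" and "timesteps"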
+
+ app_logger.log_inst.info("try to load module:%s", module_file_path)
+
+ if os.path.exists(module_file_path):
+ self.model = joblib.load(module_file_path)
+ else:
+ app_logger.log_inst.error("failed to load autoencoder model file: %s", module_file_path)
+ raise FileNotFoundError(f"{module_file_path} not found")
+
+ if os.path.exists(module_info_path):
+ info = joblib.load(module_info_path)
+ else:
+ app_logger.log_inst.error("failed to load autoencoder model file: %s", module_file_path)
+ raise FileNotFoundError("%s not found", module_info_path)
+
+ if info is not None:
+ self.mean = info["mean"]
+ self.std = info["std"]
+ self.threshold = info["threshold"]
+ self.time_interval = info["timesteps"]
+
+ app_logger.log_inst.info(
+ "load ac module success, mean: %f, std: %f, threshold: %f, time_interval: %d",
+ self.mean[0], self.std[0], self.threshold, self.time_interval
+ )
+ else:
+ app_logger.log_inst.error("failed to load %s model", name)
+ raise RuntimeError(f"failed to load model {name}")
+
+ def get_params(self):
+ return {"dir": self.dir + '/*'}
diff --git a/tools/tdgpt/taosanalytics/algo/ad/grubbs.py b/tools/tdgpt/taosanalytics/algo/ad/grubbs.py
new file mode 100644
index 0000000000..6318c109f5
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/ad/grubbs.py
@@ -0,0 +1,42 @@
+# encoding:utf-8
+""" grubbs algorithm class"""
+
+from outliers import smirnov_grubbs as grubbs
+from taosanalytics.service import AbstractAnomalyDetectionService
+
+
+class _GrubbsService(AbstractAnomalyDetectionService):
+ """ Grubbs algorithm is to check the anomaly data in the input list """
+ name = 'grubbs'
+ desc = """Grubbs' test is to detect the presence of one outlier in a data set that is normally
+ distributed"""
+
+ def __init__(self, alpha_val=0.95):
+ super().__init__()
+
+ if alpha_val <= 0 or alpha_val >= 1:
+ raise ValueError("invalid alpha value, valid range is (0, 1)")
+ self.alpha = 1 - alpha_val
+
+ def execute(self):
+ """perform Grubbs' test and identify (if any) the outlier"""
+ if self.input_is_empty():
+ return []
+
+ res = grubbs.test(self.list, alpha=self.alpha)
+
+ error_indicator = [1 if k in set(res) else -1 for k in self.list]
+ return error_indicator
+
+ def set_params(self, params):
+ """ set the value of alpha """
+ super().set_params(params)
+
+ if "alpha".lower() in params:
+ # raise ValueError("alpha parameter is missing for grubbs algorithm")
+ alpha_val = float(params["alpha"])
+
+ if alpha_val <= 0 or alpha_val >= 1:
+ raise ValueError("invalid alpha value, valid range is (0, 1)")
+
+ self.alpha = 1 - alpha_val
diff --git a/tools/tdgpt/taosanalytics/algo/ad/iqr.py b/tools/tdgpt/taosanalytics/algo/ad/iqr.py
new file mode 100644
index 0000000000..c0918a5090
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/ad/iqr.py
@@ -0,0 +1,29 @@
+# encoding:utf-8
+"""iqr class definition"""
+import numpy as np
+from taosanalytics.service import AbstractAnomalyDetectionService
+
+
+class _IqrService(AbstractAnomalyDetectionService):
+ """ IQR algorithm is to check the anomaly data in the input list """
+ name = 'iqr'
+ desc = """found the anomaly data according to the inter-quartile range"""
+
+ def __init__(self):
+ super().__init__()
+
+ def execute(self):
+ if self.input_is_empty():
+ return []
+
+ lower = np.quantile(self.list, 0.25)
+ upper = np.quantile(self.list, 0.75)
+
+ min_val = lower - 1.5 * (upper - lower)
+ max_val = upper + 1.5 * (upper - lower)
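+        # these are the standard Tukey fences: [Q1 - 1.5*IQR, Q3 + 1.5*IQR]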
+
+ threshold = [min_val, max_val]
+ return [-1 if k < threshold[0] or k > threshold[1] else 1 for k in self.list]
+
+ def set_params(self, params):
+ pass
diff --git a/tools/tdgpt/taosanalytics/algo/ad/ksigma.py b/tools/tdgpt/taosanalytics/algo/ad/ksigma.py
new file mode 100644
index 0000000000..9d872dd11a
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/ad/ksigma.py
@@ -0,0 +1,47 @@
+# encoding:utf-8
+"""ksigma class definition"""
+
+import numpy as np
+from taosanalytics.service import AbstractAnomalyDetectionService
+
+
+class _KSigmaService(AbstractAnomalyDetectionService):
+ """ KSigma algorithm is to check the anomaly data in the input list """
+ name = "ksigma"
+ desc = """the k-sigma algorithm (or 3σ rule) expresses a conventional heuristic that nearly all
+ values are taken to lie within k (usually three) standard deviations of the mean, and thus
+ it is empirically useful to treat 99.7% probability as near certainty"""
+
+ def __init__(self, k_val=3):
+ super().__init__()
+ self.k_val = k_val
+
+ def execute(self):
+ def get_k_sigma_range(vals, k_value):
+ """ Return the k-sigma value range """
+ avg = np.mean(vals)
+ std = np.std(vals)
+
+ upper = avg + k_value * std
+ lower = avg - k_value * std
+ return [float(lower), float(upper)]
+
+ if self.input_is_empty():
+ return []
+
+ threshold = get_k_sigma_range(self.list, self.k_val)
+ return [-1 if k < threshold[0] or k > threshold[1] else 1 for k in self.list]
+
+ def set_params(self, params):
+ super().set_params(params)
+
+ if "k" in params:
+ k = int(params["k"])
+
+ if k < 1 or k > 3:
+ raise ValueError("k value out of range, valid range [1, 3]")
+
+ self.k_val = k
+
+ def get_params(self):
+ return {"k": self.k_val}
diff --git a/tools/tdgpt/taosanalytics/algo/ad/lof.py b/tools/tdgpt/taosanalytics/algo/ad/lof.py
new file mode 100644
index 0000000000..9f7b8fdc04
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/ad/lof.py
@@ -0,0 +1,43 @@
+# encoding:utf-8
+"""local outlier factor class definition"""
+
+import numpy as np
+import sklearn.neighbors as neighbor
+from taosanalytics.service import AbstractAnomalyDetectionService
+
+
+class _LofService(AbstractAnomalyDetectionService):
+ """ LOF(local outlier factor) algorithm is to check the anomaly data in the input list """
+ name = 'lof'
+ desc = """Local Outlier Factor, Ref: M. M. Breunig, H. P. Kriegel, R. T. Ng, J. Sander.
+ LOF:Identifying Density-based Local Outliers. SIGMOD, 2000."""
+
+ def __init__(self, n_neighbors=10, algo="auto"):
+ super().__init__()
+
+ self.neighbors = n_neighbors
+ self.algorithm = algo
+
+ def execute(self):
+ """perform LOF(local outlier factor) test and identify (if any) the outlier"""
+ if self.input_is_empty():
+ return []
+
+ checker = neighbor.LocalOutlierFactor(n_neighbors=self.neighbors, algorithm=self.algorithm)
+
+ arr_2d = np.reshape(self.list, (len(self.list), 1))
+ res = checker.fit_predict(arr_2d)
+
+ print(f"The negative outlier factor is:{checker.negative_outlier_factor_}")
+ return res
+
+ def set_params(self, params):
+ super().set_params(params)
+
+ if "neighbors" in params: # todo check value range
+ self.neighbors = int(params["neighbors"])
+ if "algorithm" in params:
+ self.algorithm = params["algorithm"]
+
+ def get_params(self):
+ return {"neighbors": self.neighbors, "algorithm": self.algorithm}
diff --git a/tools/tdgpt/taosanalytics/algo/ad/shesd.py b/tools/tdgpt/taosanalytics/algo/ad/shesd.py
new file mode 100644
index 0000000000..e105743bb5
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/ad/shesd.py
@@ -0,0 +1,44 @@
+# encoding:utf-8
+"""shesd algorithm class definition"""
+
+from pandas import Series
+from pyculiarity import detect_vec
+from taosanalytics.service import AbstractAnomalyDetectionService
+
+
+class _SHESDService(AbstractAnomalyDetectionService):
+ """ s-h-esd algorithm is to check the anomaly data in the input list """
+ name = 'shesd'
+ desc = ""
+
+ def __init__(self, n_period=0, direction="both", anoms=0.05):
+ super().__init__()
+
+ self.period = n_period
+ self.direction = direction
+ self.max_anoms = anoms
+
+ def execute(self):
+ """perform SHESD test and identify (if any) the outlier"""
+ if self.input_is_empty():
+ return []
+
+ results = detect_vec(Series(self.list), max_anoms=self.max_anoms, direction=self.direction,
+ period=self.period)
+
+ res_val = results['anoms']['anoms']
+
+ return [-1 if k in set(res_val) else 1 for k in self.list]
+
+ def set_params(self, params):
+ super().set_params(params)
+
+ if "period" in params: # todo check value range
+ self.period = int(params["period"])
+ if "direction" in params:
+ self.direction = params["direction"]
+ if "max_anoms" in params:
+ self.max_anoms = float(params["max_anoms"])
+
+ def get_params(self):
+ return {"period": self.period, "direction": self.direction, "max_anoms": self.max_anoms}
diff --git a/tools/tdgpt/taosanalytics/algo/anomaly.py b/tools/tdgpt/taosanalytics/algo/anomaly.py
new file mode 100644
index 0000000000..5f04283c06
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/anomaly.py
@@ -0,0 +1,49 @@
+# encoding:utf-8
+# pylint: disable=c0103
+""" anomaly detection register/display functions """
+
+from matplotlib import pyplot as plt
+from taosanalytics.conf import app_logger, conf
+from taosanalytics.servicemgmt import loader
+
+
+def do_ad_check(input_list, ts_list, algo_name, params):
+ """ actual anomaly detection handler """
+ s = loader.get_service(algo_name)
+
+ if s is None:
+ s = loader.get_service("ksigma")
+
+ if s is None:
+ raise ValueError(f"failed to load {algo_name} or ksigma analysis service")
+
+ s.set_input_list(input_list, ts_list)
+ s.set_params(params)
+
+ res = s.execute()
+
+ n_error = abs(sum(filter(lambda x: x == -1, res)))
+ app_logger.log_inst.debug("There are %d in input, and %d anomaly points found: %s",
+ len(input_list),
+ n_error,
+ res)
+
+ draw_ad_results(input_list, res, algo_name)
+ return res
+
+
+def draw_ad_results(input_list, res, fig_name):
+ """ draw the detected anomaly points """
+
+    # if drawing results is disabled by configuration, do not visualize the anomaly detection result
+ if not conf.get_draw_result_option():
+ return
+
+ plt.clf()
+ for index, val in enumerate(res):
+ if val != -1:
+ continue
+ plt.scatter(index, input_list[index], marker='o', color='r', alpha=0.5, s=100, zorder=3)
+
+ plt.plot(input_list, label='sample')
+ plt.savefig(fig_name)
diff --git a/tools/tdgpt/taosanalytics/algo/fc/__init__.py b/tools/tdgpt/taosanalytics/algo/fc/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/tdgpt/taosanalytics/algo/fc/arima.py b/tools/tdgpt/taosanalytics/algo/fc/arima.py
new file mode 100644
index 0000000000..9e087a5e9e
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/fc/arima.py
@@ -0,0 +1,114 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""arima class definition"""
+import pmdarima as pm
+
+from taosanalytics.algo.forecast import insert_ts_list
+from taosanalytics.conf import app_logger
+from taosanalytics.service import AbstractForecastService
+
+
+class _ArimaService(AbstractForecastService):
+ """ ARIMA algorithm is to do the fc in the input list """
+ name = "arima"
+ desc = "do time series data fc by using ARIMA model"
+
+ def __init__(self):
+ super().__init__()
+
+ self.diff = 0
+ self.start_p = 0
+ self.max_p = 10
+ self.start_q = 0
+ self.max_q = 10
+
+ def set_params(self, params):
+ super().set_params(params)
+
+ self.start_p = int(params['start_p']) if 'start_p' in params else 0
+ self.max_p = int(params['max_p']) if 'max_p' in params else 0
+ self.start_q = int(params['start_q']) if 'start_q' in params else 0
+ self.max_q = int(params['max_q']) if 'max_q' in params else 0
+
+ def get_params(self):
+ """ get the default value for fc algorithms """
+ p = super().get_params()
+ p.update(
+ {
+ "start_p": self.start_p, "max_p": self.max_p, "start_q": self.start_q,
+ "max_q": self.max_q, "diff": self.diff
+ }
+ )
+
+ return p
+
+ def __do_forecast_helper(self, fc_rows):
+ """ do arima fc """
+ # plot_acf(self.list, lags=25, title='raw_acf')
+ # plot_pacf(self.list, lags=25, title='raw_pacf')
+ # plt.show()
+
+ seasonal = self.period > 0
+
+ # Fit model
+ model = pm.auto_arima(self.list,
+ start_p=self.start_p,
+ start_q=self.start_q,
+ max_p=self.max_p,
+ max_q=self.max_q,
+ d=1,
+ m=self.period,
+ seasonal=seasonal,
+ start_P=0,
+ D=self.diff)
+
+ app_logger.log_inst.debug(model.summary())
+
+ # predict N steps into the future
+ fc = model.predict(n_periods=fc_rows, return_conf_int=self.return_conf,
+ alpha=self.conf)
+
+ # plt.plot(source_data, label='training')
+ # plt.plot(xrange, actual_data, label='actual')
+
+ # fc_list = fc.tolist()
+ # fc_without_diff = restore_from_diff(self.list, fc_list, 2)
+ # print(fc_without_diff)
+
+ # plt.plot(xrange, fc_without_diff, label='fc')
+
+ # residuals = pd.DataFrame(model.arima_res_.resid)
+ # wn = is_white_noise(residuals)
+ # print("residual is white noise:", wn)
+
+ # fig, ax = plt.subplots(1, 2)
+ # residuals.plot(title="Residuals", ax=ax[0])
+ # residuals.plot(kind='kde', title='Density', ax=ax[1])
+ # plt.show()
+
+ res1 = [fc[0].tolist(), fc[1][:, 0].tolist(),
+ fc[1][:, 1].tolist()] if self.return_conf else [fc.tolist()]
+
+ return (
+ res1,
+ model.arima_res_.mse,
+ f"SARIMAX{model.order}x{model.seasonal_order}"
+ )
+
+ def execute(self):
+ """ do fc the time series data"""
+
+ if self.list is None or len(self.list) < self.period:
+ raise ValueError("number of input data is less than the periods")
+
+ if self.fc_rows <= 0:
+ raise ValueError("fc rows is not specified yet")
+
+ res, mse, model_info = self.__do_forecast_helper(self.fc_rows)
+ insert_ts_list(res, self.start_ts, self.time_step, self.fc_rows)
+
+ return {
+ "mse": mse,
+ "model_info": model_info,
+ "res": res
+ }
diff --git a/tools/tdgpt/taosanalytics/algo/fc/holtwinters.py b/tools/tdgpt/taosanalytics/algo/fc/holtwinters.py
new file mode 100644
index 0000000000..d8225eaa5a
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/fc/holtwinters.py
@@ -0,0 +1,79 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""holt winters definition"""
+
+from statsmodels.tsa.holtwinters import ExponentialSmoothing, SimpleExpSmoothing
+
+from taosanalytics.algo.forecast import insert_ts_list
+from taosanalytics.service import AbstractForecastService
+
+
+class _HoltWintersService(AbstractForecastService):
+ """ Holt winters algorithm is to do the fc in the input list """
+ name = "holtwinters"
+ desc = "forecast algorithm by using exponential smoothing"
+
+ def __init__(self):
+ super().__init__()
+
+ self.trend_option = None
+ self.seasonal_option = None
+
+ def set_params(self, params):
+ super().set_params(params)
+
+ self.trend_option = params['trend'] if 'trend' in params else None
+
+ if self.trend_option is not None:
+ if self.trend_option not in ('add', 'mul'):
+ raise ValueError("trend parameter can only be 'mul' or 'add'")
+
+ self.seasonal_option = params['seasonal'] if 'seasonal' in params else None
+ if self.seasonal_option is not None:
+ if self.seasonal_option not in ('add', 'mul'):
+ raise ValueError("seasonal parameter can only be 'mul' or 'add'")
+
+ def get_params(self):
+ p = super().get_params()
+ p.update({'trend': self.trend_option, 'seasonal': self.seasonal_option})
+ return p
+
+ def __do_forecast_helper(self, source_data, fc_rows):
+ """ do holt winters impl """
+ if self.trend_option is None:
+ fitted_model = SimpleExpSmoothing(source_data).fit()
+ else:
+ if self.period == 0 or self.seasonal_option is None:
+                # no valid seasonal period, so there is no need to specify the seasonal parameters
+ fitted_model = ExponentialSmoothing(source_data, trend=self.trend_option).fit()
+ else: # seasonal attributes
+ fitted_model = ExponentialSmoothing(
+ source_data,
+ trend=self.trend_option,
+ seasonal=self.seasonal_option,
+ seasonal_periods=self.period
+ ).fit()
+
+ fc = fitted_model.forecast(fc_rows)
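+        # note: this implementation does not compute a real confidence interval; when
+        # requested, the point forecast is repeated so that the result keeps the same
+        # [values, lower, upper] layout as the other forecast services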
+
+ if self.return_conf:
+ return [fc.tolist(), fc.tolist(), fc.tolist()], fitted_model.sse
+ else:
+ return [fc.tolist()], fitted_model.sse
+
+ def execute(self):
+ """ do fc the time series data"""
+ if self.list is None or len(self.list) < self.period:
+ raise ValueError("number of input data is less than the periods")
+
+ if self.fc_rows <= 0:
+ raise ValueError("fc rows is not specified yet")
+
+ res, mse = self.__do_forecast_helper(self.list, self.fc_rows)
+ insert_ts_list(res, self.start_ts, self.time_step, self.fc_rows)
+
+ # add the conf range if required
+ return {
+ "mse": mse,
+ "res": res
+ }
diff --git a/tools/tdgpt/taosanalytics/algo/forecast.py b/tools/tdgpt/taosanalytics/algo/forecast.py
new file mode 100644
index 0000000000..e1e321a7b0
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/algo/forecast.py
@@ -0,0 +1,110 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""forecast helper methods"""
+
+import numpy as np
+import pandas as pd
+from matplotlib import pyplot as plt
+
+from taosanalytics.conf import app_logger, conf
+from taosanalytics.servicemgmt import loader
+
+
+def do_forecast(input_list, ts_list, algo_name, params):
+ """ data fc handler """
+ s = loader.get_service(algo_name)
+
+ if s is None:
+ s = loader.get_service("holtwinters")
+
+ if s is None:
+ raise ValueError(f"failed to load {algo_name} or holtwinters analysis service")
+
+ s.set_input_list(input_list, ts_list)
+ s.set_params(params)
+
+ app_logger.log_inst.debug("start to do forecast")
+ res = s.execute()
+
+ app_logger.log_inst.debug("forecast done")
+
+ res["period"] = s.period
+ res["algo"] = algo_name
+
+ check_fc_results(res)
+
+ fc = res["res"]
+ draw_fc_results(input_list, len(fc) > 2, fc, len(fc[0]), algo_name)
+ return res
+
+
+def do_add_fc_params(params, json_obj):
+ """ add params into parameters """
+ if "forecast_rows" in json_obj:
+ params["fc_rows"] = int(json_obj["forecast_rows"])
+
+ if "start" in json_obj:
+ params["start_ts"] = int(json_obj["start"])
+
+ if "every" in json_obj:
+ params["time_step"] = int(json_obj["every"])
+
+ if "conf" in json_obj:
+ params["conf"] = int(json_obj["conf"])
+
+ if "return_conf" in json_obj:
+ params["return_conf"] = int(json_obj["return_conf"])
+
+
+def insert_ts_list(res, start_ts, time_step, fc_rows):
+ """ insert the ts list before return results """
+ ts_list = [start_ts + i * time_step for i in range(fc_rows)]
+ res.insert(0, ts_list)
+ return res
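+
+
+# after insert_ts_list() the forecast result list is laid out as: res[0] timestamps,
+# res[1] forecast values and, when return_conf is set, res[2]/res[3] the lower/upper
+# confidence bounds; draw_fc_results() and check_fc_results() rely on this layout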
+
+
+def draw_fc_results(input_list, return_conf, fc, n_rows, fig_name):
+ """Visualize the forecast results """
+    # if drawing results is disabled by configuration, do not visualize the forecast result
+ if not conf.get_draw_result_option():
+ return
+
+    app_logger.log_inst.debug('draw forecast result in debug mode')
+ plt.clf()
+
+ x = np.arange(len(input_list), len(input_list) + n_rows, 1)
+
+ # draw the range of conf
+ if return_conf:
+ lower_series = pd.Series(fc[2], index=x)
+ upper_series = pd.Series(fc[3], index=x)
+
+ plt.fill_between(lower_series.index, lower_series, upper_series, color='k', alpha=.15)
+
+ plt.plot(input_list)
+ plt.plot(x, fc[1], c='blue')
+ plt.savefig(fig_name)
+
+ app_logger.log_inst.debug("draw results completed in debug model")
+
+
+def check_fc_results(res):
+ app_logger.log_inst.debug("start to check forecast result")
+
+ if "res" not in res:
+ raise ValueError("forecast result is empty")
+
+ fc = res["res"]
+ if len(fc) < 2:
+ raise ValueError("result length should greater than or equal to 2")
+
+ n_rows = len(fc[0])
+ if n_rows != len(fc[1]):
+ raise ValueError("result length is not identical, ts rows:%d res rows:%d" % (
+ n_rows, len(fc[1])))
+
+ if len(fc) > 2 and (len(fc[2]) != n_rows or len(fc[3]) != n_rows):
+ raise ValueError(
+ "result length is not identical in confidence, ts rows:%d, lower confidence rows:%d, "
+ "upper confidence rows%d" %
+ (n_rows, len(fc[2]), len(fc[3])))
diff --git a/tools/tdgpt/taosanalytics/app.py b/tools/tdgpt/taosanalytics/app.py
new file mode 100644
index 0000000000..682bce012c
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/app.py
@@ -0,0 +1,163 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""the main route definition for restful service"""
+import os.path, sys
+
+sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../")
+
+from flask import Flask, request
+
+from taosanalytics.algo.anomaly import do_ad_check
+from taosanalytics.algo.forecast import do_forecast, do_add_fc_params
+from taosanalytics.conf import conf
+from taosanalytics.model import get_avail_model
+from taosanalytics.servicemgmt import loader
+from taosanalytics.util import app_logger, validate_pay_load, get_data_index, get_ts_index, is_white_noise, \
+ parse_options, convert_results_to_windows
+
+app = Flask(__name__)
+
+# load all the algorithms
+app_logger.set_handler(conf.get_log_path())
+app_logger.set_log_level(conf.get_log_level())
+loader.load_all_service()
+
+
+@app.route("/")
+def start():
+ """ default rsp """
+ return "TDengine© Time Series Data Analytics Platform (ver 1.0.1)"
+
+
+@app.route("/status")
+def server_status():
+ """ return server status """
+ return {
+ 'protocol': 1.0,
+ 'status': 'ready'
+ }
+
+
+@app.route("/list")
+def list_all_services():
+ """
+ API function to return all available services, including both fc and anomaly detection
+ """
+ return loader.get_service_list()
+
+
+@app.route("/models")
+def list_all_models():
+ """ list all available models """
+ return get_avail_model()
+
+
+@app.route("/anomaly-detect", methods=['POST'])
+def handle_ad_request():
+ """handle the anomaly detection requests"""
+ app_logger.log_inst.info('recv ad request from %s', request.remote_addr)
+ app_logger.log_inst.debug('req payload: %s', request.json)
+
+ algo = request.json["algo"].lower() if "algo" in request.json else "ksigma"
+
+ # 1. validate the input data in json format
+ try:
+ validate_pay_load(request.json)
+ except ValueError as e:
+ return {"msg": str(e), "rows": -1}
+
+ payload = request.json["data"]
+
+ # 2. white noise data check
+ wn_check = request.json["wncheck"] if "wncheck" in request.json else 1
+
+ data_index = get_data_index(request.json["schema"])
+ ts_index = get_ts_index(request.json["schema"])
+
+ if wn_check:
+ try:
+ data = payload[data_index]
+ if is_white_noise(data):
+ app_logger.log_inst.debug("wn data, not process")
+ return {"msg": "white noise can not be check", "rows": -1}
+ except Exception as e:
+ return {"msg": str(e), "rows": -1}
+
+ # 3. parse the options for different ad services
+ # the default options is like following: "algo=ksigma,k=2,invalid_option=44"
+ options = request.json["option"] if "option" in request.json else None
+ params = parse_options(options)
+
+ # 4. do anomaly detection
+ try:
+ res_list = do_ad_check(payload[data_index], payload[ts_index], algo, params)
+ ano_window = convert_results_to_windows(res_list, payload[ts_index])
+
+ result = {"algo": algo, "option": options, "res": ano_window, "rows": len(ano_window)}
+ app_logger.log_inst.debug("anomaly-detection result: %s", str(result))
+
+ return result
+
+ except Exception as e:
+ result = {"res": {}, "rows": 0, "msg": str(e)}
+ app_logger.log_inst.error("failed to do anomaly-detection, %s", str(e))
+
+ return result
+
+
+@app.route("/forecast", methods=['POST'])
+def handle_forecast_req():
+ """handle the fc request """
+ app_logger.log_inst.info('recv fc from %s', request.remote_addr)
+ app_logger.log_inst.debug('req payload: %s', request.json)
+
+ # holt-winters by default
+ algo = request.json['algo'].lower() if 'algo' in request.json else 'holtwinters'
+
+ # 1. validate the input data in json format
+ try:
+ validate_pay_load(request.json)
+ except ValueError as e:
+ app_logger.log_inst.error('validate req json failed, %s', e)
+ return {"msg": str(e), "rows": -1}
+
+ payload = request.json["data"]
+
+ # 2. white noise data check
+ wn_check = request.json["wncheck"] if "wncheck" in request.json else 1
+ data_index = get_data_index(request.json["schema"])
+ ts_index = get_ts_index(request.json["schema"])
+
+ if wn_check:
+ try:
+ data = payload[data_index]
+ if is_white_noise(data):
+ app_logger.log_inst.debug("%s wn data, not process", data)
+ return {"msg": "white noise can not be check", "rows": -1}
+ except Exception as e:
+ return {"msg": str(e), "rows": -1}
+
+ options = request.json["option"] if "option" in request.json else None
+ params = parse_options(options)
+
+ try:
+ do_add_fc_params(params, request.json)
+ except ValueError as e:
+ app_logger.log_inst.error("invalid fc params: %s", e)
+ return {"msg": f"{e}", "rows": -1}
+
+ try:
+ res1 = do_forecast(payload[data_index], payload[ts_index], algo, params)
+ res = {"option": options, "rows": params["fc_rows"]}
+ res.update(res1)
+
+ app_logger.log_inst.debug("forecast result: %s", res)
+
+ return res
+ except Exception as e:
+ app_logger.log_inst.error('forecast failed, %s', str(e))
+ return {"msg": str(e), "rows": -1}
+
+
+if __name__ == '__main__':
+ app.run()
diff --git a/tools/tdgpt/taosanalytics/conf.py b/tools/tdgpt/taosanalytics/conf.py
new file mode 100644
index 0000000000..c255b8e258
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/conf.py
@@ -0,0 +1,105 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""configuration model definition"""
+import configparser
+import logging
+
+_ANODE_SECTION_NAME = "taosanode"
+
+
+class Configure:
+ """ configuration class """
+
+ def __init__(self, conf_path="/etc/taos/taosanode.ini"):
+ self.path = None
+
+ self._log_path = 'taosanode.app.log'
+ self._log_level = logging.INFO
+ self._model_directory = '/var/lib/taos/taosanode/model/'
+ self._draw_result = 0
+
+ self.conf = configparser.ConfigParser()
+ self.reload(conf_path)
+
+ def get_log_path(self) -> str:
+ """ return log file full path """
+ return self._log_path
+
+ def get_log_level(self):
+ """ return the log level specified by configuration file """
+ return self._log_level
+
+ def get_model_directory(self):
+ """ return model directory """
+ return self._model_directory
+
+ def get_draw_result_option(self):
+ """ get the option for draw results or not"""
+ return self._draw_result
+
+ def reload(self, new_path: str):
+ """ load the info from config file """
+ self.path = new_path
+
+ self.conf.read(self.path)
+
+ if self.conf.has_option(_ANODE_SECTION_NAME, 'app-log'):
+ self._log_path = self.conf.get(_ANODE_SECTION_NAME, 'app-log')
+
+ if self.conf.has_option(_ANODE_SECTION_NAME, 'log-level'):
+ log_level = self.conf.get(_ANODE_SECTION_NAME, 'log-level')
+
+ log_flag = {
+ 'DEBUG': logging.DEBUG, 'INFO': logging.INFO, 'CRITICAL': logging.CRITICAL,
+ 'ERROR': logging.ERROR, 'WARN': logging.WARN
+ }
+
+ if log_level.upper() in log_flag:
+ self._log_level = log_flag[log_level.upper()]
+ else:
+ self._log_level = logging.INFO
+
+ if self.conf.has_option(_ANODE_SECTION_NAME, 'model-dir'):
+ self._model_directory = self.conf.get(_ANODE_SECTION_NAME, 'model-dir')
+
+ if self.conf.has_option(_ANODE_SECTION_NAME, 'draw-result'):
+ self._draw_result = self.conf.get(_ANODE_SECTION_NAME, 'draw-result')
+
+
+class AppLogger():
+ """ system log_inst class """
+ LOG_STR_FORMAT = '%(asctime)s - %(threadName)s - %(levelname)s - %(message)s'
+
+ def __init__(self):
+ self.log_inst = logging.getLogger(__name__)
+ self.log_inst.setLevel(logging.INFO)
+
+ def set_handler(self, file_path: str):
+ """ set the log_inst handler """
+
+ handler = logging.FileHandler(file_path)
+ handler.setFormatter(logging.Formatter(self.LOG_STR_FORMAT))
+
+ self.log_inst.addHandler(handler)
+
+ def set_log_level(self, log_level):
+ """adjust log level"""
+ try:
+ self.log_inst.setLevel(log_level)
+ self.log_inst.info("set log level:%d", log_level)
+ except ValueError as e:
+ self.log_inst.error("failed to set log level: %d, %s", log_level, str(e))
+
+
+conf = Configure()
+app_logger = AppLogger()
+
+
+def setup_log_info(name: str):
+ """ prepare the log info for unit test """
+ app_logger.set_handler(name)
+
+ try:
+ app_logger.set_log_level(logging.DEBUG)
+ except ValueError as e:
+        print(f"set log level failed: {e}")
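A quick sketch of how the Configure class above maps onto an ini file. The [taosanode] option names (app-log, log-level, model-dir, draw-result) come from reload(); the temporary file is only for illustration, the default path being /etc/taos/taosanode.ini.

```python
# Sketch: feed Configure a throw-away ini file and read the parsed values back.
import tempfile
from taosanalytics.conf import Configure

INI = """\
[taosanode]
app-log = /var/log/taos/taosanode.app.log
log-level = DEBUG
model-dir = /var/lib/taos/taosanode/model/
draw-result = 0
"""

with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as f:
    f.write(INI)
    ini_path = f.name

cfg = Configure(conf_path=ini_path)
print(cfg.get_log_path())    # /var/log/taos/taosanode.app.log
print(cfg.get_log_level())   # 10, i.e. logging.DEBUG
```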
diff --git a/tools/tdgpt/taosanalytics/misc/__init__.py b/tools/tdgpt/taosanalytics/misc/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/tdgpt/taosanalytics/model.py b/tools/tdgpt/taosanalytics/model.py
new file mode 100644
index 0000000000..6efd85544e
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/model.py
@@ -0,0 +1,22 @@
+# encoding:utf-8
+# pylint: disable=c0103
+
+def get_avail_model():
+ return [
+ {
+ "name": "ad_encoder_keras",
+ "algo": "auto-encoder",
+ "type": "anomaly-detection",
+ "src-table": "*",
+ "build-time": "2024-10-07 13:21:44"
+ }
+ ]
+
+
+def train_model():
+ pass
+
+
+if __name__ == '__main__':
+ a = get_avail_model()
+ print(a)
diff --git a/tools/tdgpt/taosanalytics/service.py b/tools/tdgpt/taosanalytics/service.py
new file mode 100644
index 0000000000..79244aae8c
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/service.py
@@ -0,0 +1,110 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""main service module"""
+from abc import abstractmethod, ABC
+
+
+class AnalyticsService:
+    """ Analytics root class that defines the service interface"""
+
+ @abstractmethod
+ def execute(self):
+ """ the main execute method to perform fc or anomaly detection """
+
+ def get_desc(self) -> str:
+ """algorithm description"""
+ return ""
+
+ def get_params(self) -> dict:
+        """return existing params"""
+ return {}
+
+
+class AbstractAnalyticsService(AnalyticsService, ABC):
+ """ abstract base analytics service class definition"""
+ name = ''
+ desc = ''
+
+ def __init__(self):
+ self.list = None
+ self.ts_list = None
+
+ def set_input_list(self, input_list: list, input_ts_list: list = None):
+ """ set the input list """
+ self.list = input_list
+ self.ts_list = input_ts_list
+
+ def set_params(self, params: dict) -> None:
+ """set the parameters for current algo """
+ if params is None:
+ return
+
+ if not isinstance(params, dict):
+ raise ValueError('invalid parameter type, only dict allowed')
+
+ def get_desc(self) -> str:
+ return self.desc
+
+
+class AbstractAnomalyDetectionService(AbstractAnalyticsService, ABC):
+    """ abstract anomaly detection service, all anomaly detection algorithm classes should
+    inherit from this class"""
+
+ def __init__(self):
+ super().__init__()
+ self.type = "anomaly-detection"
+
+ def input_is_empty(self):
+ """ check if the input list is empty or None """
+ return (self.list is None) or (len(self.list) == 0)
+
+
+class AbstractForecastService(AbstractAnalyticsService, ABC):
+    """abstract forecast service, all forecast algorithm classes should inherit from
+    this base class"""
+
+ def __init__(self):
+ super().__init__()
+ self.type = "forecast"
+
+ self.period = 0
+ self.start_ts = 0
+ self.time_step = 0
+ self.fc_rows = 0
+
+ self.return_conf = 1
+ self.conf = 0.05
+
+ def set_params(self, params: dict) -> None:
+ if not {'start_ts', 'time_step', 'fc_rows'}.issubset(params.keys()):
+ raise ValueError('params are missing, start_ts, time_step, fc_rows are all required')
+
+ self.start_ts = int(params['start_ts'])
+
+ self.time_step = int(params['time_step'])
+
+ if self.time_step <= 0:
+ raise ValueError('time_step should be greater than 0')
+
+ self.fc_rows = int(params['fc_rows'])
+
+ if self.fc_rows <= 0:
+            raise ValueError('fc_rows should be greater than 0')
+
+ self.period = int(params['period']) if 'period' in params else 0
+ if self.period < 0:
+            raise ValueError("period should not be negative")
+
+ self.conf = float(params['conf']) if 'conf' in params else 95
+
+ self.conf = 1.0 - self.conf / 100.0
+ if self.conf < 0 or self.conf >= 1.0:
+            raise ValueError("invalid value of conf, should be between 0 and 100")
+
+ self.return_conf = int(params['return_conf']) if 'return_conf' in params else 1
+
+ def get_params(self):
+ return {
+ "period": self.period, "start": self.start_ts, "every": self.time_step,
+ "forecast_rows": self.fc_rows, "return_conf": self.return_conf, "conf": self.conf
+ }
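To make the parameter contract of AbstractForecastService.set_params concrete, here is a small sketch. The _DemoForecast subclass is hypothetical and exists only so the abstract class can be instantiated; the required keys and the conf conversion (percent to significance level) are taken from the code above.

```python
# Sketch: the mandatory forecast parameters and the conf conversion.
# _DemoForecast is a hypothetical stub, not a shipped algorithm.
from taosanalytics.service import AbstractForecastService


class _DemoForecast(AbstractForecastService):
    name = "demo_fc"

    def execute(self):
        return {"res": [[]]}


s = _DemoForecast()
# start_ts, time_step and fc_rows are mandatory; conf is given in percent
# and converted internally (95 -> 0.05)
s.set_params({"start_ts": 1577808096000, "time_step": 1000,
              "fc_rows": 10, "period": 12, "conf": 95})
print(s.get_params())
# {'period': 12, 'start': 1577808096000, 'every': 1000,
#  'forecast_rows': 10, 'return_conf': 1, 'conf': 0.05}
```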
diff --git a/tools/tdgpt/taosanalytics/servicemgmt.py b/tools/tdgpt/taosanalytics/servicemgmt.py
new file mode 100644
index 0000000000..5b20c73249
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/servicemgmt.py
@@ -0,0 +1,120 @@
+# encoding:utf-8
+"""load and return the available services"""
+import copy
+import importlib
+import inspect
+import os
+from collections import defaultdict
+from taosanalytics.conf import app_logger
+from taosanalytics.service import AbstractAnomalyDetectionService, AbstractForecastService
+
+os.environ['KERAS_BACKEND'] = 'torch'
+
+
+class AnalyticsServiceLoader:
+    """ Singleton registry for anomaly detection and forecast algorithms"""
+
+ def __init__(self):
+ self.services = defaultdict(list)
+
+ def get_service(self, name):
+ """ get the required service """
+ serv = self.services.get(name, [])[0] if self.services.get(name) else None
+ return copy.copy(serv)
+
+ def get_typed_services(self, type_str: str) -> list:
+ """ get specified type service """
+ all_items = []
+ for key, val in self.services.items():
+ if val[0].type == type_str:
+ try:
+ one = {"name": key, "desc": val[0].get_desc(), "params": val[0].get_params()}
+ all_items.append(one)
+ except AttributeError as e:
+                    app_logger.log_inst.error("failed to get service: %s info, reason: %s", key, e)
+
+ return all_items
+
+ def get_service_list(self):
+ """ return all available service info """
+ info = {
+ "protocol": 1.0,
+ "version": 0.1,
+ "details": [
+ self.get_forecast_algo_list(),
+ self.get_anomaly_detection_algo_list()
+ ]
+ }
+
+ return info
+
+ def get_anomaly_detection_algo_list(self):
+        """ get all available anomaly detection services """
+ return {
+ "type": "anomaly-detection",
+ "algo": self.get_typed_services("anomaly-detection")
+ }
+
+ def get_forecast_algo_list(self):
+        """ get all available forecast services """
+ return {
+ "type": "forecast",
+ "algo": self.get_typed_services("forecast")
+ }
+
+ def load_all_service(self) -> None:
+ """ load all algorithms in the specified directory"""
+
+ def register_service(container, name: str, service):
+ """ register service for both anomaly detection and fc """
+ app_logger.log_inst.info("register service: %s", name)
+ container[name].append(service)
+
+ def do_load_service(cur_directory, lib_prefix, sub_directory):
+ """ the implementation of load services """
+ service_directory = cur_directory + sub_directory
+
+ if not os.path.exists(service_directory):
+ app_logger.log_inst.fatal(
+                "service directory:%s does not exist, failed to load service",
+ service_directory)
+ raise FileNotFoundError(f"service directory:{service_directory} not found")
+
+ all_files = os.listdir(service_directory)
+
+ for item in all_files:
+ if item in ('__init__.py', '__pycache__') or not item.endswith('py'):
+ continue
+
+ full_path = os.path.join(service_directory, item)
+ if os.path.isdir(full_path):
+ continue
+
+ # do load algorithm
+ name = lib_prefix + item.split('.')[0]
+ module = importlib.import_module(name)
+
+ app_logger.log_inst.info("load algorithm:%s", name)
+
+ for (class_name, _) in inspect.getmembers(module, inspect.isclass):
+
+ if class_name in (
+ AbstractAnomalyDetectionService.__name__,
+ AbstractForecastService.__name__
+ ) or (not class_name.startswith('_')):
+ continue
+
+ algo_cls = getattr(module, class_name)
+
+ if algo_cls is not None:
+ obj = algo_cls()
+ register_service(self.services, algo_cls.name, obj)
+
+ # start to load all services
+ current_directory = os.path.dirname(os.path.abspath(__file__))
+
+ do_load_service(current_directory, 'taosanalytics.algo.ad.', '/algo/ad/')
+ do_load_service(current_directory, 'taosanalytics.algo.fc.', '/algo/fc/')
+
+
+loader: AnalyticsServiceLoader = AnalyticsServiceLoader()
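Because load_all_service() keeps only classes whose names start with an underscore (the two abstract bases and all public names are skipped), a pluggable detector dropped under algo/ad/ would look roughly like the hypothetical module below. The -1/1 result convention follows convert_results_to_windows and the unit tests in this patch.

```python
# Hypothetical taosanalytics/algo/ad/demo.py -- picked up automatically by the
# loader above: the class name starts with '_' and it inherits the abstract base.
from taosanalytics.service import AbstractAnomalyDetectionService


class _DemoThresholdService(AbstractAnomalyDetectionService):
    """toy detector: flag every value above a fixed threshold"""
    name = "demo_threshold"   # key used by loader.get_service("demo_threshold")
    desc = "toy threshold detector"

    def __init__(self):
        super().__init__()
        self.threshold = 100.0

    def set_params(self, params: dict) -> None:
        super().set_params(params)
        if params is not None and "threshold" in params:
            self.threshold = float(params["threshold"])

    def execute(self):
        if self.input_is_empty():
            return []
        # convention in this code base: -1 marks an anomaly, 1 a normal point
        return [-1 if v > self.threshold else 1 for v in self.list]
```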
diff --git a/tools/tdgpt/taosanalytics/test/__init__.py b/tools/tdgpt/taosanalytics/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tools/tdgpt/taosanalytics/test/anomaly_test.py b/tools/tdgpt/taosanalytics/test/anomaly_test.py
new file mode 100644
index 0000000000..f44a7f0d52
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/test/anomaly_test.py
@@ -0,0 +1,170 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""anomaly detection unit test"""
+import unittest, sys, os.path
+import pandas as pd
+
+sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../")
+
+from taosanalytics.algo.anomaly import draw_ad_results
+from taosanalytics.conf import setup_log_info, app_logger
+from taosanalytics.servicemgmt import loader
+
+
+class AnomalyDetectionTest(unittest.TestCase):
+ """ anomaly detection unit test class"""
+ input_list = [5, 14, 15, 15, 14, 19, 17, 16, 20, 22, 8, 21, 28, 11, 9, 29, 40]
+ large_list = [
+ 13, 14, 8, 10, 16, 26, 32, 27, 18, 32, 36, 24,
+ 22, 23, 22, 18, 25, 21, 21, 14, 8, 11, 14, 23,
+ 18, 17, 19, 20, 22, 19, 13, 26, 13, 14, 22, 24,
+ 21, 22, 26, 21, 23, 24, 27, 41, 31, 27, 35, 26,
+ 28, 36, 39, 21, 17, 22, 17, 19, 15, 34, 10, 15,
+ 22, 18, 15, 20, 15, 22, 19, 16, 30, 27, 29, 23,
+ 20, 16, 21, 21, 25, 16, 18, 15, 18, 14, 10, 15,
+ 8, 15, 6, 11, 8, 7, 13, 10, 23, 16, 15, 25,
+ 22, 20, 16
+ ]
+
+ @classmethod
+ def setUpClass(cls):
+ """ set up environment for unit test, set the log file path """
+ setup_log_info("unit_test.log")
+ loader.load_all_service()
+
+ def test_ksigma(self):
+ """
+ Test the ksigma algorithm for anomaly detection. This test case verifies the
+ functionality of the ksigma algorithm by setting up the input data,
+ executing the algorithm, and asserting the expected results.
+ """
+
+ s = loader.get_service("ksigma")
+ s.set_input_list(AnomalyDetectionTest.input_list, None)
+ s.set_params({"k": 2})
+
+ r = s.execute()
+ draw_ad_results(AnomalyDetectionTest.input_list, r, "ksigma")
+
+ self.assertEqual(r[-1], -1)
+ self.assertEqual(len(r), len(AnomalyDetectionTest.input_list))
+
+ def test_iqr(self):
+ """
+ Test the IQR(Interquartile Range) algorithm for anomaly detection. This test case verifies the functionality
+ of the IQR algorithm by setting up the input data, executing the algorithm, and asserting the expected results.
+ """
+
+ s = loader.get_service("iqr")
+ s.set_input_list(AnomalyDetectionTest.input_list, None)
+
+ try:
+ s.set_params({"k": 2})
+ except ValueError as e:
+            self.fail(e)
+
+ r = s.execute()
+ draw_ad_results(AnomalyDetectionTest.input_list, r, "iqr")
+
+ self.assertEqual(r[-1], -1)
+ self.assertEqual(len(r), len(AnomalyDetectionTest.input_list))
+
+ def test_grubbs(self):
+ """
+ Test the Grubbs algorithm for anomaly detection.
+
+ This test case verifies the functionality of the Grubbs algorithm by setting up the input data,
+ executing the algorithm, and asserting the expected results.
+ """
+
+ s = loader.get_service("grubbs")
+ s.set_input_list(AnomalyDetectionTest.input_list, None)
+ s.set_params({"alpha": 0.95})
+
+ r = s.execute()
+ draw_ad_results(AnomalyDetectionTest.input_list, r, "grubbs")
+
+ self.assertEqual(r[-1], -1)
+ self.assertEqual(len(r), len(AnomalyDetectionTest.input_list))
+
+ def test_shesd(self):
+ """
+ Test the SHESD (Seasonal Hybrid ESD) algorithm for anomaly detection.
+
+ This test case verifies the functionality of the SHESD algorithm by setting up the input data,
+ executing the algorithm, and asserting the expected results.
+ """
+
+ s = loader.get_service("shesd")
+ s.set_params({"period": 3})
+ s.set_input_list(AnomalyDetectionTest.input_list, None)
+
+ r = s.execute()
+ draw_ad_results(AnomalyDetectionTest.input_list, r, "shesd")
+
+ self.assertEqual(r[-1], -1)
+
+ def test_lof(self):
+ """
+ Test the LOF (Local Outlier Factor) algorithm for anomaly detection.
+
+ This test case verifies the functionality of the LOF algorithm by setting up the input data,
+ executing the algorithm, and asserting the expected results.
+ """
+ s = loader.get_service("lof")
+ s.set_params({"period": 3})
+ s.set_input_list(AnomalyDetectionTest.input_list, None)
+
+ r = s.execute()
+ draw_ad_results(AnomalyDetectionTest.input_list, r, "lof")
+
+ self.assertEqual(r[-1], -1)
+ self.assertEqual(r[-2], -1)
+
+ def test_multithread_safe(self):
+ """ Test the multithread safe function"""
+ s1 = loader.get_service("shesd")
+ s2 = loader.get_service("shesd")
+
+ s1.set_params({"period": 3})
+ self.assertNotEqual(s1.period, s2.period)
+
+ def __load_remote_data_for_ad(self):
+ """load the remote data for anomaly detection"""
+
+ url = ("https://raw.githubusercontent.com/numenta/NAB/master/data/artificialWithAnomaly/"
+ "art_daily_jumpsup.csv")
+
+ remote_data = pd.read_csv(url, parse_dates=True, index_col="timestamp")
+ k = remote_data.values.ravel().tolist()
+ return k
+
+ def test_autoencoder_ad(self):
+ """for local test only, disabled it in github action"""
+ pass
+
+ # data = self.__load_remote_data_for_ad()
+ #
+ # s = loader.get_service("ad_encoder")
+ # s.set_input_list(data)
+ #
+ # try:
+ # s.set_params({"model": "ad_encoder_"})
+ # except ValueError as e:
+ # app_logger.log_inst.error(f"failed to set the param for auto_encoder algorithm, reason:{e}")
+ # return
+ #
+ # r = s.execute()
+ #
+ # num_of_error = -(sum(filter(lambda x: x == -1, r)))
+ # self.assertEqual(num_of_error, 109)
+ #
+ # draw_ad_results(data, r, "autoencoder")
+
+ def test_get_all_services(self):
+ """Test get all services"""
+ loader.get_anomaly_detection_algo_list()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/tdgpt/taosanalytics/test/forecast_test.py b/tools/tdgpt/taosanalytics/test/forecast_test.py
new file mode 100644
index 0000000000..1e4874b8c8
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/test/forecast_test.py
@@ -0,0 +1,115 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""forecast unit test cases"""
+
+import unittest, os.path, sys
+import pandas as pd
+
+sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../")
+
+from taosanalytics.algo.forecast import draw_fc_results
+from taosanalytics.conf import setup_log_info
+from taosanalytics.servicemgmt import loader
+
+
+class ForecastTest(unittest.TestCase):
+ """forecast unit test cases"""
+
+ @classmethod
+ def setUpClass(cls):
+ """ set up the environment for unit test """
+ setup_log_info("unit_test.log")
+ loader.load_all_service()
+
+ def get_input_list(self):
+ """ load data from csv """
+ url = ('https://raw.githubusercontent.com/jbrownlee/Datasets/refs/heads/master/'
+ 'airline-passengers.csv')
+ data = pd.read_csv(url, index_col='Month', parse_dates=True)
+
+ ts_list = data[['Passengers']].index.tolist()
+ dst_list = [int(item.timestamp()) for item in ts_list]
+
+ return data[['Passengers']].values.tolist(), dst_list
+
+ def test_holt_winters_forecast(self):
+ """ test holt winters forecast with invalid and then valid parameters"""
+ s = loader.get_service("holtwinters")
+ data, ts = self.get_input_list()
+
+ s.set_input_list(data, ts)
+ self.assertRaises(ValueError, s.execute)
+
+ s.set_params({"fc_rows": 10, "start_ts": 171000000, "time_step": 86400 * 30})
+
+ r = s.execute()
+ draw_fc_results(data, len(r["res"]) > 2, r["res"], len(r["res"][0]), "holtwinters")
+
+ def test_holt_winters_forecast_2(self):
+ """test holt winters with valid parameters"""
+ s = loader.get_service("holtwinters")
+ data, ts = self.get_input_list()
+
+ s.set_input_list(data, ts)
+ s.set_params(
+ {
+ "fc_rows": 10, "trend": 'mul', "seasonal": 'mul', "start_ts": 171000000,
+ "time_step": 86400 * 30, "period": 12
+ }
+ )
+
+ r = s.execute()
+
+ draw_fc_results(data, len(r["res"]) > 2, r["res"], len(r["res"][0]), "holtwinters")
+
+ def test_holt_winter_invalid_params(self):
+ """parameters validation check"""
+ s = loader.get_service("holtwinters")
+
+ self.assertRaises(ValueError, s.set_params, {"trend": "mul"})
+
+ self.assertRaises(ValueError, s.set_params, {"trend": "mul"})
+
+ self.assertRaises(ValueError, s.set_params, {"trend": "mul", "fc_rows": 10})
+
+ self.assertRaises(ValueError, s.set_params, {"trend": "multi"})
+
+ self.assertRaises(ValueError, s.set_params, {"seasonal": "additive"})
+
+ self.assertRaises(ValueError, s.set_params, {
+ "fc_rows": 10, "trend": 'multi', "seasonal": 'addi', "start_ts": 171000000,
+ "time_step": 86400 * 30, "period": 12}
+ )
+
+ self.assertRaises(ValueError, s.set_params,
+ {"fc_rows": 10, "trend": 'mul', "seasonal": 'add', "time_step": 86400 * 30, "period": 12}
+ )
+
+ s.set_params({"fc_rows": 10, "start_ts": 171000000, "time_step": 86400 * 30})
+
+ self.assertRaises(ValueError, s.set_params, {"fc_rows": 'abc', "start_ts": 171000000, "time_step": 86400 * 30})
+
+ self.assertRaises(ValueError, s.set_params, {"fc_rows": 10, "start_ts": "aaa", "time_step": "30"})
+
+ self.assertRaises(ValueError, s.set_params, {"fc_rows": 10, "start_ts": 171000000, "time_step": 0})
+
+ def test_arima(self):
+ """arima algorithm check"""
+ s = loader.get_service("arima")
+ data, ts = self.get_input_list()
+
+ s.set_input_list(data, ts)
+ self.assertRaises(ValueError, s.execute)
+
+ s.set_params(
+ {"fc_rows": 10, "start_ts": 171000000, "time_step": 86400 * 30, "period": 12,
+ "start_p": 0, "max_p": 10, "start_q": 0, "max_q": 10}
+ )
+ r = s.execute()
+
+ rows = len(r["res"][0])
+ draw_fc_results(data, len(r["res"]) > 1, r["res"], rows, "arima")
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/tdgpt/taosanalytics/test/install_test.py b/tools/tdgpt/taosanalytics/test/install_test.py
new file mode 100644
index 0000000000..9c5aa9238f
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/test/install_test.py
@@ -0,0 +1,27 @@
+"""perform the build release package and install and then test the restful service"""
+
+import unittest
+import os
+
+
+class ForecastTest(unittest.TestCase):
+
+ def test_release(self):
+ """ test the package """
+ pass
+
+ # print("build install package")
+ # os.system("../../script/release.sh")
+ # print("build completed")
+ #
+ # self.assertEqual(os.path.exists("../../release/TDengine-enterprise-anode-1.0.0.tar.gz"), 1)
+
+ def test_install(self):
+ """ test """
+ pass
+
+ # print("start to install package")
+ # os.system("tar zxvf ../../release/TDengine-enterprise-anode-1.0.0.tar.gz")
+ # os.chdir("../../release/TDengine-enterprise-anode-1.0.0/")
+ #
+ # os.system("./install.sh")
diff --git a/tools/tdgpt/taosanalytics/test/restful_api_test.py b/tools/tdgpt/taosanalytics/test/restful_api_test.py
new file mode 100644
index 0000000000..6463343e00
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/test/restful_api_test.py
@@ -0,0 +1,259 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""flask restful api test module"""
+
+import sys, os.path
+
+sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../")
+
+from flask_testing import TestCase
+from taosanalytics.app import app
+from taosanalytics.conf import setup_log_info
+
+
+class RestfulTest(TestCase):
+ """ restful api test class """
+
+ def create_app(self):
+ app.testing = True
+ setup_log_info("restfull_test.log")
+ return app
+
+ def test_access_main_page(self):
+        """ test access to the default main page """
+ response = self.client.get('/')
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.content_length, len("TDengine© Time Series Data Analytics Platform (ver 1.0.1)") + 1)
+
+ def test_load_status(self):
+ """ test load the server status """
+ response = self.client.get('/status')
+ self.assertEqual(response.status_code, 200)
+ res = response.json
+
+ self.assertEqual(res['protocol'], 1.0)
+ self.assertEqual(res['status'], 'ready')
+
+ def test_load_algos(self):
+ """ test load provided algos"""
+ response = self.client.get('/list')
+ self.assertEqual(response.status_code, 200)
+
+ res = response.json
+ self.assertEqual(res['version'], 0.1)
+ self.assertEqual(res['protocol'], 1.0)
+
+ d = res['details']
+ self.assertEqual(len(d), 2)
+
+ def test_forecast(self):
+ """test forecast api"""
+ response = self.client.post("/forecast", json={
+ "schema": [
+ ["ts", "TIMESTAMP", 8],
+ ["val", "INT", 4]
+ ],
+ "data": [
+ [
+ 1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000, 1577808009000,
+ 1577808010000, 1577808011000, 1577808012000, 1577808013000, 1577808014000,
+ 1577808015000, 1577808016000, 1577808017000, 1577808018000, 1577808019000,
+ 1577808020000, 1577808021000, 1577808022000, 1577808023000, 1577808024000,
+ 1577808025000, 1577808026000, 1577808027000, 1577808028000, 1577808029000,
+ 1577808030000, 1577808031000, 1577808032000, 1577808033000, 1577808034000,
+ 1577808035000, 1577808036000, 1577808037000, 1577808038000, 1577808039000,
+ 1577808040000, 1577808041000, 1577808042000, 1577808043000, 1577808044000,
+ 1577808045000, 1577808046000, 1577808047000, 1577808048000, 1577808049000,
+ 1577808050000, 1577808051000, 1577808052000, 1577808053000, 1577808054000,
+ 1577808055000, 1577808056000, 1577808057000, 1577808058000, 1577808059000,
+ 1577808060000, 1577808061000, 1577808062000, 1577808063000, 1577808064000,
+ 1577808065000, 1577808066000, 1577808067000, 1577808068000, 1577808069000,
+ 1577808070000, 1577808071000, 1577808072000, 1577808073000, 1577808074000,
+ 1577808075000, 1577808076000, 1577808077000, 1577808078000, 1577808079000,
+ 1577808080000, 1577808081000, 1577808082000, 1577808083000, 1577808084000,
+ 1577808085000, 1577808086000, 1577808087000, 1577808088000, 1577808089000,
+ 1577808090000, 1577808091000, 1577808092000, 1577808093000, 1577808094000,
+ 1577808095000
+ ],
+ [
+ 13, 14, 8, 10, 16, 26, 32, 27, 18, 32, 36, 24, 22, 23, 22, 18, 25, 21, 21,
+ 14, 8, 11, 14, 23, 18, 17, 19, 20, 22, 19, 13, 26, 13, 14, 22, 24, 21, 22,
+ 26, 21, 23, 24, 27, 41, 31, 27, 35, 26, 28, 36, 39, 21, 17, 22, 17, 19, 15,
+ 34, 10, 15, 22, 18, 15, 20, 15, 22, 19, 16, 30, 27, 29, 23, 20, 16, 21, 21,
+ 25, 16, 18, 15, 18, 14, 10, 15, 8, 15, 6, 11, 8, 7, 13, 10, 23, 16, 15, 25
+ ]
+ ],
+ "option": "algo=holtwinters",
+ "algo": "holtwinters",
+ "prec": "ms",
+ "wncheck": 1,
+ "return_conf": 1,
+ "forecast_rows": 10,
+ "conf": 95,
+ "start": 1577808096000,
+ "every": 1000,
+ "rows": 96,
+ "protocol": 1.0
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["algo"], "holtwinters")
+ self.assertEqual(response.json["rows"], 10)
+ self.assertEqual(response.json["period"], 0)
+ self.assertEqual(response.json["res"][0][0], 1577808096000)
+ self.assertEqual(response.json["res"][0][-1], 1577808105000)
+ self.assertEqual(len(response.json["res"][0]), response.json["rows"])
+ self.assertEqual(len(response.json["res"]), 4)
+
+ def test_ad(self):
+ """test anomaly detect api"""
+ response = self.client.post("/anomaly-detect", json={
+ "schema": [
+ ["ts", "TIMESTAMP", 8],
+ ["val", "INT", 4]
+ ],
+ "data": [
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000, 1577808009000,
+ 1577808010000, 1577808011000, 1577808012000, 1577808013000, 1577808014000,
+ 1577808015000, 1577808016000],
+ [5, 14, 15, 15, 14, 19, 17, 16, 20, 22, 8, 21, 28, 11, 9, 29, 40]
+ ],
+ "rows": 17,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["rows"], 1)
+ self.assertEqual(response.json["algo"], "iqr")
+
+ def test_ad_error_get(self):
+ """1. invalid http method"""
+ response = self.client.get("/anomaly-detect", json={
+ "schema": [
+ ["ts", "TIMESTAMP", 8],
+ ["val", "INT", 4]
+ ],
+ "data": [
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000, 1577808009000,
+ 1577808010000, 1577808011000, 1577808012000, 1577808013000, 1577808014000,
+ 1577808015000, 1577808016000],
+ [5, 14, 15, 15, 14, 19, 17, 16, 20, 22, 8, 21, 28, 11, 9, 29, 40]
+ ],
+ "rows": 17,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 405)
+
+ def test_ad_error_empty_payload(self):
+        """2. the input list for anomaly detection is empty or has fewer rows than the
+        allowed range [10, 40000]"""
+ response = self.client.post("/anomaly-detect", json={
+ "schema": [
+ ["ts", "TIMESTAMP", 8],
+ ["val", "INT", 4]
+ ],
+ "data": [
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000],
+ [5, 14, 15, 15, 14, 19, 17, 16, 20]
+ ],
+ "rows": 9,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["rows"], -1)
+
+ def test_ad_error_single_col(self):
+ """3. only one column"""
+ response = self.client.post("/anomaly-detect", json={
+ "schema": [
+ ["ts", "TIMESTAMP", 8],
+ ["val", "INT", 4]
+ ],
+ "data": [
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000]
+ ],
+ "rows": 9,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["rows"], -1)
+
+ def test_ad_error_three_cols(self):
+ """4. there are three input columns """
+ response = self.client.post("/anomaly-detect", json={
+ "schema": [
+ ["ts", "TIMESTAMP", 8],
+ ["val", "INT", 4],
+ ["val1", "INT", 4]
+ ],
+ "data": [
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000, 1577808009000],
+ [5, 14, 15, 15, 14, 19, 17, 16, 20, 44],
+ [5, 14, 15, 15, 14, 19, 17, 16, 20, 44]
+ ],
+ "rows": 10,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["rows"], -1)
+
+ def test_ad_disorder_cols(self):
+        """5. the two input columns are out of order """
+ response = self.client.post("/anomaly-detect", json={
+ "schema": [
+ ["val", "INT", 4],
+ ["ts", "TIMESTAMP", 8]
+ ],
+ "data": [
+ [5, 14, 15, 15, 14, 19, 17, 16, 20, 44],
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000, 1577808009000],
+ ],
+ "rows": 10,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["rows"], 2)
+
+ def test_missing_schema(self):
+ """6. missing schema info"""
+ response = self.client.post("/anomaly-detect", json={
+ "data": [
+ [5, 14, 15, 15, 14, 19, 17, 16, 20, 44],
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000, 1577808009000],
+ ],
+ "rows": 10,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["rows"], -1)
+
+ def test_invalid_schema_info(self):
+ """7. invalid schema info"""
+ response = self.client.post("/anomaly-detect", json={
+ "schema": [
+ ["ts", "TIMESTAMP", 8]
+ ],
+ "data": [
+ [1577808000000, 1577808001000, 1577808002000, 1577808003000, 1577808004000,
+ 1577808005000, 1577808006000, 1577808007000, 1577808008000, 1577808009000],
+ ],
+ "rows": 10,
+ "algo": "iqr"
+ })
+
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json["rows"], -1)
diff --git a/tools/tdgpt/taosanalytics/test/unit_test.py b/tools/tdgpt/taosanalytics/test/unit_test.py
new file mode 100644
index 0000000000..f6ecdf0d5b
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/test/unit_test.py
@@ -0,0 +1,106 @@
+# encoding:utf-8
+# pylint: disable=c0103
+"""unit test module"""
+import os.path
+import unittest
+import sys
+
+sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../")
+
+from taosanalytics.servicemgmt import loader
+from taosanalytics.util import convert_results_to_windows, is_white_noise, parse_options, is_stationary, get_data_index
+
+
+class UtilTest(unittest.TestCase):
+ """utility test cases"""
+
+ def test_generate_anomaly_window(self):
+ # Test case 1: Normal input
+ wins = convert_results_to_windows([1, 1, 1, 1, 1, 1, -1, -1, -1, 1, 1, -1],
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
+ print(f"The result window is:{wins}")
+
+ # Assert the number of windows
+ self.assertEqual(len(wins), 2)
+
+ # Assert the first window
+ self.assertListEqual(wins[0], [7, 9])
+
+ # Assert the second window
+ self.assertListEqual(wins[1], [12, 12])
+
+ # Test case 2: Anomaly input list is empty
+ wins = convert_results_to_windows([], [1, 2])
+ self.assertListEqual(wins, [])
+
+ # Test case 3: Anomaly input list is None
+ wins = convert_results_to_windows([], None)
+ self.assertListEqual(wins, [])
+
+ # Test case 4: Timestamp list is None
+ wins = convert_results_to_windows(None, [])
+ self.assertListEqual(wins, [])
+
+ def test_validate_input_data(self):
+ pass
+
+ def test_validate_pay_load(self):
+ pass
+
+ def test_validate_forecast_input_data(self):
+ pass
+
+ def test_convert_results_to_windows(self):
+ pass
+
+ def test_is_white_noise(self):
+ """
+ Test the is_white_noise function.
+ This function tests the functionality of the is_white_noise function by providing a list and asserting the expected result.
+ """
+ list1 = []
+ wn = is_white_noise(list1)
+ self.assertFalse(wn)
+
+ def test_is_stationary(self):
+ """test whether data is stationary or not"""
+ st = is_stationary([1, 2, 3, 4, 5, 7, 5, 1, 54, 3, 6, 87, 45, 14, 24])
+        self.assertEqual(st, False)
+
+ def test_parse_options(self):
+ """test case for parse key/value string into k/v pair"""
+ option_str = "algo=ksigma,k=2,invalid_option=invalid_str"
+ opt = parse_options(option_str)
+
+ self.assertEqual(len(opt), 3)
+ self.assertDictEqual(opt, {'algo': 'ksigma', 'k': '2', 'invalid_option': 'invalid_str'})
+
+    def test_get_data_index(self):
+        """ test getting the data column index from the schema info """
+        schema = [
+            ["val", "INT", 4],
+            ["ts", "TIMESTAMP", 8]
+        ]
+
+        self.assertEqual(get_data_index(schema), 0)
+        self.assertEqual(get_data_index([["ts", "TIMESTAMP", 8]]), -1)
+
+
+class ServiceTest(unittest.TestCase):
+ def setUp(self):
+ """ load all service before start unit test """
+ loader.load_all_service()
+
+ def test_get_all_algos(self):
+ service_list = loader.get_service_list()
+ self.assertEqual(len(service_list["details"]), 2)
+
+ for item in service_list["details"]:
+ if item["type"] == "anomaly-detection":
+ self.assertEqual(len(item["algo"]), 6)
+ else:
+ self.assertEqual(len(item["algo"]), 2)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/tdgpt/taosanalytics/util.py b/tools/tdgpt/taosanalytics/util.py
new file mode 100644
index 0000000000..b9b292c3b4
--- /dev/null
+++ b/tools/tdgpt/taosanalytics/util.py
@@ -0,0 +1,126 @@
+# encoding:utf-8
+"""utility methods to helper query processing"""
+import numpy as np
+from statsmodels.stats.diagnostic import acorr_ljungbox
+from statsmodels.tsa.stattools import adfuller
+
+from taosanalytics.conf import app_logger
+
+
+def validate_pay_load(json_obj):
+ """ validate the input payload """
+ if "data" not in json_obj:
+ raise ValueError('data attr does not exist in json')
+
+ data = json_obj["data"]
+
+ if len(data) <= 1:
+        raise ValueError('only one column provided, both the primary timestamp column and a data column are required')
+
+ if len(data) > 2:
+ raise ValueError('too many columns')
+
+ rows = len(data[0])
+
+ if rows != len(data[1]):
+ raise ValueError('data inconsistent, number of rows are not identical')
+
+ if rows < 10 or rows > 40000:
+        raise ValueError(f'number of rows should be between 10 and 40000, actual {rows} rows')
+
+ if "schema" not in json_obj:
+ raise ValueError('schema is missing')
+
+ index = get_data_index(json_obj["schema"])
+ if index == -1:
+ raise ValueError('invalid schema info, data column is missing')
+
+
+def convert_results_to_windows(result, ts_list):
+ """generate the window according to anomaly detection result"""
+ skey, ekey = -1, -1
+ wins = []
+
+ if ts_list is None or result is None or len(result) != len(ts_list):
+ return wins
+
+ for index, val in enumerate(result):
+ if val == -1:
+ ekey = ts_list[index]
+ if skey == -1:
+ skey = ts_list[index]
+ else:
+ if ekey != -1:
+ wins.append([skey, ekey])
+ skey, ekey = -1, -1
+
+ if ekey != -1:
+ wins.append([skey, ekey])
+
+ return wins
+
+
+def is_white_noise(input_list):
+ """ determine whether the input list is a white noise list or not """
+ if len(input_list) < 16: # the number of items in the list is insufficient
+ return False
+
+ res = acorr_ljungbox(input_list, lags=[6, 12, 16], boxpierce=True, return_df=True)
+ q_lb = res.lb_pvalue.array[2]
+ return q_lb >= 0.05
+
+
+def is_stationary(input_list):
+ """ determine whether the input list is weak stationary or not """
+ adf, pvalue, usedlag, nobs, critical_values, _ = adfuller(input_list, autolag='AIC')
+    app_logger.log_inst.info("adf is:%f critical value is:%s", adf, critical_values)
+ return pvalue < 0.05
+
+
+def parse_options(option_str) -> dict:
+ """
+ the option format is like the following string: "algo=ksigma,k=2,invalid_option=invalid_str"
+ convert it to the dict format
+ """
+ options = {}
+
+ if option_str is None or len(option_str) == 0:
+ return options
+
+ opt_list = option_str.split(",")
+ for line in opt_list:
+ if "=" not in line or len(line.strip()) < 3:
+ continue
+
+ kv_pair = line.strip().split("=")
+ if kv_pair[0].strip() == '' or kv_pair[1].strip() == '':
+ continue
+
+ options[kv_pair[0].strip()] = kv_pair[1].strip()
+
+ return options
+
+
+def get_data_index(schema):
+ """get the data index according to the schema info"""
+ for index, val in enumerate(schema):
+ if val[0] == "val":
+ return index
+
+ return -1
+
+
+def get_ts_index(schema):
+ """get the timestamp index according to the schema info"""
+ for index, val in enumerate(schema):
+ if val[0] == "ts":
+ return index
+ return -1
+
+
+def create_sequences(values, time_steps):
+ """ create sequences for training model """
+ output = []
+ for i in range(len(values) - time_steps + 1):
+ output.append(values[i: (i + time_steps)])
+ return np.stack(output)
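A short sketch of the payload contract that validate_pay_load enforces (two columns of equal length, 10 to 40000 rows, and a schema containing a "val" column); the sample values are arbitrary.

```python
# Sketch: one payload that passes validate_pay_load and one that fails.
from taosanalytics.util import validate_pay_load

ok = {
    "schema": [["ts", "TIMESTAMP", 8], ["val", "INT", 4]],
    "data": [
        list(range(1577808000000, 1577808012000, 1000)),   # 12 timestamps
        [5, 14, 15, 15, 14, 19, 17, 16, 20, 22, 8, 21],    # 12 values
    ],
}
validate_pay_load(ok)    # no exception raised

bad = {"schema": [["ts", "TIMESTAMP", 8]], "data": [[1, 2, 3]]}
try:
    validate_pay_load(bad)
except ValueError as err:
    print(err)           # prints the validation error
```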
From f6ac51b184f67a5271fc039e87210a617781a54f Mon Sep 17 00:00:00 2001
From: dmchen
Date: Thu, 6 Mar 2025 19:19:02 +0800
Subject: [PATCH 094/105] feat: TS-5927-add-doc
---
docs/en/14-reference/01-components/01-taosd.md | 1 +
docs/zh/14-reference/01-components/01-taosd.md | 7 +++++++
2 files changed, 8 insertions(+)
diff --git a/docs/en/14-reference/01-components/01-taosd.md b/docs/en/14-reference/01-components/01-taosd.md
index b3230fac32..4527a7fcac 100644
--- a/docs/en/14-reference/01-components/01-taosd.md
+++ b/docs/en/14-reference/01-components/01-taosd.md
@@ -231,6 +231,7 @@ The effective value of charset is UTF-8.
|udf | |Supported, effective after restart|Whether to start UDF service; 0: do not start, 1: start; default value 0 |
|udfdResFuncs | |Supported, effective after restart|Internal parameter, for setting UDF result sets|
|udfdLdLibPath | |Supported, effective after restart|Internal parameter, indicates the library path for loading UDF|
+|enableStrongPassword | After 3.3.5.0 |Supported, effective after restart|The password must include at least three of the following character types: uppercase letters, lowercase letters, digits, and special characters. Special characters include `! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? \| ~ , .`; 0: disable, 1: enable; default value 1 |
### Stream Computing Parameters
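For illustration only, the three-of-four character-class rule documented above can be restated in Python roughly as follows; this is a sketch of the documented policy, not the server-side implementation.

```python
# Rough restatement of the documented enableStrongPassword rule, for illustration.
SPECIALS = set("!@#$%^&*()-_+=[]{}:;><?|~,.")

def is_strong(password: str) -> bool:
    classes = [
        any(c.isupper() for c in password),    # uppercase letters
        any(c.islower() for c in password),    # lowercase letters
        any(c.isdigit() for c in password),    # digits
        any(c in SPECIALS for c in password),  # special characters
    ]
    return sum(classes) >= 3                   # at least three of the four classes

print(is_strong("Taos123!"))   # True
print(is_strong("taosdata"))   # False: only lowercase letters
```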
diff --git a/docs/zh/14-reference/01-components/01-taosd.md b/docs/zh/14-reference/01-components/01-taosd.md
index 5ae2640541..2216cab915 100644
--- a/docs/zh/14-reference/01-components/01-taosd.md
+++ b/docs/zh/14-reference/01-components/01-taosd.md
@@ -1017,6 +1017,13 @@ charset 的有效值是 UTF-8。
- 动态修改:支持通过 SQL 修改,重启生效
- 支持版本:v3.1.0.0 引入
+#### enableStrongPassword
+- 说明:密码要符合一个要求:至少包含大写字母、小写字母、数字、特殊字符中的三类。特殊字符包括 `! @ # $ % ^ & * ( ) - _ + = [ ] { } : ; > < ? | ~ , .`
+- 类型:整数;0:不启用,1:启用
+- 默认值:1
+- 动态修改:支持通过 SQL 修改,重启生效
+- 支持版本:v3.3.5.0 引入
+
### 流计算参数
#### disableStream
From 944f3535b651f124adf1c07a41b988f35ceb1fad Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 19:31:40 +0800
Subject: [PATCH 095/105] fix: compile error in win32
---
tests/taosc_test/CMakeLists.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/taosc_test/CMakeLists.txt b/tests/taosc_test/CMakeLists.txt
index 45c14f84bf..f622d1ccde 100644
--- a/tests/taosc_test/CMakeLists.txt
+++ b/tests/taosc_test/CMakeLists.txt
@@ -16,7 +16,7 @@ aux_source_directory(src OS_SRC)
# taoscTest
add_executable(taoscTest "taoscTest.cpp")
-target_link_libraries(taoscTest ${TAOS_LIB} os gtest_main)
+target_link_libraries(taoscTest PUBLIC ${TAOS_LIB} os util common gtest_main)
target_include_directories(
taoscTest
PUBLIC "${TD_SOURCE_DIR}/include/os"
From 53b9743ac4e87b191755b211cdb398e4382180c4 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 20:03:13 +0800
Subject: [PATCH 096/105] feat: enhanced time format verification
---
.../tools/benchmark/basic/json/csv-export.json | 2 +-
tools/taos-tools/src/benchCsv.c | 18 ++++++++++++++++++
2 files changed, 19 insertions(+), 1 deletion(-)
diff --git a/tests/army/tools/benchmark/basic/json/csv-export.json b/tests/army/tools/benchmark/basic/json/csv-export.json
index 2dbe2300a8..2d6f7b7022 100644
--- a/tests/army/tools/benchmark/basic/json/csv-export.json
+++ b/tests/army/tools/benchmark/basic/json/csv-export.json
@@ -1,6 +1,6 @@
{
"filetype": "csvfile",
- "output_path": "./csv/",
+ "output_dir": "./csv/",
"databases": [
{
"dbinfo": {
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 0bb47b0888..39f1a7983f 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -60,6 +60,24 @@ static int csvValidateParamTsFormat(const char* csv_ts_format) {
return -1;
}
+ int has_Y = 0, has_m = 0, has_d = 0;
+ const char* p = csv_ts_format;
+ while (*p) {
+ if (*p == '%') {
+ p++;
+ switch (*p) {
+ case 'Y': has_Y = 1; break;
+ case 'm': has_m = 1; break;
+ case 'd': has_d = 1; break;
+ }
+ }
+ p++;
+ }
+
+ if (has_Y == 0 || has_m == 0 || has_d == 0) {
+ return -1;
+ }
+
return 0;
}
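The added check only accepts timestamp formats that contain all of %Y, %m and %d. A rough Python equivalent of the new C loop, for illustration:

```python
# Rough Python equivalent of the csvValidateParamTsFormat addition above.
def ts_format_has_date(fmt: str) -> bool:
    found = {"Y": False, "m": False, "d": False}
    i = 0
    while i < len(fmt):
        if fmt[i] == "%" and i + 1 < len(fmt) and fmt[i + 1] in found:
            found[fmt[i + 1]] = True
            i += 1
        i += 1
    return all(found.values())

print(ts_format_has_date("%Y-%m-%d %H:%M:%S"))  # True
print(ts_format_has_date("%H:%M:%S"))           # False
```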
From 4e79e8f489890e3bb39dd69a9f0f64b1a163d357 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Thu, 6 Mar 2025 20:18:49 +0800
Subject: [PATCH 097/105] feat: zlib compression is supported only on linux and
mac platforms
---
tools/taos-tools/inc/benchCsv.h | 5 +++++
tools/taos-tools/src/CMakeLists.txt | 4 +++-
tools/taos-tools/src/benchCsv.c | 17 ++++++++++++++---
3 files changed, 22 insertions(+), 4 deletions(-)
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index 624bcadedc..f944600ecb 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -16,7 +16,10 @@
#ifndef INC_BENCHCSV_H_
#define INC_BENCHCSV_H_
+#ifndef _WIN32
#include <zlib.h>
+#endif
+
#include "bench.h"
@@ -38,7 +41,9 @@ typedef struct {
CsvCompressionLevel compress_level;
CsvIoError result;
union {
+#ifndef _WIN32
gzFile gf;
+#endif
FILE* fp;
} handle;
} CsvFileHandle;
diff --git a/tools/taos-tools/src/CMakeLists.txt b/tools/taos-tools/src/CMakeLists.txt
index 5bc2703165..93b1530020 100644
--- a/tools/taos-tools/src/CMakeLists.txt
+++ b/tools/taos-tools/src/CMakeLists.txt
@@ -316,6 +316,9 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux" OR ${CMAKE_SYSTEM_NAME} MATCHES "Darwin
ENDIF ()
ENDIF ()
+
+ target_link_libraries(taosBenchmark z)
+
ELSE ()
ADD_DEFINITIONS(-DWINDOWS)
SET(CMAKE_C_STANDARD 11)
@@ -364,4 +367,3 @@ ELSE ()
TARGET_LINK_LIBRARIES(taosBenchmark taos msvcregex pthread toolscJson ${WEBSOCKET_LINK_FLAGS})
ENDIF ()
-target_link_libraries(taosBenchmark z)
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index 39f1a7983f..d08b9d19b0 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -953,12 +953,15 @@ static CsvFileHandle* csvOpen(const char* filename, CsvCompressionLevel compress
if (compress_level == CSV_COMPRESS_NONE) {
fhdl->handle.fp = fopen(filename, "w");
failed = (!fhdl->handle.fp);
- } else {
+ }
+#ifndef _WIN32
+ else {
char mode[TINY_BUFF_LEN];
(void)snprintf(mode, sizeof(mode), "wb%d", compress_level);
fhdl->handle.gf = gzopen(filename, mode);
failed = (!fhdl->handle.gf);
}
+#endif
if (failed) {
tmfree(fhdl);
@@ -986,7 +989,9 @@ static CsvIoError csvWrite(CsvFileHandle* fhdl, const char* buf, size_t size) {
fhdl->result = CSV_ERR_WRITE_FAILED;
return CSV_ERR_WRITE_FAILED;
}
- } else {
+ }
+#ifndef _WIN32
+ else {
int ret = gzwrite(fhdl->handle.gf, buf, size);
if (ret != size) {
errorPrint("Failed to write csv file: %s. expected written %zu but %d.\n",
@@ -998,6 +1003,8 @@ static CsvIoError csvWrite(CsvFileHandle* fhdl, const char* buf, size_t size) {
return CSV_ERR_WRITE_FAILED;
}
}
+#endif
+
return CSV_ERR_OK;
}
@@ -1012,12 +1019,16 @@ static void csvClose(CsvFileHandle* fhdl) {
fclose(fhdl->handle.fp);
fhdl->handle.fp = NULL;
}
- } else {
+ }
+#ifndef _WIN32
+ else {
if (fhdl->handle.gf) {
gzclose(fhdl->handle.gf);
fhdl->handle.gf = NULL;
}
}
+#endif
+
tmfree(fhdl);
}
From 6250adb3b1ed4673fc3f9e538ff99a878f86af6c Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Thu, 6 Mar 2025 22:20:52 +0800
Subject: [PATCH 098/105] refactor: rename udfd to taosudf
---
docs/en/07-develop/09-udf.md | 4 +-
docs/en/14-reference/09-error-code.md | 8 +-
docs/zh/07-develop/09-udf.md | 4 +-
include/libs/function/tudf.h | 18 +--
packaging/deb/DEBIAN/prerm | 2 +-
packaging/deb/makedeb.sh | 2 +-
packaging/rpm/tdengine.spec | 4 +-
packaging/tools/install.sh | 4 +-
packaging/tools/make_install.bat | 2 +-
packaging/tools/make_install.sh | 10 +-
packaging/tools/makepkg.sh | 2 +-
packaging/tools/post.sh | 6 +-
packaging/tools/remove.sh | 2 +-
source/common/src/tglobal.c | 2 +-
source/dnode/mgmt/mgmt_dnode/src/dmInt.c | 2 +-
source/libs/function/CMakeLists.txt | 8 +-
source/libs/function/src/tudf.c | 64 ++++-----
source/libs/function/src/udfd.c | 128 +++++++++---------
tests/system-test/0-others/udfTest.py | 22 +--
tests/system-test/0-others/udf_cfg2.py | 12 +-
tests/system-test/0-others/udf_cluster.py | 10 +-
tests/system-test/0-others/udf_create.py | 14 +-
.../system-test/0-others/udf_restart_taosd.py | 12 +-
tests/test_new/udf/udf_create.py | 14 +-
24 files changed, 178 insertions(+), 178 deletions(-)
diff --git a/docs/en/07-develop/09-udf.md b/docs/en/07-develop/09-udf.md
index 0e91dd09db..138378e450 100644
--- a/docs/en/07-develop/09-udf.md
+++ b/docs/en/07-develop/09-udf.md
@@ -495,10 +495,10 @@ taos> select myfun(v1, v2) from t;
DB error: udf function execution failure (0.011088s)
```
-Unfortunately, the execution failed. What could be the reason? Check the udfd process logs.
+Unfortunately, the execution failed. What could be the reason? Check the taosudf process logs.
```shell
-tail -10 /var/log/taos/udfd.log
+tail -10 /var/log/taos/taosudf.log
```
Found the following error messages.
diff --git a/docs/en/14-reference/09-error-code.md b/docs/en/14-reference/09-error-code.md
index 7dbd58bb1d..d071bcdc81 100644
--- a/docs/en/14-reference/09-error-code.md
+++ b/docs/en/14-reference/09-error-code.md
@@ -485,10 +485,10 @@ This document details the server error codes that may be encountered when using
| Error Code | Description | Possible Scenarios or Reasons | Recommended Actions |
| ---------- | ---------------------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ |
| 0x80002901 | udf is stopping | udf call received when dnode exits | Stop executing udf queries |
-| 0x80002902 | udf pipe read error | Error occurred when taosd reads from udfd pipe | udfd unexpectedly exits, 1) C udf crash 2) udfd crash |
-| 0x80002903 | udf pipe connect error | Error establishing pipe connection to udfd in taosd | 1) Corresponding udfd not started in taosd. Restart taosd |
-| 0x80002904 | udf pipe not exist | Connection error occurs between two phases of udf setup, call, and teardown, causing the connection to disappear, subsequent phases continue | udfd unexpectedly exits, 1) C udf crash 2) udfd crash |
-| 0x80002905 | udf load failure | Error loading udf in udfd | 1) udf does not exist in mnode 2) Error in udf loading. Check logs |
+| 0x80002902 | udf pipe read error | Error occurred when taosd reads from taosudf pipe | taosudf unexpectedly exits, 1) C udf crash 2) taosudf crash |
+| 0x80002903 | udf pipe connect error | Error establishing pipe connection to taosudf in taosd | 1) Corresponding taosudf not started in taosd. Restart taosd |
+| 0x80002904 | udf pipe not exist | Connection error occurs between two phases of udf setup, call, and teardown, causing the connection to disappear, subsequent phases continue | taosudf unexpectedly exits, 1) C udf crash 2) taosudf crash |
+| 0x80002905 | udf load failure | Error loading udf in taosudf | 1) udf does not exist in mnode 2) Error in udf loading. Check logs |
| 0x80002906 | udf invalid function input | udf input check | udf function does not accept input, such as wrong column type |
| 0x80002907 | udf invalid bufsize | Intermediate result in udf aggregation function exceeds specified bufsize | Increase bufsize, or reduce intermediate result size |
| 0x80002908 | udf invalid output type | udf output type differs from the type specified when creating udf | Modify udf, or the type when creating udf, to match the result |
diff --git a/docs/zh/07-develop/09-udf.md b/docs/zh/07-develop/09-udf.md
index 55953e69ea..d0f9c93652 100644
--- a/docs/zh/07-develop/09-udf.md
+++ b/docs/zh/07-develop/09-udf.md
@@ -472,10 +472,10 @@ taos> select myfun(v1, v2) from t;
DB error: udf function execution failure (0.011088s)
```
-不幸的是执行失败了,什么原因呢?查看 udfd 进程的日志。
+不幸的是执行失败了,什么原因呢?查看 taosudf 进程的日志。
```shell
-tail -10 /var/log/taos/udfd.log
+tail -10 /var/log/taos/taosudf.log
```
发现以下错误信息。
diff --git a/include/libs/function/tudf.h b/include/libs/function/tudf.h
index 2c7e6216f5..70fc44a91b 100644
--- a/include/libs/function/tudf.h
+++ b/include/libs/function/tudf.h
@@ -37,9 +37,9 @@ extern "C" {
#define UDF_LISTEN_PIPE_NAME_LEN 32
#ifdef _WIN32
-#define UDF_LISTEN_PIPE_NAME_PREFIX "\\\\?\\pipe\\udfd.sock"
+#define UDF_LISTEN_PIPE_NAME_PREFIX "\\\\?\\pipe\\taosudf.sock"
#else
-#define UDF_LISTEN_PIPE_NAME_PREFIX ".udfd.sock."
+#define UDF_LISTEN_PIPE_NAME_PREFIX ".taosudf.sock."
#endif
#define UDF_DNODE_ID_ENV_NAME "DNODE_ID"
@@ -66,7 +66,7 @@ extern "C" {
const void *ptrs[] = {__VA_ARGS__}; \
for (int i = 0; i < sizeof(ptrs) / sizeof(ptrs[0]); ++i) { \
if (ptrs[i] == NULL) { \
- fnError("udfd %dth parameter invalid, NULL PTR.line:%d", i, __LINE__); \
+ fnError("taosudf %dth parameter invalid, NULL PTR.line:%d", i, __LINE__); \
return TSDB_CODE_INVALID_PARA; \
} \
} \
@@ -77,7 +77,7 @@ extern "C" {
const void *ptrs[] = {__VA_ARGS__}; \
for (int i = 0; i < sizeof(ptrs) / sizeof(ptrs[0]); ++i) { \
if (ptrs[i] == NULL) { \
- fnError("udfd %dth parameter invalid, NULL PTR.line:%d", i, __LINE__); \
+ fnError("taosudf %dth parameter invalid, NULL PTR.line:%d", i, __LINE__); \
return; \
} \
} \
@@ -137,31 +137,31 @@ int32_t cleanUpUdfs();
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// udf api
/**
- * create udfd proxy, called once in process that call doSetupUdf/callUdfxxx/doTeardownUdf
+ * create taosudf proxy, called once in process that call doSetupUdf/callUdfxxx/doTeardownUdf
* @return error code
*/
int32_t udfcOpen();
/**
- * destroy udfd proxy
+ * destroy taosudf proxy
* @return error code
*/
int32_t udfcClose();
/**
- * start udfd that serves udf function invocation under dnode startDnodeId
+ * start taosudf that serves udf function invocation under dnode startDnodeId
* @param startDnodeId
* @return
*/
int32_t udfStartUdfd(int32_t startDnodeId);
/**
- * stop udfd
+ * stop taosudf
* @return
*/
void udfStopUdfd();
/**
- * get udfd pid
+ * get taosudf pid
*
*/
// int32_t udfGetUdfdPid(int32_t* pUdfdPid);
diff --git a/packaging/deb/DEBIAN/prerm b/packaging/deb/DEBIAN/prerm
index 31561c5682..cdf68e0d78 100644
--- a/packaging/deb/DEBIAN/prerm
+++ b/packaging/deb/DEBIAN/prerm
@@ -30,7 +30,7 @@ else
# Remove all links
${csudo}rm -f ${bin_link_dir}/taos || :
${csudo}rm -f ${bin_link_dir}/taosd || :
- ${csudo}rm -f ${bin_link_dir}/udfd || :
+ ${csudo}rm -f ${bin_link_dir}/taosudf || :
${csudo}rm -f ${bin_link_dir}/taosadapter || :
${csudo}rm -f ${bin_link_dir}/taosdemo || :
${csudo}rm -f ${bin_link_dir}/taoskeeper || :
diff --git a/packaging/deb/makedeb.sh b/packaging/deb/makedeb.sh
index 93f523a13f..d4616f29ff 100755
--- a/packaging/deb/makedeb.sh
+++ b/packaging/deb/makedeb.sh
@@ -103,7 +103,7 @@ sed -i "s/versionType=\"enterprise\"/versionType=\"community\"/g" ${pkg_dir}${in
cp ${compile_dir}/build/bin/taosd ${pkg_dir}${install_home_path}/bin
-cp ${compile_dir}/build/bin/udfd ${pkg_dir}${install_home_path}/bin
+cp ${compile_dir}/build/bin/taosudf ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosBenchmark ${pkg_dir}${install_home_path}/bin
cp ${compile_dir}/build/bin/taosdump ${pkg_dir}${install_home_path}/bin
diff --git a/packaging/rpm/tdengine.spec b/packaging/rpm/tdengine.spec
index bfa91b6af7..ff576949c7 100644
--- a/packaging/rpm/tdengine.spec
+++ b/packaging/rpm/tdengine.spec
@@ -92,7 +92,7 @@ cp %{_compiledir}/../packaging/tools/set_core.sh %{buildroot}%{homepath}/bin
cp %{_compiledir}/../packaging/tools/taosd-dump-cfg.gdb %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taos %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosd %{buildroot}%{homepath}/bin
-cp %{_compiledir}/build/bin/udfd %{buildroot}%{homepath}/bin
+cp %{_compiledir}/build/bin/taosudf %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosBenchmark %{buildroot}%{homepath}/bin
cp %{_compiledir}/build/bin/taosdump %{buildroot}%{homepath}/bin
cp %{_compiledir}/../../enterprise/packaging/start-all.sh %{buildroot}%{homepath}/bin
@@ -233,7 +233,7 @@ if [ $1 -eq 0 ];then
# Remove all links
${csudo}rm -f ${bin_link_dir}/taos || :
${csudo}rm -f ${bin_link_dir}/taosd || :
- ${csudo}rm -f ${bin_link_dir}/udfd || :
+ ${csudo}rm -f ${bin_link_dir}/taosudf || :
${csudo}rm -f ${bin_link_dir}/taosadapter || :
${csudo}rm -f ${bin_link_dir}/taoskeeper || :
${csudo}rm -f ${bin_link_dir}/taosdump || :
diff --git a/packaging/tools/install.sh b/packaging/tools/install.sh
index a6fd69d16f..5e554bb7d8 100755
--- a/packaging/tools/install.sh
+++ b/packaging/tools/install.sh
@@ -19,7 +19,7 @@ script_dir=$(dirname $(readlink -f "$0"))
PREFIX="taos"
clientName="${PREFIX}"
serverName="${PREFIX}d"
-udfdName="udfd"
+udfdName="taosudf"
configFile="${PREFIX}.cfg"
productName="TDengine"
emailName="taosdata.com"
@@ -156,7 +156,7 @@ done
#echo "verType=${verType} interactiveFqdn=${interactiveFqdn}"
-tools=(${clientName} ${benchmarkName} ${dumpName} ${demoName} remove.sh udfd set_core.sh TDinsight.sh start_pre.sh start-all.sh stop-all.sh)
+tools=(${clientName} ${benchmarkName} ${dumpName} ${demoName} remove.sh taosudf set_core.sh TDinsight.sh start_pre.sh start-all.sh stop-all.sh)
if [ "${verMode}" == "cluster" ]; then
services=(${serverName} ${adapterName} ${xname} ${explorerName} ${keeperName})
elif [ "${verMode}" == "edge" ]; then
diff --git a/packaging/tools/make_install.bat b/packaging/tools/make_install.bat
index 04d342ea06..e5b29b9557 100644
--- a/packaging/tools/make_install.bat
+++ b/packaging/tools/make_install.bat
@@ -114,7 +114,7 @@ if %Enterprise% == TRUE (
)
copy %binary_dir%\\build\\bin\\taosd.exe %target_dir% > nul
-copy %binary_dir%\\build\\bin\\udfd.exe %target_dir% > nul
+copy %binary_dir%\\build\\bin\\taosudf.exe %target_dir% > nul
if exist %binary_dir%\\build\\bin\\taosadapter.exe (
copy %binary_dir%\\build\\bin\\taosadapter.exe %target_dir% > nul
)
diff --git a/packaging/tools/make_install.sh b/packaging/tools/make_install.sh
index bb61392f80..f44a46d862 100755
--- a/packaging/tools/make_install.sh
+++ b/packaging/tools/make_install.sh
@@ -171,7 +171,7 @@ function install_bin() {
${csudo}rm -f ${bin_link_dir}/${serverName} || :
${csudo}rm -f ${bin_link_dir}/taosadapter || :
${csudo}rm -f ${bin_link_dir}/taoskeeper || :
- ${csudo}rm -f ${bin_link_dir}/udfd || :
+ ${csudo}rm -f ${bin_link_dir}/taosudf || :
${csudo}rm -f ${bin_link_dir}/taosdemo || :
${csudo}rm -f ${bin_link_dir}/taosdump || :
${csudo}rm -f ${bin_link_dir}/${uninstallScript} || :
@@ -186,7 +186,7 @@ function install_bin() {
[ -f ${binary_dir}/build/bin/taosdump ] && ${csudo}cp -r ${binary_dir}/build/bin/taosdump ${install_main_dir}/bin || :
[ -f ${binary_dir}/build/bin/taosadapter ] && ${csudo}cp -r ${binary_dir}/build/bin/taosadapter ${install_main_dir}/bin || :
[ -f ${binary_dir}/build/bin/taoskeeper ] && ${csudo}cp -r ${binary_dir}/build/bin/taoskeeper ${install_main_dir}/bin || :
- [ -f ${binary_dir}/build/bin/udfd ] && ${csudo}cp -r ${binary_dir}/build/bin/udfd ${install_main_dir}/bin || :
+ [ -f ${binary_dir}/build/bin/taosudf ] && ${csudo}cp -r ${binary_dir}/build/bin/taosudf ${install_main_dir}/bin || :
[ -f ${binary_dir}/build/bin/taosx ] && ${csudo}cp -r ${binary_dir}/build/bin/taosx ${install_main_dir}/bin || :
${csudo}cp -r ${binary_dir}/build/bin/${serverName} ${install_main_dir}/bin || :
@@ -201,7 +201,7 @@ function install_bin() {
[ -x ${install_main_dir}/bin/${serverName} ] && ${csudo}ln -s ${install_main_dir}/bin/${serverName} ${bin_link_dir}/${serverName} > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taosadapter ] && ${csudo}ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taoskeeper ] && ${csudo}ln -s ${install_main_dir}/bin/taoskeeper ${bin_link_dir}/taoskeeper > /dev/null 2>&1 || :
- [ -x ${install_main_dir}/bin/udfd ] && ${csudo}ln -s ${install_main_dir}/bin/udfd ${bin_link_dir}/udfd > /dev/null 2>&1 || :
+ [ -x ${install_main_dir}/bin/taosudf ] && ${csudo}ln -s ${install_main_dir}/bin/taosudf ${bin_link_dir}/taosudf > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo}ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taosdemo ] && ${csudo}ln -s ${install_main_dir}/bin/taosdemo ${bin_link_dir}/taosdemo > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taosx ] && ${csudo}ln -s ${install_main_dir}/bin/taosx ${bin_link_dir}/taosx > /dev/null 2>&1 || :
@@ -216,7 +216,7 @@ function install_bin() {
[ -f ${binary_dir}/build/bin/taosdump ] && ${csudo}cp -r ${binary_dir}/build/bin/taosdump ${install_main_dir}/bin || :
[ -f ${binary_dir}/build/bin/taosadapter ] && ${csudo}cp -r ${binary_dir}/build/bin/taosadapter ${install_main_dir}/bin || :
[ -f ${binary_dir}/build/bin/taoskeeper ] && ${csudo}cp -r ${binary_dir}/build/bin/taoskeeper ${install_main_dir}/bin || :
- [ -f ${binary_dir}/build/bin/udfd ] && ${csudo}cp -r ${binary_dir}/build/bin/udfd ${install_main_dir}/bin || :
+ [ -f ${binary_dir}/build/bin/taosudf ] && ${csudo}cp -r ${binary_dir}/build/bin/taosudf ${install_main_dir}/bin || :
[ -f ${binary_dir}/build/bin/taosx ] && ${csudo}cp -r ${binary_dir}/build/bin/taosx ${install_main_dir}/bin || :
[ -f ${binary_dir}/build/bin/*explorer ] && ${csudo}cp -r ${binary_dir}/build/bin/*explorer ${install_main_dir}/bin || :
${csudo}cp -r ${binary_dir}/build/bin/${serverName} ${install_main_dir}/bin || :
@@ -228,7 +228,7 @@ function install_bin() {
[ -x ${install_main_dir}/bin/${serverName} ] && ${csudo}ln -s ${install_main_dir}/bin/${serverName} ${bin_link_dir}/${serverName} > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taosadapter ] && ${csudo}ln -s ${install_main_dir}/bin/taosadapter ${bin_link_dir}/taosadapter > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taoskeeper ] && ${csudo}ln -s ${install_main_dir}/bin/taoskeeper ${bin_link_dir}/taoskeeper > /dev/null 2>&1 || :
- [ -x ${install_main_dir}/bin/udfd ] && ${csudo}ln -s ${install_main_dir}/bin/udfd ${bin_link_dir}/udfd > /dev/null 2>&1 || :
+ [ -x ${install_main_dir}/bin/taosudf ] && ${csudo}ln -s ${install_main_dir}/bin/taosudf ${bin_link_dir}/taosudf > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taosdump ] && ${csudo}ln -s ${install_main_dir}/bin/taosdump ${bin_link_dir}/taosdump > /dev/null 2>&1 || :
[ -f ${install_main_dir}/bin/taosBenchmark ] && ${csudo}ln -sf ${install_main_dir}/bin/taosBenchmark ${install_main_dir}/bin/taosdemo > /dev/null 2>&1 || :
[ -x ${install_main_dir}/bin/taosx ] && ${csudo}ln -s ${install_main_dir}/bin/taosx ${bin_link_dir}/taosx > /dev/null 2>&1 || :
diff --git a/packaging/tools/makepkg.sh b/packaging/tools/makepkg.sh
index fb461835b4..7ef7903137 100755
--- a/packaging/tools/makepkg.sh
+++ b/packaging/tools/makepkg.sh
@@ -98,7 +98,7 @@ else
${taostools_bin_files} \
${build_dir}/bin/${clientName}adapter \
${build_dir}/bin/${clientName}keeper \
- ${build_dir}/bin/udfd \
+ ${build_dir}/bin/taosudf \
${script_dir}/remove.sh \
${script_dir}/set_core.sh \
${script_dir}/startPre.sh \
diff --git a/packaging/tools/post.sh b/packaging/tools/post.sh
index 6b4c96e8c4..68e3df8138 100755
--- a/packaging/tools/post.sh
+++ b/packaging/tools/post.sh
@@ -233,7 +233,7 @@ function install_bin() {
log_print "start install bin from ${bin_dir} to ${bin_link_dir}"
${csudo}rm -f ${bin_link_dir}/taos || :
${csudo}rm -f ${bin_link_dir}/taosd || :
- ${csudo}rm -f ${bin_link_dir}/udfd || :
+ ${csudo}rm -f ${bin_link_dir}/taosudf || :
${csudo}rm -f ${bin_link_dir}/taosadapter || :
${csudo}rm -f ${bin_link_dir}/taosBenchmark || :
${csudo}rm -f ${bin_link_dir}/taoskeeper || :
@@ -255,8 +255,8 @@ function install_bin() {
if [ -x ${bin_dir}/taosd ]; then
${csudo}ln -s ${bin_dir}/taosd ${bin_link_dir}/taosd 2>>${install_log_path} || return 1
fi
- if [ -x ${bin_dir}/udfd ]; then
- ${csudo}ln -s ${bin_dir}/udfd ${bin_link_dir}/udfd 2>>${install_log_path} || return 1
+ if [ -x ${bin_dir}/taosudf ]; then
+ ${csudo}ln -s ${bin_dir}/taosudf ${bin_link_dir}/taosudf 2>>${install_log_path} || return 1
fi
if [ -x ${bin_dir}/taosadapter ]; then
${csudo}ln -s ${bin_dir}/taosadapter ${bin_link_dir}/taosadapter 2>>${install_log_path} || return 1
diff --git a/packaging/tools/remove.sh b/packaging/tools/remove.sh
index 43c2de4ba4..b7bda31e13 100755
--- a/packaging/tools/remove.sh
+++ b/packaging/tools/remove.sh
@@ -61,7 +61,7 @@ if [ "${verMode}" == "cluster" ]; then
else
services=(${PREFIX}"d" ${PREFIX}"adapter" ${PREFIX}"keeper" ${PREFIX}"-explorer")
fi
-tools=(${PREFIX} ${PREFIX}"Benchmark" ${PREFIX}"dump" ${PREFIX}"demo" udfd set_core.sh TDinsight.sh $uninstallScript start-all.sh stop-all.sh)
+tools=(${PREFIX} ${PREFIX}"Benchmark" ${PREFIX}"dump" ${PREFIX}"demo" taosudf set_core.sh TDinsight.sh $uninstallScript start-all.sh stop-all.sh)
csudo=""
if command -v sudo >/dev/null; then
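Editor's note (not part of the patch): the rename fans out across several packaging scripts, so a quick scan for leftover `udfd` references can catch any misses. A hedged sketch, assuming the scripts live under `packaging/` and are plain `.sh` files:

```python
# Hypothetical helper (not part of this patch): scan packaging scripts for
# leftover references to the old "udfd" binary name after the rename.
import re
from pathlib import Path

def find_stale_udfd_refs(root="packaging"):
    """Return (file, line_no, line) tuples that still mention 'udfd'."""
    stale = []
    # word-bounded match avoids hitting the new name 'taosudf'
    pattern = re.compile(r"\budfd\b")
    for path in Path(root).rglob("*.sh"):
        for no, line in enumerate(path.read_text(errors="ignore").splitlines(), 1):
            if pattern.search(line):
                stale.append((str(path), no, line.strip()))
    return stale

if __name__ == "__main__":
    for f, no, line in find_stale_udfd_refs():
        print(f"{f}:{no}: {line}")
```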
diff --git a/source/common/src/tglobal.c b/source/common/src/tglobal.c
index 50b2514ad0..00f9504bc9 100644
--- a/source/common/src/tglobal.c
+++ b/source/common/src/tglobal.c
@@ -319,7 +319,7 @@ int32_t tsS3MigrateIntervalSec = 60 * 60; // interval of s3migrate db in all vg
bool tsS3MigrateEnabled = 0;
int32_t tsGrantHBInterval = 60;
int32_t tsUptimeInterval = 300; // seconds
-char tsUdfdResFuncs[512] = ""; // udfd resident funcs that teardown when udfd exits
+char tsUdfdResFuncs[512] = ""; // taosudf resident funcs that are torn down when taosudf exits
char tsUdfdLdLibPath[512] = "";
bool tsDisableStream = false;
int64_t tsStreamBufferSize = 128 * 1024 * 1024;
diff --git a/source/dnode/mgmt/mgmt_dnode/src/dmInt.c b/source/dnode/mgmt/mgmt_dnode/src/dmInt.c
index ed6aff1b13..8797440f54 100644
--- a/source/dnode/mgmt/mgmt_dnode/src/dmInt.c
+++ b/source/dnode/mgmt/mgmt_dnode/src/dmInt.c
@@ -88,7 +88,7 @@ static int32_t dmOpenMgmt(SMgmtInputOpt *pInput, SMgmtOutputOpt *pOutput) {
}
if ((code = udfStartUdfd(pMgmt->pData->dnodeId)) != 0) {
- dError("failed to start udfd since %s", tstrerror(code));
+ dError("failed to start taosudf since %s", tstrerror(code));
}
if ((code = taosAnalyticsInit()) != 0) {
diff --git a/source/libs/function/CMakeLists.txt b/source/libs/function/CMakeLists.txt
index 4e3c8dddab..62989bb293 100644
--- a/source/libs/function/CMakeLists.txt
+++ b/source/libs/function/CMakeLists.txt
@@ -33,14 +33,14 @@ target_link_libraries(
PUBLIC uv_a
)
-add_executable(udfd src/udfd.c)
+add_executable(taosudf src/udfd.c)
if(${TD_DARWIN})
- target_compile_options(udfd PRIVATE -Wno-error=deprecated-non-prototype)
+ target_compile_options(taosudf PRIVATE -Wno-error=deprecated-non-prototype)
endif()
target_include_directories(
- udfd
+ taosudf
PUBLIC
"${TD_SOURCE_DIR}/include/libs/function"
"${TD_SOURCE_DIR}/contrib/libuv/include"
@@ -52,7 +52,7 @@ target_include_directories(
)
target_link_libraries(
- udfd
+ taosudf
PUBLIC uv_a
PRIVATE os util common nodes function
)
diff --git a/source/libs/function/src/tudf.c b/source/libs/function/src/tudf.c
index 5c5a98cb2e..747152b8c3 100644
--- a/source/libs/function/src/tudf.c
+++ b/source/libs/function/src/tudf.c
@@ -63,25 +63,25 @@ static void udfWatchUdfd(void *args);
void udfUdfdExit(uv_process_t *process, int64_t exitStatus, int32_t termSignal) {
TAOS_UDF_CHECK_PTR_RVOID(process);
- fnInfo("udfd process exited with status %" PRId64 ", signal %d", exitStatus, termSignal);
+ fnInfo("taosudf process exited with status %" PRId64 ", signal %d", exitStatus, termSignal);
SUdfdData *pData = process->data;
if(pData == NULL) {
- fnError("udfd process data is NULL");
+ fnError("taosudf process data is NULL");
return;
}
if (exitStatus == 0 && termSignal == 0 || atomic_load_32(&pData->stopCalled)) {
- fnInfo("udfd process exit due to SIGINT or dnode-mgmt called stop");
+ fnInfo("taosudf process exit due to SIGINT or dnode-mgmt called stop");
} else {
- fnInfo("udfd process restart");
+ fnInfo("taosudf process restart");
int32_t code = udfSpawnUdfd(pData);
if (code != 0) {
- fnError("udfd process restart failed with code:%d", code);
+ fnError("taosudf process restart failed with code:%d", code);
}
}
}
static int32_t udfSpawnUdfd(SUdfdData *pData) {
- fnInfo("start to init udfd");
+ fnInfo("start to init taosudf");
TAOS_UDF_CHECK_PTR_RCODE(pData);
int32_t err = 0;
@@ -106,12 +106,12 @@ static int32_t udfSpawnUdfd(SUdfdData *pData) {
if (strlen(path) == 0) {
TAOS_STRCAT(path, "C:\\TDengine");
}
- TAOS_STRCAT(path, "\\udfd.exe");
+ TAOS_STRCAT(path, "\\taosudf.exe");
#else
if (strlen(path) == 0) {
TAOS_STRCAT(path, "/usr/bin");
}
- TAOS_STRCAT(path, "/udfd");
+ TAOS_STRCAT(path, "/taosudf");
#endif
char *argsUdfd[] = {path, "-c", configDir, NULL};
options.args = argsUdfd;
@@ -158,9 +158,9 @@ static int32_t udfSpawnUdfd(SUdfdData *pData) {
udfdPathLdLib[udfdLdLibPathLen] = ':';
tstrncpy(udfdPathLdLib + udfdLdLibPathLen + 1, pathTaosdLdLib, sizeof(udfdPathLdLib) - udfdLdLibPathLen - 1);
if (udfdLdLibPathLen + taosdLdLibPathLen < 1024) {
- fnInfo("[UDFD]udfd LD_LIBRARY_PATH: %s", udfdPathLdLib);
+ fnInfo("[UDFD]taosudf LD_LIBRARY_PATH: %s", udfdPathLdLib);
} else {
- fnError("[UDFD]can not set correct udfd LD_LIBRARY_PATH");
+ fnError("[UDFD]can not set correct taosudf LD_LIBRARY_PATH");
}
char ldLibPathEnvItem[1024 + 32] = {0};
snprintf(ldLibPathEnvItem, 1024 + 32, "%s=%s", "LD_LIBRARY_PATH", udfdPathLdLib);
@@ -231,12 +231,12 @@ static int32_t udfSpawnUdfd(SUdfdData *pData) {
pData->process.data = (void *)pData;
#ifdef WINDOWS
- // End udfd.exe by Job.
+ // End taosudf.exe by Job.
if (pData->jobHandle != NULL) CloseHandle(pData->jobHandle);
pData->jobHandle = CreateJobObject(NULL, NULL);
bool add_job_ok = AssignProcessToJobObject(pData->jobHandle, pData->process.process_handle);
if (!add_job_ok) {
- fnError("Assign udfd to job failed.");
+ fnError("Assign taosudf to job failed.");
} else {
JOBOBJECT_EXTENDED_LIMIT_INFORMATION limit_info;
memset(&limit_info, 0x0, sizeof(limit_info));
@@ -244,15 +244,15 @@ static int32_t udfSpawnUdfd(SUdfdData *pData) {
bool set_auto_kill_ok =
SetInformationJobObject(pData->jobHandle, JobObjectExtendedLimitInformation, &limit_info, sizeof(limit_info));
if (!set_auto_kill_ok) {
- fnError("Set job auto kill udfd failed.");
+ fnError("Set job auto kill taosudf failed.");
}
}
#endif
if (err != 0) {
- fnError("can not spawn udfd. path: %s, error: %s", path, uv_strerror(err));
+ fnError("can not spawn taosudf. path: %s, error: %s", path, uv_strerror(err));
} else {
- fnInfo("udfd is initialized");
+ fnInfo("taosudf is initialized");
}
_OVER:
@@ -295,13 +295,13 @@ static void udfWatchUdfd(void *args) {
atomic_store_32(&pData->spawnErr, 0);
(void)uv_barrier_wait(&pData->barrier);
int32_t num = uv_run(&pData->loop, UV_RUN_DEFAULT);
- fnInfo("udfd loop exit with %d active handles, line:%d", num, __LINE__);
+ fnInfo("taosudf loop exit with %d active handles, line:%d", num, __LINE__);
uv_walk(&pData->loop, udfUdfdCloseWalkCb, NULL);
num = uv_run(&pData->loop, UV_RUN_DEFAULT);
- fnInfo("udfd loop exit with %d active handles, line:%d", num, __LINE__);
+ fnInfo("taosudf loop exit with %d active handles, line:%d", num, __LINE__);
if (uv_loop_close(&pData->loop) != 0) {
- fnError("udfd loop close failed, lino:%d", __LINE__);
+ fnError("taosudf loop close failed, lino:%d", __LINE__);
}
return;
@@ -310,9 +310,9 @@ _exit:
(void)uv_barrier_wait(&pData->barrier);
atomic_store_32(&pData->spawnErr, terrno);
if (uv_loop_close(&pData->loop) != 0) {
- fnError("udfd loop close failed, lino:%d", __LINE__);
+ fnError("taosudf loop close failed, lino:%d", __LINE__);
}
- fnError("udfd thread exit with code:%d lino:%d", terrno, terrln);
+ fnError("taosudf thread exit with code:%d lino:%d", terrno, terrln);
terrno = TSDB_CODE_UDF_UV_EXEC_FAILURE;
}
return;
@@ -321,11 +321,11 @@ _exit:
int32_t udfStartUdfd(int32_t startDnodeId) {
int32_t code = 0, lino = 0;
if (!tsStartUdfd) {
- fnInfo("start udfd is disabled.") return 0;
+ fnInfo("start taosudf is disabled.") return 0;
}
SUdfdData *pData = &udfdGlobal;
if (pData->startCalled) {
- fnInfo("dnode start udfd already called");
+ fnInfo("dnode start taosudf already called");
return 0;
}
pData->startCalled = true;
@@ -341,27 +341,27 @@ int32_t udfStartUdfd(int32_t startDnodeId) {
if (err != 0) {
uv_barrier_destroy(&pData->barrier);
if (uv_async_send(&pData->stopAsync) != 0) {
- fnError("start udfd: failed to send stop async");
+ fnError("start taosudf: failed to send stop async");
}
if (uv_thread_join(&pData->thread) != 0) {
- fnError("start udfd: failed to join udfd thread");
+ fnError("start taosudf: failed to join taosudf thread");
}
pData->needCleanUp = false;
- fnInfo("udfd is cleaned up after spawn err");
+ fnInfo("taosudf is cleaned up after spawn err");
TAOS_CHECK_GOTO(err, &lino, _exit);
} else {
pData->needCleanUp = true;
}
_exit:
if (code != 0) {
- fnError("udfd start failed with code:%d, lino:%d", code, lino);
+ fnError("taosudf start failed with code:%d, lino:%d", code, lino);
}
return code;
}
void udfStopUdfd() {
SUdfdData *pData = &udfdGlobal;
- fnInfo("udfd start to stop, need cleanup:%d, spawn err:%d", pData->needCleanUp, pData->spawnErr);
+ fnInfo("taosudf start to stop, need cleanup:%d, spawn err:%d", pData->needCleanUp, pData->spawnErr);
if (!pData->needCleanUp || atomic_load_32(&pData->stopCalled)) {
return;
}
@@ -369,16 +369,16 @@ void udfStopUdfd() {
pData->needCleanUp = false;
uv_barrier_destroy(&pData->barrier);
if (uv_async_send(&pData->stopAsync) != 0) {
- fnError("stop udfd: failed to send stop async");
+ fnError("stop taosudf: failed to send stop async");
}
if (uv_thread_join(&pData->thread) != 0) {
- fnError("stop udfd: failed to join udfd thread");
+ fnError("stop taosudf: failed to join taosudf thread");
}
#ifdef WINDOWS
if (pData->jobHandle != NULL) CloseHandle(pData->jobHandle);
#endif
- fnInfo("udfd is cleaned up");
+ fnInfo("taosudf is cleaned up");
return;
}
@@ -2152,7 +2152,7 @@ int32_t callUdf(UdfcFuncHandle handle, int8_t callType, SSDataBlock *input, SUdf
fnDebug("udfc call udf. callType: %d, funcHandle: %p", callType, handle);
SUdfcUvSession *session = (SUdfcUvSession *)handle;
if (session->udfUvPipe == NULL) {
- fnError("No pipe to udfd");
+ fnError("No pipe to taosudf");
return TSDB_CODE_UDF_PIPE_NOT_EXIST;
}
SClientUdfTask *task = taosMemoryCalloc(1, sizeof(SClientUdfTask));
@@ -2282,7 +2282,7 @@ int32_t doTeardownUdf(UdfcFuncHandle handle) {
SUdfcUvSession *session = (SUdfcUvSession *)handle;
if (session->udfUvPipe == NULL) {
- fnError("tear down udf. pipe to udfd does not exist. udf name: %s", session->udfName);
+ fnError("tear down udf. pipe to taosudf does not exist. udf name: %s", session->udfName);
taosMemoryFree(session);
return TSDB_CODE_UDF_PIPE_NOT_EXIST;
}
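Editor's note (not part of the patch): the hunks above keep the daemon's restart semantics, where udfUdfdExit respawns taosudf unless it exited cleanly or a stop was requested. A rough Python sketch of that policy, with the binary path, config directory, and retry limit all assumed:

```python
# Illustrative watchdog sketch mirroring udfSpawnUdfd/udfUdfdExit:
# respawn taosudf when it exits abnormally, stop on clean exit.
import subprocess
import time

def run_watchdog(binary="/usr/bin/taosudf", cfg_dir="/etc/taos", max_restarts=5):
    restarts = 0
    while restarts <= max_restarts:
        proc = subprocess.Popen([binary, "-c", cfg_dir])
        rc = proc.wait()
        if rc == 0:          # clean exit (stop requested or SIGINT handled)
            print("taosudf exited cleanly, not restarting")
            return
        restarts += 1
        print(f"taosudf exited with {rc}, restart #{restarts}")
        time.sleep(1)        # brief backoff before respawning
    print("giving up after repeated failures")

if __name__ == "__main__":
    run_watchdog()
```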
diff --git a/source/libs/function/src/udfd.c b/source/libs/function/src/udfd.c
index fcc4c337f6..4bdb67a750 100644
--- a/source/libs/function/src/udfd.c
+++ b/source/libs/function/src/udfd.c
@@ -166,7 +166,7 @@ int32_t udfdCPluginUdfScalarProc(SUdfDataBlock *block, SUdfColumn *resultCol, vo
if (ctx->scalarProcFunc) {
return ctx->scalarProcFunc(block, resultCol);
} else {
- fnError("udfd c plugin scalar proc not implemented");
+ fnError("taosudf c plugin scalar proc not implemented");
return TSDB_CODE_UDF_FUNC_EXEC_FAILURE;
}
}
@@ -177,7 +177,7 @@ int32_t udfdCPluginUdfAggStart(SUdfInterBuf *buf, void *udfCtx) {
if (ctx->aggStartFunc) {
return ctx->aggStartFunc(buf);
} else {
- fnError("udfd c plugin aggregation start not implemented");
+ fnError("taosudf c plugin aggregation start not implemented");
return TSDB_CODE_UDF_FUNC_EXEC_FAILURE;
}
return 0;
@@ -189,7 +189,7 @@ int32_t udfdCPluginUdfAggProc(SUdfDataBlock *block, SUdfInterBuf *interBuf, SUdf
if (ctx->aggProcFunc) {
return ctx->aggProcFunc(block, interBuf, newInterBuf);
} else {
- fnError("udfd c plugin aggregation process not implemented");
+ fnError("taosudf c plugin aggregation process not implemented");
return TSDB_CODE_UDF_FUNC_EXEC_FAILURE;
}
}
@@ -201,7 +201,7 @@ int32_t udfdCPluginUdfAggProc(SUdfDataBlock *block, SUdfInterBuf *interBuf, SUdf
// if (ctx->aggMergeFunc) {
// return ctx->aggMergeFunc(inputBuf1, inputBuf2, outputBuf);
// } else {
-// fnError("udfd c plugin aggregation merge not implemented");
+// fnError("taosudf c plugin aggregation merge not implemented");
// return TSDB_CODE_UDF_FUNC_EXEC_FAILURE;
// }
// }
@@ -212,7 +212,7 @@ int32_t udfdCPluginUdfAggFinish(SUdfInterBuf *buf, SUdfInterBuf *resultData, voi
if (ctx->aggFinishFunc) {
return ctx->aggFinishFunc(buf, resultData);
} else {
- fnError("udfd c plugin aggregation finish not implemented");
+ fnError("taosudf c plugin aggregation finish not implemented");
return TSDB_CODE_UDF_FUNC_EXEC_FAILURE;
}
return 0;
@@ -667,7 +667,7 @@ void udfdFreeUdf(void *pData) {
if (pSudf->scriptPlugin != NULL) {
if(pSudf->scriptPlugin->udfDestroyFunc(pSudf->scriptUdfCtx) != 0) {
- fnError("udfdFreeUdf: udfd destroy udf %s failed", pSudf->name);
+ fnError("udfdFreeUdf: taosudf destroy udf %s failed", pSudf->name);
}
}
@@ -688,15 +688,15 @@ int32_t udfdGetOrCreateUdf(SUdf **ppUdf, const char *udfName) {
++(*pUdfHash)->refCount;
*ppUdf = *pUdfHash;
uv_mutex_unlock(&global.udfsMutex);
- fnInfo("udfd reuse existing udf. udf %s udf version %d, udf created time %" PRIx64, (*ppUdf)->name, (*ppUdf)->version,
+ fnInfo("taosudf reuse existing udf. udf %s udf version %d, udf created time %" PRIx64, (*ppUdf)->name, (*ppUdf)->version,
(*ppUdf)->createdTime);
return 0;
} else {
(*pUdfHash)->expired = true;
- fnInfo("udfd expired, check for new version. existing udf %s udf version %d, udf created time %" PRIx64,
+ fnInfo("taosudf expired, check for new version. existing udf %s udf version %d, udf created time %" PRIx64,
(*pUdfHash)->name, (*pUdfHash)->version, (*pUdfHash)->createdTime);
if(taosHashRemove(global.udfsHash, udfName, strlen(udfName)) != 0) {
- fnError("udfdGetOrCreateUdf: udfd remove udf %s failed", udfName);
+ fnError("udfdGetOrCreateUdf: taosudf remove udf %s failed", udfName);
}
}
}
@@ -1001,7 +1001,7 @@ void udfdProcessTeardownRequest(SUvUdfWork *uvUdf, SUdfRequest *request) {
uv_cond_destroy(&udf->condReady);
uv_mutex_destroy(&udf->lock);
code = udf->scriptPlugin->udfDestroyFunc(udf->scriptUdfCtx);
- fnDebug("udfd destroy function returns %d", code);
+ fnDebug("taosudf destroy function returns %d", code);
taosMemoryFree(udf);
}
@@ -1063,7 +1063,7 @@ int32_t udfdSaveFuncBodyToFile(SFuncInfo *pFuncInfo, SUdf *udf) {
TAOS_UDF_CHECK_PTR_RCODE(pFuncInfo, udf);
if (!osDataSpaceAvailable()) {
terrno = TSDB_CODE_NO_DISKSPACE;
- fnError("udfd create shared library failed since %s", terrstr());
+ fnError("taosudf create shared library failed since %s", terrstr());
return terrno;
}
@@ -1072,22 +1072,22 @@ int32_t udfdSaveFuncBodyToFile(SFuncInfo *pFuncInfo, SUdf *udf) {
bool fileExist = !(taosStatFile(path, NULL, NULL, NULL) < 0);
if (fileExist) {
tstrncpy(udf->path, path, PATH_MAX);
- fnInfo("udfd func body file. reuse existing file %s", path);
+ fnInfo("taosudf func body file. reuse existing file %s", path);
return TSDB_CODE_SUCCESS;
}
TdFilePtr file = taosOpenFile(path, TD_FILE_CREATE | TD_FILE_WRITE | TD_FILE_READ | TD_FILE_TRUNC);
if (file == NULL) {
- fnError("udfd write udf shared library: %s failed, error: %d %s", path, errno, strerror(terrno));
+ fnError("taosudf write udf shared library: %s failed, error: %d %s", path, errno, strerror(terrno));
return TSDB_CODE_FILE_CORRUPTED;
}
int64_t count = taosWriteFile(file, pFuncInfo->pCode, pFuncInfo->codeSize);
if (count != pFuncInfo->codeSize) {
- fnError("udfd write udf shared library failed");
+ fnError("taosudf write udf shared library failed");
return TSDB_CODE_FILE_CORRUPTED;
}
if(taosCloseFile(&file) != 0) {
- fnError("udfdSaveFuncBodyToFile, udfd close file failed");
+ fnError("udfdSaveFuncBodyToFile, taosudf close file failed");
return TSDB_CODE_FILE_CORRUPTED;
}
@@ -1106,7 +1106,7 @@ void udfdProcessRpcRsp(void *parent, SRpcMsg *pMsg, SEpSet *pEpSet) {
}
if (pMsg->code != TSDB_CODE_SUCCESS) {
- fnError("udfd rpc error, code:%s", tstrerror(pMsg->code));
+ fnError("taosudf rpc error, code:%s", tstrerror(pMsg->code));
msgInfo->code = pMsg->code;
goto _return;
}
@@ -1114,7 +1114,7 @@ void udfdProcessRpcRsp(void *parent, SRpcMsg *pMsg, SEpSet *pEpSet) {
if (msgInfo->rpcType == UDFD_RPC_MNODE_CONNECT) {
SConnectRsp connectRsp = {0};
if(tDeserializeSConnectRsp(pMsg->pCont, pMsg->contLen, &connectRsp) < 0){
- fnError("udfd deserialize connect response failed");
+ fnError("taosudf deserialize connect response failed");
goto _return;
}
@@ -1137,7 +1137,7 @@ void udfdProcessRpcRsp(void *parent, SRpcMsg *pMsg, SEpSet *pEpSet) {
} else if (msgInfo->rpcType == UDFD_RPC_RETRIVE_FUNC) {
SRetrieveFuncRsp retrieveRsp = {0};
if(tDeserializeSRetrieveFuncRsp(pMsg->pCont, pMsg->contLen, &retrieveRsp) < 0){
- fnError("udfd deserialize retrieve func response failed");
+ fnError("taosudf deserialize retrieve func response failed");
goto _return;
}
@@ -1303,16 +1303,16 @@ int32_t udfdOpenClientRpc() {
}
void udfdCloseClientRpc() {
- fnInfo("udfd begin closing rpc");
+ fnInfo("taosudf begin closing rpc");
rpcClose(global.clientRpc);
- fnInfo("udfd finish closing rpc");
+ fnInfo("taosudf finish closing rpc");
}
void udfdOnWrite(uv_write_t *req, int status) {
TAOS_UDF_CHECK_PTR_RVOID(req);
SUvUdfWork *work = (SUvUdfWork *)req->data;
if (status < 0) {
- fnError("udfd send response error, length:%zu code:%s", work->output.len, uv_err_name(status));
+ fnError("taosudf send response error, length:%zu code:%s", work->output.len, uv_err_name(status));
}
// remove work from the connection work list
if (work->conn != NULL) {
@@ -1337,14 +1337,14 @@ void udfdSendResponse(uv_work_t *work, int status) {
if (udfWork->conn != NULL) {
uv_write_t *write_req = taosMemoryMalloc(sizeof(uv_write_t));
if(write_req == NULL) {
- fnError("udfd send response error, malloc failed");
+ fnError("taosudf send response error, malloc failed");
taosMemoryFree(work);
return;
}
write_req->data = udfWork;
int32_t code = uv_write(write_req, udfWork->conn->client, &udfWork->output, 1, udfdOnWrite);
if (code != 0) {
- fnError("udfd send response error %s", uv_strerror(code));
+ fnError("taosudf send response error %s", uv_strerror(code));
taosMemoryFree(write_req);
}
}
@@ -1365,7 +1365,7 @@ void udfdAllocBuffer(uv_handle_t *handle, size_t suggestedSize, uv_buf_t *buf) {
buf->base = ctx->inputBuf;
buf->len = ctx->inputCap;
} else {
- fnError("udfd can not allocate enough memory") buf->base = NULL;
+ fnError("taosudf can not allocate enough memory") buf->base = NULL;
buf->len = 0;
}
} else if (ctx->inputTotal == -1 && ctx->inputLen < msgHeadSize) {
@@ -1379,7 +1379,7 @@ void udfdAllocBuffer(uv_handle_t *handle, size_t suggestedSize, uv_buf_t *buf) {
buf->base = ctx->inputBuf + ctx->inputLen;
buf->len = ctx->inputCap - ctx->inputLen;
} else {
- fnError("udfd can not allocate enough memory") buf->base = NULL;
+ fnError("taosudf can not allocate enough memory") buf->base = NULL;
buf->len = 0;
}
}
@@ -1387,7 +1387,7 @@ void udfdAllocBuffer(uv_handle_t *handle, size_t suggestedSize, uv_buf_t *buf) {
bool isUdfdUvMsgComplete(SUdfdUvConn *pipe) {
if (pipe == NULL) {
- fnError("udfd pipe is NULL, LINE:%d", __LINE__);
+ fnError("taosudf pipe is NULL, LINE:%d", __LINE__);
return false;
}
if (pipe->inputTotal == -1 && pipe->inputLen >= sizeof(int32_t)) {
@@ -1407,12 +1407,12 @@ void udfdHandleRequest(SUdfdUvConn *conn) {
uv_work_t *work = taosMemoryMalloc(sizeof(uv_work_t));
if(work == NULL) {
- fnError("udfd malloc work failed");
+ fnError("taosudf malloc work failed");
return;
}
SUvUdfWork *udfWork = taosMemoryMalloc(sizeof(SUvUdfWork));
if(udfWork == NULL) {
- fnError("udfd malloc udf work failed");
+ fnError("taosudf malloc udf work failed");
taosMemoryFree(work);
return;
}
@@ -1427,7 +1427,7 @@ void udfdHandleRequest(SUdfdUvConn *conn) {
work->data = udfWork;
if(uv_queue_work(global.loop, work, udfdProcessRequest, udfdSendResponse) != 0)
{
- fnError("udfd queue work failed");
+ fnError("taosudf queue work failed");
taosMemoryFree(work);
taosMemoryFree(udfWork);
}
@@ -1449,7 +1449,7 @@ void udfdPipeCloseCb(uv_handle_t *pipe) {
void udfdPipeRead(uv_stream_t *client, ssize_t nread, const uv_buf_t *buf) {
TAOS_UDF_CHECK_PTR_RVOID(client, buf);
- fnDebug("udfd read %zd bytes from client", nread);
+ fnDebug("taosudf read %zd bytes from client", nread);
if (nread == 0) return;
SUdfdUvConn *conn = client->data;
@@ -1466,7 +1466,7 @@ void udfdPipeRead(uv_stream_t *client, ssize_t nread, const uv_buf_t *buf) {
if (nread < 0) {
if (nread == UV_EOF) {
- fnInfo("udfd pipe read EOF");
+ fnInfo("taosudf pipe read EOF");
} else {
fnError("Receive error %s", uv_err_name(nread));
}
@@ -1477,26 +1477,26 @@ void udfdPipeRead(uv_stream_t *client, ssize_t nread, const uv_buf_t *buf) {
void udfdOnNewConnection(uv_stream_t *server, int status) {
TAOS_UDF_CHECK_PTR_RVOID(server);
if (status < 0) {
- fnError("udfd new connection error, code:%s", uv_strerror(status));
+ fnError("taosudf new connection error, code:%s", uv_strerror(status));
return;
}
int32_t code = 0;
uv_pipe_t *client = (uv_pipe_t *)taosMemoryMalloc(sizeof(uv_pipe_t));
if(client == NULL) {
- fnError("udfd pipe malloc failed");
+ fnError("taosudf pipe malloc failed");
return;
}
code = uv_pipe_init(global.loop, client, 0);
if (code) {
- fnError("udfd pipe init error %s", uv_strerror(code));
+ fnError("taosudf pipe init error %s", uv_strerror(code));
taosMemoryFree(client);
return;
}
if (uv_accept(server, (uv_stream_t *)client) == 0) {
SUdfdUvConn *ctx = taosMemoryMalloc(sizeof(SUdfdUvConn));
if(ctx == NULL) {
- fnError("udfd conn malloc failed");
+ fnError("taosudf conn malloc failed");
goto _exit;
}
ctx->pWorkList = NULL;
@@ -1508,7 +1508,7 @@ void udfdOnNewConnection(uv_stream_t *server, int status) {
ctx->client = (uv_stream_t *)client;
code = uv_read_start((uv_stream_t *)client, udfdAllocBuffer, udfdPipeRead);
if (code) {
- fnError("udfd read start error %s", uv_strerror(code));
+ fnError("taosudf read start error %s", uv_strerror(code));
udfdUvHandleError(ctx);
taosMemoryFree(ctx);
taosMemoryFree(client);
@@ -1522,7 +1522,7 @@ _exit:
void udfdIntrSignalHandler(uv_signal_t *handle, int signum) {
TAOS_UDF_CHECK_PTR_RVOID(handle);
- fnInfo("udfd signal received: %d\n", signum);
+ fnInfo("taosudf signal received: %d\n", signum);
uv_fs_t req;
int32_t code = uv_fs_unlink(global.loop, &req, global.listenPipeName, NULL);
if(code) {
@@ -1558,7 +1558,7 @@ static int32_t udfdParseArgs(int32_t argc, char *argv[]) {
}
static void udfdPrintVersion() {
- (void)printf("udfd version: %s compatible_version: %s\n", td_version, td_compatible_version);
+ (void)printf("taosudf version: %s compatible_version: %s\n", td_version, td_compatible_version);
(void)printf("git: %s\n", td_gitinfo);
(void)printf("build: %s\n", td_buildinfo);
}
@@ -1573,7 +1573,7 @@ void udfdCtrlAllocBufCb(uv_handle_t *handle, size_t suggested_size, uv_buf_t *bu
TAOS_UDF_CHECK_PTR_RVOID(buf);
buf->base = taosMemoryMalloc(suggested_size);
if (buf->base == NULL) {
- fnError("udfd ctrl pipe alloc buffer failed");
+ fnError("taosudf ctrl pipe alloc buffer failed");
return;
}
buf->len = suggested_size;
@@ -1582,13 +1582,13 @@ void udfdCtrlAllocBufCb(uv_handle_t *handle, size_t suggested_size, uv_buf_t *bu
void udfdCtrlReadCb(uv_stream_t *q, ssize_t nread, const uv_buf_t *buf) {
TAOS_UDF_CHECK_PTR_RVOID(q, buf);
if (nread < 0) {
- fnError("udfd ctrl pipe read error. %s", uv_err_name(nread));
+ fnError("taosudf ctrl pipe read error. %s", uv_err_name(nread));
taosMemoryFree(buf->base);
uv_close((uv_handle_t *)q, NULL);
uv_stop(global.loop);
return;
}
- fnError("udfd ctrl pipe read %zu bytes", nread);
+ fnError("taosudf ctrl pipe read %zu bytes", nread);
taosMemoryFree(buf->base);
}
@@ -1604,7 +1604,7 @@ static void removeListeningPipe() {
static int32_t udfdUvInit() {
TAOS_CHECK_RETURN(uv_loop_init(global.loop));
- if (tsStartUdfd) { // udfd is started by taosd, which shall exit when taosd exit
+ if (tsStartUdfd) { // taosudf is started by taosd and shall exit when taosd exits
TAOS_CHECK_RETURN(uv_pipe_init(global.loop, &global.ctrlPipe, 1));
TAOS_CHECK_RETURN(uv_pipe_open(&global.ctrlPipe, 0));
TAOS_CHECK_RETURN(uv_read_start((uv_stream_t *)&global.ctrlPipe, udfdCtrlAllocBufCb, udfdCtrlReadCb));
@@ -1642,13 +1642,13 @@ static void udfdCloseWalkCb(uv_handle_t *handle, void *arg) {
static int32_t udfdGlobalDataInit() {
uv_loop_t *loop = taosMemoryMalloc(sizeof(uv_loop_t));
if (loop == NULL) {
- fnError("udfd init uv loop failed, mem overflow");
+ fnError("taosudf init uv loop failed, mem overflow");
return terrno;
}
global.loop = loop;
if (uv_mutex_init(&global.scriptPluginsMutex) != 0) {
- fnError("udfd init script plugins mutex failed");
+ fnError("taosudf init script plugins mutex failed");
return TSDB_CODE_UDF_UV_EXEC_FAILURE;
}
@@ -1659,7 +1659,7 @@ static int32_t udfdGlobalDataInit() {
// taosHashSetFreeFp(global.udfsHash, udfdFreeUdf);
if (uv_mutex_init(&global.udfsMutex) != 0) {
- fnError("udfd init udfs mutex failed");
+ fnError("taosudf init udfs mutex failed");
return TSDB_CODE_UDF_UV_EXEC_FAILURE;
}
@@ -1670,23 +1670,23 @@ static void udfdGlobalDataDeinit() {
uv_mutex_destroy(&global.udfsMutex);
uv_mutex_destroy(&global.scriptPluginsMutex);
taosMemoryFreeClear(global.loop);
- fnInfo("udfd global data deinit");
+ fnInfo("taosudf global data deinit");
}
static void udfdRun() {
- fnInfo("start udfd event loop");
+ fnInfo("start taosudf event loop");
int32_t code = uv_run(global.loop, UV_RUN_DEFAULT);
if(code != 0) {
- fnError("udfd event loop still has active handles or requests.");
+ fnError("taosudf event loop still has active handles or requests.");
}
- fnInfo("udfd event loop stopped.");
+ fnInfo("taosudf event loop stopped.");
(void)uv_loop_close(global.loop);
uv_walk(global.loop, udfdCloseWalkCb, NULL);
code = uv_run(global.loop, UV_RUN_DEFAULT);
if(code != 0) {
- fnError("udfd event loop still has active handles or requests.");
+ fnError("taosudf event loop still has active handles or requests.");
}
(void)uv_loop_close(global.loop);
}
@@ -1702,7 +1702,7 @@ int32_t udfdInitResidentFuncs() {
while ((token = strtok_r(pSave, ",", &pSave)) != NULL) {
char func[TSDB_FUNC_NAME_LEN + 1] = {0};
tstrncpy(func, token, TSDB_FUNC_NAME_LEN);
- fnInfo("udfd add resident function %s", func);
+ fnInfo("taosudf add resident function %s", func);
if(taosArrayPush(global.residentFuncs, func) == NULL)
{
taosArrayDestroy(global.residentFuncs);
@@ -1722,18 +1722,18 @@ void udfdDeinitResidentFuncs() {
int32_t code = 0;
if (udf->scriptPlugin->udfDestroyFunc) {
code = udf->scriptPlugin->udfDestroyFunc(udf->scriptUdfCtx);
- fnDebug("udfd %s destroy function returns %d", funcName, code);
+ fnDebug("taosudf %s destroy function returns %d", funcName, code);
}
if(taosHashRemove(global.udfsHash, funcName, strlen(funcName)) != 0)
{
- fnError("udfd remove resident function %s failed", funcName);
+ fnError("taosudf remove resident function %s failed", funcName);
}
taosMemoryFree(udf);
}
}
taosHashCleanup(global.udfsHash);
taosArrayDestroy(global.residentFuncs);
- fnInfo("udfd resident functions are deinit");
+ fnInfo("taosudf resident functions are deinit");
}
int32_t udfdCreateUdfSourceDir() {
@@ -1743,7 +1743,7 @@ int32_t udfdCreateUdfSourceDir() {
snprintf(global.udfDataDir, PATH_MAX, "%s/.udf", tsTempDir);
code = taosMkDir(global.udfDataDir);
}
- fnInfo("udfd create udf source directory %s. result: %s", global.udfDataDir, tstrerror(code));
+ fnInfo("taosudf create udf source directory %s. result: %s", global.udfDataDir, tstrerror(code));
return code;
}
@@ -1779,7 +1779,7 @@ int main(int argc, char *argv[]) {
if (udfdInitLog() != 0) {
// ignore create log failed, because this error no matter
- (void)printf("failed to init udfd log.");
+ (void)printf("failed to init taosudf log.");
} else {
logInitialized = true; // log is initialized
}
@@ -1790,20 +1790,20 @@ int main(int argc, char *argv[]) {
goto _exit;
}
cfgInitialized = true; // cfg is initialized
- fnInfo("udfd start with config file %s", configDir);
+ fnInfo("taosudf start with config file %s", configDir);
if (initEpSetFromCfg(tsFirst, tsSecond, &global.mgmtEp) != 0) {
fnError("init ep set from cfg failed");
code = -3;
goto _exit;
}
- fnInfo("udfd start with mnode ep %s", global.mgmtEp.epSet.eps[0].fqdn);
+ fnInfo("taosudf start with mnode ep %s", global.mgmtEp.epSet.eps[0].fqdn);
if (udfdOpenClientRpc() != 0) {
fnError("open rpc connection to mnode failed");
code = -4;
goto _exit;
}
- fnInfo("udfd rpc client is opened");
+ fnInfo("taosudf rpc client is opened");
openClientRpcFinished = true; // rpc is opened
if (udfdCreateUdfSourceDir() != 0) {
@@ -1812,7 +1812,7 @@ int main(int argc, char *argv[]) {
goto _exit;
}
udfSourceDirInited = true; // udf source dir is created
- fnInfo("udfd udf source directory is created");
+ fnInfo("taosudf udf source directory is created");
if (udfdGlobalDataInit() != 0) {
fnError("init global data failed");
@@ -1820,14 +1820,14 @@ int main(int argc, char *argv[]) {
goto _exit;
}
globalDataInited = true; // global data is inited
- fnInfo("udfd global data is inited");
+ fnInfo("taosudf global data is inited");
if (udfdUvInit() != 0) {
fnError("uv init failure");
code = -7;
goto _exit;
}
- fnInfo("udfd uv is inited");
+ fnInfo("taosudf uv is inited");
if (udfdInitResidentFuncs() != 0) {
fnError("init resident functions failed");
@@ -1835,10 +1835,10 @@ int main(int argc, char *argv[]) {
goto _exit;
}
residentFuncsInited = true; // resident functions are inited
- fnInfo("udfd resident functions are inited");
+ fnInfo("taosudf resident functions are inited");
udfdRun();
- fnInfo("udfd exit normally");
+ fnInfo("taosudf exit normally");
removeListeningPipe();
diff --git a/tests/system-test/0-others/udfTest.py b/tests/system-test/0-others/udfTest.py
index 8134327b41..7d953f2977 100644
--- a/tests/system-test/0-others/udfTest.py
+++ b/tests/system-test/0-others/udfTest.py
@@ -622,17 +622,17 @@ class TDTestCase:
tdLog.info("taosd found in %s" % buildPath)
cfgPath = buildPath + "/../sim/dnode1/cfg"
- udfdPath = buildPath +'/build/bin/udfd'
+ udfdPath = buildPath +'/build/bin/taosudf'
for i in range(3):
- tdLog.info(" loop restart udfd %d_th" % i)
+ tdLog.info(" loop restart taosudf %d_th" % i)
tdSql.query("select udf2(sub1.c1 ,sub1.c2), udf2(sub2.c2 ,sub2.c1) from sub1, sub2 where sub1.ts=sub2.ts and sub1.c1 is not null")
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # stop udfd cmds
- get_processID = "ps -ef | grep -w udfd | grep -v grep| grep -v defunct | awk '{print $2}'"
+ # stop taosudf cmds
+ get_processID = "ps -ef | grep -w taosudf | grep -v grep| grep -v defunct | awk '{print $2}'"
processID = subprocess.check_output(get_processID, shell=True).decode("utf-8")
stop_udfd = " kill -9 %s" % processID
os.system(stop_udfd)
@@ -643,9 +643,9 @@ class TDTestCase:
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # # start udfd cmds
+ # # start taosudf cmds
# start_udfd = "nohup " + udfdPath +'-c' +cfgPath +" > /dev/null 2>&1 &"
- # tdLog.info("start udfd : %s " % start_udfd)
+ # tdLog.info("start taosudf : %s " % start_udfd)
def test_function_name(self):
tdLog.info(" create function name is not build_in functions ")
@@ -680,15 +680,15 @@ class TDTestCase:
time.sleep(2)
def test_udfd_cmd(self):
- tdLog.info(" test udfd -V ")
- os.system("udfd -V")
- tdLog.info(" test udfd -c ")
- os.system("udfd -c")
+ tdLog.info(" test taosudf -V ")
+ os.system("taosudf -V")
+ tdLog.info(" test taosudf -c ")
+ os.system("taosudf -c")
letters = string.ascii_letters + string.digits + '\\'
path = ''.join(random.choice(letters) for i in range(5000))
- os.system(f"udfd -c {path}")
+ os.system(f"taosudf -c {path}")
def test_change_udf_normal(self, func_name):
# create function with normal file
diff --git a/tests/system-test/0-others/udf_cfg2.py b/tests/system-test/0-others/udf_cfg2.py
index 89c4030977..10af25c305 100644
--- a/tests/system-test/0-others/udf_cfg2.py
+++ b/tests/system-test/0-others/udf_cfg2.py
@@ -592,17 +592,17 @@ class TDTestCase:
tdLog.info("taosd found in %s" % buildPath)
cfgPath = buildPath + "/../sim/dnode1/cfg"
- udfdPath = buildPath +'/build/bin/udfd'
+ udfdPath = buildPath +'/build/bin/taosudf'
for i in range(3):
- tdLog.info(" loop restart udfd %d_th" % i)
+ tdLog.info(" loop restart taosudf %d_th" % i)
tdSql.query("select udf2(sub1.c1 ,sub1.c2), udf2(sub2.c2 ,sub2.c1) from sub1, sub2 where sub1.ts=sub2.ts and sub1.c1 is not null")
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # stop udfd cmds
- get_processID = "ps -ef | grep -w udfd | grep -v grep| grep -v defunct | awk '{print $2}'"
+ # stop taosudf cmds
+ get_processID = "ps -ef | grep -w taosudf | grep -v grep| grep -v defunct | awk '{print $2}'"
processID = subprocess.check_output(get_processID, shell=True).decode("utf-8")
stop_udfd = " kill -9 %s" % processID
os.system(stop_udfd)
@@ -613,9 +613,9 @@ class TDTestCase:
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # # start udfd cmds
+ # # start taosudf cmds
# start_udfd = "nohup " + udfdPath +'-c' +cfgPath +" > /dev/null 2>&1 &"
- # tdLog.info("start udfd : %s " % start_udfd)
+ # tdLog.info("start taosudf : %s " % start_udfd)
def test_function_name(self):
tdLog.info(" create function name is not build_in functions ")
diff --git a/tests/system-test/0-others/udf_cluster.py b/tests/system-test/0-others/udf_cluster.py
index c41412c10d..2935bd1167 100644
--- a/tests/system-test/0-others/udf_cluster.py
+++ b/tests/system-test/0-others/udf_cluster.py
@@ -301,14 +301,14 @@ class TDTestCase:
cfgPath = dnode.cfgDir
- udfdPath = buildPath +'/build/bin/udfd'
+ udfdPath = buildPath +'/build/bin/taosudf'
for i in range(5):
- tdLog.info(" loop restart udfd %d_th at dnode_index : %s" % (i ,dnode.index))
+ tdLog.info(" loop restart taosudf %d_th at dnode_index : %s" % (i ,dnode.index))
self.basic_udf_query(dnode)
- # stop udfd cmds
- get_processID = "ps -ef | grep -w udfd | grep %s | grep 'root' | grep -v grep| grep -v defunct | awk '{print $2}'"%cfgPath
+ # stop taosudf cmds
+ get_processID = "ps -ef | grep -w taosudf | grep %s | grep 'root' | grep -v grep| grep -v defunct | awk '{print $2}'"%cfgPath
processID = subprocess.check_output(get_processID, shell=True).decode("utf-8")
stop_udfd = " kill -9 %s" % processID
os.system(stop_udfd)
@@ -317,7 +317,7 @@ class TDTestCase:
def test_restart_udfd_All_dnodes(self):
for dnode in self.TDDnodes.dnodes:
- tdLog.info(" start restart udfd for dnode_index :%s" %dnode.index )
+ tdLog.info(" start restart taosudf for dnode_index :%s" %dnode.index )
self.restart_udfd(dnode)
diff --git a/tests/system-test/0-others/udf_create.py b/tests/system-test/0-others/udf_create.py
index 9038d99ff9..a5a5335a9b 100644
--- a/tests/system-test/0-others/udf_create.py
+++ b/tests/system-test/0-others/udf_create.py
@@ -570,7 +570,7 @@ class TDTestCase:
time.sleep(1)
hwnd = win32gui.FindWindow(None, "Microsoft Visual C++ Runtime Library")
if hwnd:
- os.system("TASKKILL /F /IM udfd.exe")
+ os.system("TASKKILL /F /IM taosudf.exe")
def unexpected_create(self):
if (platform.system().lower() == 'windows' and tdDnodes.dnodes[0].remoteIP == ""):
@@ -627,17 +627,17 @@ class TDTestCase:
tdLog.info("taosd found in %s" % buildPath)
cfgPath = buildPath + "/../sim/dnode1/cfg"
- udfdPath = buildPath +'/build/bin/udfd'
+ udfdPath = buildPath +'/build/bin/taosudf'
for i in range(3):
- tdLog.info(" loop restart udfd %d_th" % i)
+ tdLog.info(" loop restart taosudf %d_th" % i)
tdSql.query("select udf2(sub1.c1 ,sub1.c2), udf2(sub2.c2 ,sub2.c1) from sub1, sub2 where sub1.ts=sub2.ts and sub1.c1 is not null")
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # stop udfd cmds
- get_processID = "ps -ef | grep -w udfd | grep -v grep| grep -v defunct | awk '{print $2}'"
+ # stop taosudf cmds
+ get_processID = "ps -ef | grep -w taosudf | grep -v grep| grep -v defunct | awk '{print $2}'"
processID = subprocess.check_output(get_processID, shell=True).decode("utf-8")
stop_udfd = " kill -9 %s" % processID
os.system(stop_udfd)
@@ -648,9 +648,9 @@ class TDTestCase:
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # # start udfd cmds
+ # # start taosudf cmds
# start_udfd = "nohup " + udfdPath +'-c' +cfgPath +" > /dev/null 2>&1 &"
- # tdLog.info("start udfd : %s " % start_udfd)
+ # tdLog.info("start taosudf : %s " % start_udfd)
def test_function_name(self):
tdLog.info(" create function name is not build_in functions ")
diff --git a/tests/system-test/0-others/udf_restart_taosd.py b/tests/system-test/0-others/udf_restart_taosd.py
index f9a3f08bf5..539bc9c79c 100644
--- a/tests/system-test/0-others/udf_restart_taosd.py
+++ b/tests/system-test/0-others/udf_restart_taosd.py
@@ -589,17 +589,17 @@ class TDTestCase:
tdLog.info("taosd found in %s" % buildPath)
cfgPath = buildPath + "/../sim/dnode1/cfg"
- udfdPath = buildPath +'/build/bin/udfd'
+ udfdPath = buildPath +'/build/bin/taosudf'
for i in range(3):
- tdLog.info(" loop restart udfd %d_th" % i)
+ tdLog.info(" loop restart taosudf %d_th" % i)
tdSql.query("select udf2(sub1.c1 ,sub1.c2), udf2(sub2.c2 ,sub2.c1) from sub1, sub2 where sub1.ts=sub2.ts and sub1.c1 is not null")
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # stop udfd cmds
- get_processID = "ps -ef | grep -w udfd | grep -v grep| grep -v defunct | awk '{print $2}'"
+ # stop taosudf cmds
+ get_processID = "ps -ef | grep -w taosudf | grep -v grep| grep -v defunct | awk '{print $2}'"
processID = subprocess.check_output(get_processID, shell=True).decode("utf-8")
stop_udfd = " kill -9 %s" % processID
os.system(stop_udfd)
@@ -610,9 +610,9 @@ class TDTestCase:
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # # start udfd cmds
+ # # start taosudf cmds
# start_udfd = "nohup " + udfdPath +'-c' +cfgPath +" > /dev/null 2>&1 &"
- # tdLog.info("start udfd : %s " % start_udfd)
+ # tdLog.info("start taosudf : %s " % start_udfd)
def test_function_name(self):
tdLog.info(" create function name is not build_in functions ")
diff --git a/tests/test_new/udf/udf_create.py b/tests/test_new/udf/udf_create.py
index 9038d99ff9..a5a5335a9b 100644
--- a/tests/test_new/udf/udf_create.py
+++ b/tests/test_new/udf/udf_create.py
@@ -570,7 +570,7 @@ class TDTestCase:
time.sleep(1)
hwnd = win32gui.FindWindow(None, "Microsoft Visual C++ Runtime Library")
if hwnd:
- os.system("TASKKILL /F /IM udfd.exe")
+ os.system("TASKKILL /F /IM taosudf.exe")
def unexpected_create(self):
if (platform.system().lower() == 'windows' and tdDnodes.dnodes[0].remoteIP == ""):
@@ -627,17 +627,17 @@ class TDTestCase:
tdLog.info("taosd found in %s" % buildPath)
cfgPath = buildPath + "/../sim/dnode1/cfg"
- udfdPath = buildPath +'/build/bin/udfd'
+ udfdPath = buildPath +'/build/bin/taosudf'
for i in range(3):
- tdLog.info(" loop restart udfd %d_th" % i)
+ tdLog.info(" loop restart taosudf %d_th" % i)
tdSql.query("select udf2(sub1.c1 ,sub1.c2), udf2(sub2.c2 ,sub2.c1) from sub1, sub2 where sub1.ts=sub2.ts and sub1.c1 is not null")
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # stop udfd cmds
- get_processID = "ps -ef | grep -w udfd | grep -v grep| grep -v defunct | awk '{print $2}'"
+ # stop taosudf cmds
+ get_processID = "ps -ef | grep -w taosudf | grep -v grep| grep -v defunct | awk '{print $2}'"
processID = subprocess.check_output(get_processID, shell=True).decode("utf-8")
stop_udfd = " kill -9 %s" % processID
os.system(stop_udfd)
@@ -648,9 +648,9 @@ class TDTestCase:
tdSql.checkData(0,0,169.661427555)
tdSql.checkData(0,1,169.661427555)
- # # start udfd cmds
+ # # start taosudf cmds
# start_udfd = "nohup " + udfdPath +'-c' +cfgPath +" > /dev/null 2>&1 &"
- # tdLog.info("start udfd : %s " % start_udfd)
+ # tdLog.info("start taosudf : %s " % start_udfd)
def test_function_name(self):
tdLog.info(" create function name is not build_in functions ")
From c27662cf09a5b6c35aa58182fb921e0e9333f879 Mon Sep 17 00:00:00 2001
From: facetosea <285808407@qq.com>
Date: Fri, 7 Mar 2025 09:51:05 +0800
Subject: [PATCH 099/105] fix: show variables rows check
---
tests/pytest/util/sql.py | 11 +++++++++++
tests/system-test/2-query/db.py | 8 ++++----
2 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/tests/pytest/util/sql.py b/tests/pytest/util/sql.py
index 3c79c22bee..fca3eb70e5 100644
--- a/tests/pytest/util/sql.py
+++ b/tests/pytest/util/sql.py
@@ -704,6 +704,17 @@ class TDSql:
tdLog.info("sql:%s, elm:%s == expect_elm:%s" % (self.sql, elm, expect_elm))
return True
self.print_error_frame_info(elm, expect_elm)
+
+ def checkGreater(self, elm, expect_elm):
+ if elm > expect_elm:
+ tdLog.info("sql:%s, elm:%s > expect_elm:%s" % (self.sql, elm, expect_elm))
+ return True
+ else:
+ caller = inspect.getframeinfo(inspect.stack()[1][0])
+ args = (caller.filename, caller.lineno, self.sql, elm, expect_elm)
+ tdLog.info("%s(%d) failed: sql:%s, elm:%s <= expect_elm:%s" % args)
+ self.print_error_frame_info(elm, expect_elm)
+ return False
def checkNotEqual(self, elm, expect_elm):
if elm != expect_elm:
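The `checkGreater` helper added above logs the comparison and returns True/False instead of asserting an exact value, which is what lets the db.py test below replace `checkRows(87)` with a lower bound that survives new config variables. A minimal standalone sketch of that contract (plain Python, outside the test framework; the sample numbers are illustrative):

```python
# Standalone illustration of the checkGreater contract: log and return a
# boolean rather than raising, mirroring the helper added above.
def check_greater(elm, expect_elm, sql="show variables"):
    if elm > expect_elm:
        print(f"sql:{sql}, elm:{elm} > expect_elm:{expect_elm}")
        return True
    print(f"failed: sql:{sql}, elm:{elm} <= expect_elm:{expect_elm}")
    return False

if __name__ == "__main__":
    assert check_greater(87, 80)        # exact count may drift upward over releases
    assert not check_greater(79, 80)    # still fails when clearly too few rows
```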
diff --git a/tests/system-test/2-query/db.py b/tests/system-test/2-query/db.py
index 3408f02e8b..2a63249bcc 100644
--- a/tests/system-test/2-query/db.py
+++ b/tests/system-test/2-query/db.py
@@ -46,8 +46,8 @@ class TDTestCase:
tdSql.checkRows(2)
def case2(self):
- tdSql.query("show variables")
- tdSql.checkRows(87)
+ tdSql.query("show variables")
+ tdSql.checkGreater(tdSql.getRows(), 80)
for i in range(self.replicaVar):
tdSql.query("show dnode %d variables like 'debugFlag'" % (i + 1))
@@ -88,7 +88,7 @@ class TDTestCase:
def show_local_variables_like(self):
tdSql.query("show local variables")
- tdSql.checkRows(85)
+ tdSql.checkGreater(tdSql.getRows(), 80)
tdSql.query("show local variables like 'debugFlag'")
tdSql.checkRows(1)
@@ -116,7 +116,7 @@ class TDTestCase:
for zone in zones:
tdLog.info(f"show {zone} variables")
tdSql.query(f"show {zone} variables")
- tdSql.checkRows(87)
+ tdSql.checkGreater(tdSql.getRows(), 80)
tdLog.info(f"show {zone} variables like 'debugFlag'")
#tdSql.query(f"show {zone} variables like 'debugFlag'")
From 609ff730c46b0b1ab1e6c632fd2f11d1aeaff7a7 Mon Sep 17 00:00:00 2001
From: Simon Guan
Date: Fri, 7 Mar 2025 10:03:43 +0800
Subject: [PATCH 100/105] refactor: jemalloc options
---
README-CN.md | 168 +++++++++++++++----------------
cmake/cmake.define | 7 +-
source/dnode/mgmt/CMakeLists.txt | 24 ++---
source/os/CMakeLists.txt | 47 +++++----
tools/shell/CMakeLists.txt | 18 ++--
5 files changed, 129 insertions(+), 135 deletions(-)
diff --git a/README-CN.md b/README-CN.md
index ad622b3896..8c78bb14ee 100644
--- a/README-CN.md
+++ b/README-CN.md
@@ -8,30 +8,30 @@
-简体中文 | [English](README.md) | [TDengine 云服务](https://cloud.taosdata.com/?utm_medium=cn&utm_source=github) | 很多职位正在热招中,请看[这里](https://www.taosdata.com/careers/)
+简体中文 | [English](README.md) | [TDengine 云服务](https://cloud.taosdata.com/?utm_medium=cn&utm_source=github) | 很多职位正在热招中,请看 [这里](https://www.taosdata.com/careers/)
# 目录
1. [TDengine 简介](#1-tdengine-简介)
1. [文档](#2-文档)
1. [必备工具](#3-必备工具)
- - [3.1 Linux预备](#31-linux系统)
- - [3.2 macOS预备](#32-macos系统)
- - [3.3 Windows预备](#33-windows系统)
+ - [3.1 Linux 预备](#31-Linux系统)
+ - [3.2 macOS 预备](#32-macOS系统)
+ - [3.3 Windows 预备](#33-Windows系统)
- [3.4 克隆仓库](#34-克隆仓库)
1. [构建](#4-构建)
- - [4.1 Linux系统上构建](#41-linux系统上构建)
- - [4.2 macOS系统上构建](#42-macos系统上构建)
- - [4.3 Windows系统上构建](#43-windows系统上构建)
+ - [4.1 Linux 系统上构建](#41-Linux系统上构建)
+ - [4.2 macOS 系统上构建](#42-macOS系统上构建)
+ - [4.3 Windows 系统上构建](#43-Windows系统上构建)
1. [打包](#5-打包)
1. [安装](#6-安装)
- - [6.1 Linux系统上安装](#61-linux系统上安装)
- - [6.2 macOS系统上安装](#62-macos系统上安装)
- - [6.3 Windows系统上安装](#63-windows系统上安装)
+ - [6.1 Linux 系统上安装](#61-Linux系统上安装)
+ - [6.2 macOS 系统上安装](#62-macOS系统上安装)
+ - [6.3 Windows 系统上安装](#63-Windows系统上安装)
1. [快速运行](#7-快速运行)
- - [7.1 Linux系统上运行](#71-linux系统上运行)
- - [7.2 macOS系统上运行](#72-macos系统上运行)
- - [7.3 Windows系统上运行](#73-windows系统上运行)
+ - [7.1 Linux 系统上运行](#71-Linux系统上运行)
+ - [7.2 macOS 系统上运行](#72-macOS系统上运行)
+ - [7.3 Windows 系统上运行](#73-Windows系统上运行)
1. [测试](#8-测试)
1. [版本发布](#9-版本发布)
1. [工作流](#10-工作流)
@@ -43,9 +43,9 @@
TDengine 是一款开源、高性能、云原生的时序数据库 (Time-Series Database, TSDB)。TDengine 能被广泛运用于物联网、工业互联网、车联网、IT 运维、金融等领域。除核心的时序数据库功能外,TDengine 还提供缓存、数据订阅、流式计算等功能,是一极简的时序数据处理平台,最大程度的减小系统设计的复杂度,降低研发和运营成本。与其他时序数据库相比,TDengine 的主要优势如下:
-- **高性能**:通过创新的存储引擎设计,无论是数据写入还是查询,TDengine 的性能比通用数据库快 10 倍以上,也远超其他时序数据库,存储空间不及通用数据库的1/10。
+- **高性能**:通过创新的存储引擎设计,无论是数据写入还是查询,TDengine 的性能比通用数据库快 10 倍以上,也远超其他时序数据库,存储空间不及通用数据库的 1/10。
-- **云原生**:通过原生分布式的设计,充分利用云平台的优势,TDengine 提供了水平扩展能力,具备弹性、韧性和可观测性,支持k8s部署,可运行在公有云、私有云和混合云上。
+- **云原生**:通过原生分布式的设计,充分利用云平台的优势,TDengine 提供了水平扩展能力,具备弹性、韧性和可观测性,支持 k8s 部署,可运行在公有云、私有云和混合云上。
- **极简时序数据平台**:TDengine 内建消息队列、缓存、流式计算等功能,应用无需再集成 Kafka/Redis/HBase/Spark 等软件,大幅降低系统的复杂度,降低应用开发和运营成本。
@@ -53,29 +53,29 @@ TDengine 是一款开源、高性能、云原生的时序数据库 (Time-Series
- **简单易用**:无任何依赖,安装、集群几秒搞定;提供REST以及各种语言连接器,与众多第三方工具无缝集成;提供命令行程序,便于管理和即席查询;提供各种运维工具。
-- **核心开源**:TDengine 的核心代码包括集群功能全部开源,截止到2022年8月1日,全球超过 135.9k 个运行实例,GitHub Star 18.7k,Fork 4.4k,社区活跃。
+- **核心开源**:TDengine 的核心代码包括集群功能全部开源,截止到 2022 年 8 月 1 日,全球超过 135.9k 个运行实例,GitHub Star 18.7k,Fork 4.4k,社区活跃。
-了解TDengine高级功能的完整列表,请 [点击](https://tdengine.com/tdengine/)。体验TDengine最简单的方式是通过[TDengine云平台](https://cloud.tdengine.com)。
+了解 TDengine 高级功能的完整列表,请 [点击](https://tdengine.com/tdengine/)。体验 TDengine 最简单的方式是通过 [TDengine云平台](https://cloud.tdengine.com)。
# 2. 文档
关于完整的使用手册,系统架构和更多细节,请参考 [TDengine](https://www.taosdata.com/) 或者 [TDengine 官方文档](https://docs.taosdata.com)。
-用户可根据需求选择通过[容器](https://docs.taosdata.com/get-started/docker/)、[安装包](https://docs.taosdata.com/get-started/package/)、[Kubernetes](https://docs.taosdata.com/deployment/k8s/)来安装或直接使用无需安装部署的[云服务](https://cloud.taosdata.com/)。本快速指南是面向想自己编译、打包、测试的开发者的。
+用户可根据需求选择通过 [容器](https://docs.taosdata.com/get-started/docker/)、[安装包](https://docs.taosdata.com/get-started/package/)、[Kubernetes](https://docs.taosdata.com/deployment/k8s/) 来安装或直接使用无需安装部署的 [云服务](https://cloud.taosdata.com/)。本快速指南是面向想自己编译、打包、测试的开发者的。
-如果想编译或测试TDengine连接器,请访问以下仓库: [JDBC连接器](https://github.com/taosdata/taos-connector-jdbc), [Go连接器](https://github.com/taosdata/driver-go), [Python连接器](https://github.com/taosdata/taos-connector-python), [Node.js连接器](https://github.com/taosdata/taos-connector-node), [C#连接器](https://github.com/taosdata/taos-connector-dotnet), [Rust连接器](https://github.com/taosdata/taos-connector-rust).
+如果想编译或测试 TDengine 连接器,请访问以下仓库:[JDBC连接器](https://github.com/taosdata/taos-connector-jdbc)、[Go连接器](https://github.com/taosdata/driver-go)、[Python连接器](https://github.com/taosdata/taos-connector-python)、[Node.js连接器](https://github.com/taosdata/taos-connector-node)、[C#连接器](https://github.com/taosdata/taos-connector-dotnet)、[Rust连接器](https://github.com/taosdata/taos-connector-rust)。
# 3. 前置条件
-TDengine 目前可以在 Linux、 Windows、macOS 等平台上安装和运行。任何 OS 的应用也可以选择 taosAdapter 的 RESTful 接口连接服务端 taosd。CPU 支持 X64/ARM64,后续会支持 MIPS64、Alpha64、ARM32、RISC-V 等 CPU 架构。目前不支持使用交叉编译器构建。
+TDengine 目前可以在 Linux、Windows、macOS 等平台上安装和运行。任何 OS 的应用也可以选择 taosAdapter 的 RESTful 接口连接服务端 taosd。CPU 支持 X64、ARM64,后续会支持 MIPS64、Alpha64、ARM32、RISC-V 等 CPU 架构。目前不支持使用交叉编译器构建。
如果你想要编译 taosAdapter 或者 taosKeeper,需要安装 Go 1.18 及以上版本。
-## 3.1 Linux系统
+## 3.1 Linux 系统
-安装Linux必备工具
+安装 Linux 必备工具
### Ubuntu 18.04、20.04、22.04
@@ -96,13 +96,13 @@ yum install -y zlib-static xz-devel snappy-devel jansson-devel pkgconfig libatom
-## 3.2 macOS系统
+## 3.2 macOS 系统
-安装macOS必备工具
+安装 macOS 必备工具
-根据提示安装依赖工具 [brew](https://brew.sh/).
+根据提示安装依赖工具 [brew](https://brew.sh/)
```bash
brew install argp-standalone gflags pkgconfig
@@ -110,11 +110,11 @@ brew install argp-standalone gflags pkgconfig
-## 3.3 Windows系统
+## 3.3 Windows 系统
-安装Windows必备工具
+安装 Windows 必备工具
进行中。
@@ -122,7 +122,7 @@ brew install argp-standalone gflags pkgconfig
## 3.4 克隆仓库
-通过如下命令将TDengine仓库克隆到指定计算机:
+通过如下命令将 TDengine 仓库克隆到指定计算机:
```bash
git clone https://github.com/taosdata/TDengine.git
@@ -131,23 +131,23 @@ cd TDengine
# 4. 构建
-TDengine 还提供一组辅助工具软件 taosTools,目前它包含 taosBenchmark(曾命名为 taosdemo)和 taosdump 两个软件。默认 TDengine 编译不包含 taosTools, 您可以在编译 TDengine 时使用`cmake .. -DBUILD_TOOLS=true` 来同时编译 taosTools。
+TDengine 还提供一组辅助工具软件 taosTools,目前它包含 taosBenchmark(曾命名为 taosdemo)和 taosdump 两个软件。默认 TDengine 编译不包含 taosTools,您可以在编译 TDengine 时使用 `cmake .. -DBUILD_TOOLS=true` 来同时编译 taosTools。
-为了构建TDengine, 请使用 [CMake](https://cmake.org/) 3.13.0 或者更高版本。
+为了构建 TDengine,请使用 [CMake](https://cmake.org/) 3.13.0 或者更高版本。
-## 4.1 Linux系统上构建
+## 4.1 Linux 系统上构建
-Linux系统上构建步骤
+Linux 系统上构建步骤
-可以通过以下命令使用脚本 `build.sh` 编译TDengine和taosTools,包括taosBenchmark和taosdump:
+可以通过以下命令使用脚本 `build.sh` 编译 TDengine 和 taosTools,包括 taosBenchmark 和 taosdump。
```bash
./build.sh
```
-也可以通过以下命令进行构建:
+也可以通过以下命令进行构建:
```bash
mkdir debug && cd debug
@@ -157,15 +157,15 @@ make
如果你想要编译 taosAdapter,需要添加 `-DBUILD_HTTP=false` 选项。
-如果你想要编译 taosKeeper,需要添加 `--DBUILD_KEEPER=true` 选项。
+如果你想要编译 taosKeeper,需要添加 `-DBUILD_KEEPER=true` 选项。
-可以使用Jemalloc作为内存分配器,而不是使用glibc:
+可以使用 Jemalloc 作为内存分配器,而不是使用 glibc:
```bash
cmake .. -DJEMALLOC_ENABLED=ON
```
-TDengine构建脚本可以自动检测 x86、x86-64、arm64 平台上主机的体系结构。
-您也可以通过 CPUTYPE 选项手动指定架构:
+TDengine 构建脚本可以自动检测 x86、x86-64、arm64 平台上主机的体系结构。
+您也可以通过 CPUTYPE 选项手动指定架构:
```bash
cmake .. -DCPUTYPE=aarch64 && cmake --build .
@@ -173,13 +173,13 @@ cmake .. -DCPUTYPE=aarch64 && cmake --build .
-## 4.2 macOS系统上构建
+## 4.2 macOS 系统上构建
-macOS系统上构建步骤
+macOS 系统上构建步骤
-请安装XCode命令行工具和cmake。使用XCode 11.4+在Catalina和Big Sur上完成验证。
+请安装 XCode 命令行工具和 cmake。使用 XCode 11.4+ 在 Catalina 和 Big Sur 上完成验证。
```shell
mkdir debug && cd debug
@@ -192,14 +192,14 @@ cmake .. && cmake --build .
-## 4.3 Windows系统上构建
+## 4.3 Windows 系统上构建
-Windows系统上构建步骤
+Windows 系统上构建步骤
-如果您使用的是Visual Studio 2013,请执行“cmd.exe”打开命令窗口执行如下命令。
-执行vcvarsall.bat时,64位的Windows请指定“amd64”,32位的Windows请指定“x86”。
+如果您使用的是 Visual Studio 2013,请执行 “cmd.exe” 打开命令窗口执行如下命令。
+执行 vcvarsall.bat 时,64 位的 Windows 请指定 “amd64”,32 位的 Windows 请指定 “x86”。
```cmd
mkdir debug && cd debug
@@ -208,19 +208,19 @@ cmake .. -G "NMake Makefiles"
nmake
```
-如果您使用Visual Studio 2019或2017:
+如果您使用 Visual Studio 2019 或 2017:
-请执行“cmd.exe”打开命令窗口执行如下命令。
-执行vcvarsall.bat时,64位的Windows请指定“x64”,32位的Windows请指定“x86”。
+请执行 “cmd.exe” 打开命令窗口执行如下命令。
+执行 vcvarsall.bat 时,64 位的 Windows 请指定 “x64”,32 位的 Windows 请指定 “x86”。
```cmd
mkdir debug && cd debug
-"c:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat" < x64 | x86 >
+"C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvarsall.bat" < x64 | x86 >
cmake .. -G "NMake Makefiles"
nmake
```
-或者,您可以通过点击Windows开始菜单打开命令窗口->“Visual Studio < 2019 | 2017 >”文件夹->“x64原生工具命令提示符VS < 2019 | 2017 >”或“x86原生工具命令提示符VS < 2019 | 2017 >”取决于你的Windows是什么架构,然后执行命令如下:
+或者,您可以通过点击 Windows 开始菜单打开命令窗口 -> `Visual Studio < 2019 | 2017 >` 文件夹 -> `x64 原生工具命令提示符 VS < 2019 | 2017 >` 或 `x86 原生工具命令提示符 VS < 2019 | 2017 >`(具体取决于你的 Windows 架构),然后执行命令如下:
```cmd
mkdir debug && cd debug
@@ -231,33 +231,33 @@ nmake
# 5. 打包
-由于一些组件依赖关系,TDengine社区安装程序不能仅由该存储库创建。我们仍在努力改进。
+由于一些组件依赖关系,TDengine 社区安装程序不能仅由该存储库创建。我们仍在努力改进。
# 6. 安装
-## 6.1 Linux系统上安装
+## 6.1 Linux 系统上安装
-Linux系统上安装详细步骤
+Linux 系统上安装详细步骤
-构建成功后,TDengine可以通过以下命令进行安装:
+构建成功后,TDengine 可以通过以下命令进行安装:
```bash
sudo make install
```
-从源代码安装还将为TDengine配置服务管理。用户也可以使用[TDengine安装包](https://docs.taosdata.com/get-started/package/)进行安装。
+从源代码安装还将为 TDengine 配置服务管理。用户也可以使用 [TDengine安装包](https://docs.taosdata.com/get-started/package/) 进行安装。
-## 6.2 macOS系统上安装
+## 6.2 macOS 系统上安装
-macOS系统上安装详细步骤
+macOS 系统上安装详细步骤
-构建成功后,TDengine可以通过以下命令进行安装:
+构建成功后,TDengine 可以通过以下命令进行安装:
```bash
sudo make install
@@ -265,13 +265,13 @@ sudo make install
-## 6.3 Windows系统上安装
+## 6.3 Windows 系统上安装
-Windows系统上安装详细步骤
+Windows 系统上安装详细步骤
-构建成功后,TDengine可以通过以下命令进行安装:
+构建成功后,TDengine 可以通过以下命令进行安装:
```cmd
nmake install
@@ -281,32 +281,32 @@ nmake install
# 7. 快速运行
-## 7.1 Linux系统上运行
+## 7.1 Linux 系统上运行
-Linux系统上运行详细步骤
+Linux 系统上运行详细步骤
-在Linux系统上安装TDengine完成后,在终端运行如下命令启动服务:
+在 Linux 系统上安装 TDengine 完成后,在终端运行如下命令启动服务:
```bash
sudo systemctl start taosd
```
-然后用户可以通过如下命令使用TDengine命令行连接TDengine服务:
+然后用户可以通过如下命令使用 TDengine 命令行连接 TDengine 服务:
```bash
taos
```
-如果TDengine 命令行连接服务器成功,系统将打印欢迎信息和版本信息。否则,将显示连接错误信息。
+如果 TDengine 命令行连接服务器成功,系统将打印欢迎信息和版本信息。否则,将显示连接错误信息。
-如果您不想将TDengine作为服务运行,您可以在当前终端中运行它。例如,要在构建完成后快速启动TDengine服务器,在终端中运行以下命令:(我们以Linux为例,Windows上的命令为 `taosd.exe`)
+如果您不想将 TDengine 作为服务运行,您可以在当前终端中运行它。例如,要在构建完成后快速启动 TDengine 服务器,在终端中运行以下命令:(以 Linux 为例,Windows 上的命令为 `taosd.exe`)
```bash
./build/bin/taosd -c test/cfg
```
-在另一个终端上,使用TDengine命令行连接服务器:
+在另一个终端上,使用 TDengine 命令行连接服务器:
```bash
./build/bin/taos -c test/cfg
@@ -316,42 +316,42 @@ taos
-## 7.2 macOS系统上运行
+## 7.2 macOS 系统上运行
-macOS系统上运行详细步骤
+macOS 系统上运行详细步骤
-在macOS上安装完成后启动服务,双击/applications/TDengine启动程序,或者在终端中执行如下命令:
+在 macOS 上安装完成后启动服务,双击 `/applications/TDengine` 启动程序,或者在终端中执行如下命令:
```bash
sudo launchctl start com.tdengine.taosd
```
-然后在终端中使用如下命令通过TDengine命令行连接TDengine服务器:
+然后在终端中使用如下命令通过 TDengine 命令行连接 TDengine 服务器:
```bash
taos
```
-如果TDengine命令行连接服务器成功,系统将打印欢迎信息和版本信息。否则,将显示错误信息。
+如果 TDengine 命令行连接服务器成功,系统将打印欢迎信息和版本信息。否则,将显示错误信息。
-## 7.3 Windows系统上运行
+## 7.3 Windows 系统上运行
-Windows系统上运行详细步骤
+Windows 系统上运行详细步骤
-您可以使用以下命令在Windows平台上启动TDengine服务器:
+您可以使用以下命令在 Windows 平台上启动 TDengine 服务器:
```cmd
.\build\bin\taosd.exe -c test\cfg
```
-在另一个终端上,使用TDengine命令行连接服务器:
+在另一个终端上,使用 TDengine 命令行连接服务器:
```cmd
.\build\bin\taos.exe -c test\cfg
@@ -363,25 +363,25 @@ taos
# 8. 测试
-有关如何在TDengine上运行不同类型的测试,请参考 [TDengine测试](./tests/README-CN.md)
+有关如何在 TDengine 上运行不同类型的测试,请参考 [TDengine测试](./tests/README-CN.md)
# 9. 版本发布
-TDengine发布版本的完整列表,请参考 [版本列表](https://github.com/taosdata/TDengine/releases)
+TDengine 发布版本的完整列表,请参考 [版本列表](https://github.com/taosdata/TDengine/releases)
# 10. 工作流
-TDengine构建检查工作流可以在参考 [Github Action](https://github.com/taosdata/TDengine/actions/workflows/taosd-ci-build.yml), 更多的工作流正在创建中,将很快可用。
+TDengine 构建检查工作流可参考 [Github Action](https://github.com/taosdata/TDengine/actions/workflows/taosd-ci-build.yml),更多的工作流正在创建中,将很快可用。
# 11. 覆盖率
-最新的TDengine测试覆盖率报告可参考 [coveralls.io](https://coveralls.io/github/taosdata/TDengine)
+最新的 TDengine 测试覆盖率报告可参考 [coveralls.io](https://coveralls.io/github/taosdata/TDengine)
如何在本地运行测试覆盖率报告?
-在本地创建测试覆盖率报告(HTML格式),请运行以下命令:
+在本地创建测试覆盖率报告(HTML 格式),请运行以下命令:
```bash
cd tests
@@ -389,8 +389,8 @@ bash setup-lcov.sh -v 1.16 && ./run_local_coverage.sh -b main -c task
# on main branch and run cases in longtimeruning_cases.task
# for more information about options please refer to ./run_local_coverage.sh -h
```
-> **注意:**
-> 请注意,-b和-i选项将使用-DCOVER=true选项重新编译TDengine,这可能需要花费一些时间。
+> **注意**:
+> 请注意,-b 和 -i 选项将使用 -DCOVER=true 选项重新编译 TDengine,这可能需要花费一些时间。
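
For reference, the recompilation mentioned in the note above boils down to configuring the build with coverage instrumentation enabled. A minimal sketch, assuming the usual out-of-tree CMake build; only the `-DCOVER=true` flag comes from the note, the remaining options are assumed defaults:

```bash
# Hedged sketch: rebuild TDengine with coverage instrumentation, roughly what
# run_local_coverage.sh triggers when -b/-i are passed.
mkdir -p debug && cd debug
cmake .. -DCOVER=true      # coverage flag taken from the note above
make -j"$(nproc)"          # the recompilation is what makes this step slow
```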
diff --git a/cmake/cmake.define b/cmake/cmake.define
index 72c9e84f78..3770f1f3b0 100644
--- a/cmake/cmake.define
+++ b/cmake/cmake.define
@@ -116,8 +116,6 @@ ELSE()
set(VAR_TSZ "TSZ" CACHE INTERNAL "global variant tsz")
ENDIF()
-# SET(JEMALLOC_ENABLED OFF)
-
IF(TD_WINDOWS)
MESSAGE("${Yellow} set compiler flag for Windows! ${ColourReset}")
@@ -259,12 +257,15 @@ ELSE()
ENDIF()
-IF(TD_LINUX)
+IF(TD_LINUX_64)
IF(${JEMALLOC_ENABLED})
MESSAGE(STATUS "JEMALLOC Enabled")
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=attributes")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=attributes")
+ SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc")
+ ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib)
ELSE()
MESSAGE(STATUS "JEMALLOC Disabled")
+ SET(LINK_JEMALLOC "")
ENDIF()
ENDIF()
\ No newline at end of file
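
With this change the jemalloc compile definitions and `LINK_JEMALLOC` are set once in `cmake.define` for 64-bit Linux builds instead of being repeated per target. A hedged sketch of enabling that path at configure time; the cache-variable name `JEMALLOC_ENABLED` is inferred from the `IF(${JEMALLOC_ENABLED})` check above and should be verified against the project's CMake options:

```bash
# Assumed configure-time switch for the jemalloc code path on Linux x64.
mkdir -p debug && cd debug
cmake .. -DJEMALLOC_ENABLED=true
make -j"$(nproc)"
```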
diff --git a/source/dnode/mgmt/CMakeLists.txt b/source/dnode/mgmt/CMakeLists.txt
index 5d356e06b1..ac89c35db5 100644
--- a/source/dnode/mgmt/CMakeLists.txt
+++ b/source/dnode/mgmt/CMakeLists.txt
@@ -14,24 +14,12 @@ target_include_directories(
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/node_mgmt/inc"
)
-IF(TD_ENTERPRISE)
- IF(${BUILD_WITH_S3})
+if(TD_ENTERPRISE)
+ if(${BUILD_WITH_S3})
add_definitions(-DUSE_S3)
- ELSEIF(${BUILD_WITH_COS})
+ elseif(${BUILD_WITH_COS})
add_definitions(-DUSE_COS)
- ENDIF()
-ENDIF()
+ endif()
+endif()
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib -ljemalloc)
- SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc")
-ELSE()
- SET(LINK_JEMALLOC "")
-ENDIF()
-
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEPENDENCIES(taosd jemalloc)
- target_link_libraries(taosd dnode crypt ${LINK_JEMALLOC})
-ELSE()
- target_link_libraries(taosd dnode crypt)
-ENDIF()
+target_link_libraries(taosd dnode crypt)
diff --git a/source/os/CMakeLists.txt b/source/os/CMakeLists.txt
index 6b33e68377..01103e7bd0 100644
--- a/source/os/CMakeLists.txt
+++ b/source/os/CMakeLists.txt
@@ -1,4 +1,5 @@
aux_source_directory(src OS_SRC)
+
if(NOT ${TD_WINDOWS})
add_definitions(-DTHREAD_SAFE=1)
aux_source_directory(src/timezone OS_TZ)
@@ -6,6 +7,7 @@ if(NOT ${TD_WINDOWS})
else()
add_library(os STATIC ${OS_SRC})
endif(NOT ${TD_WINDOWS})
+
target_include_directories(
os
PUBLIC "${TD_SOURCE_DIR}/include/os"
@@ -16,21 +18,26 @@ target_include_directories(
PUBLIC "${TD_SOURCE_DIR}/contrib/msvcregex"
PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc"
)
+
# iconv
if(TD_WINDOWS)
find_path(IconvApiIncludes iconv.h "${TD_SOURCE_DIR}/contrib/iconv")
else()
find_path(IconvApiIncludes iconv.h PATHS)
endif(TD_WINDOWS)
+
if(NOT IconvApiIncludes)
- add_definitions(-DDISALLOW_NCHAR_WITHOUT_ICONV)
-endif ()
+ add_definitions(-DDISALLOW_NCHAR_WITHOUT_ICONV)
+endif()
+
if(USE_TD_MEMORY)
- add_definitions(-DUSE_TD_MEMORY)
-endif ()
+ add_definitions(-DUSE_TD_MEMORY)
+endif()
+
if(BUILD_WITH_RAND_ERR)
- add_definitions(-DBUILD_WITH_RAND_ERR)
-endif ()
+ add_definitions(-DBUILD_WITH_RAND_ERR)
+endif()
+
if(BUILD_ADDR2LINE)
if(NOT TD_WINDOWS)
target_include_directories(
@@ -41,31 +48,36 @@ if(BUILD_ADDR2LINE)
os PUBLIC addr2line dl z
)
endif()
+
add_definitions(-DUSE_ADDR2LINE)
-endif ()
+endif()
+
if(CHECK_STR2INT_ERROR)
add_definitions(-DTD_CHECK_STR_TO_INT_ERROR)
endif()
+
target_link_libraries(
os
PUBLIC pthread
PUBLIC zlibstatic
+ PUBLIC ${LINK_JEMALLOC}
)
+
if(TD_WINDOWS)
target_link_libraries(
os PUBLIC ws2_32 iconv msvcregex wcwidth winmm crashdump dbghelp version KtmW32
)
elseif(TD_DARWIN_64)
find_library(CORE_FOUNDATION_FRAMEWORK CoreFoundation)
- target_link_libraries(os PUBLIC ${CORE_FOUNDATION_FRAMEWORK})
+ target_link_libraries(os PUBLIC ${CORE_FOUNDATION_FRAMEWORK})
find_library(SYSTEM_CONFIGURATION_FRAMEWORK SystemConfiguration)
- target_link_libraries(os PUBLIC ${SYSTEM_CONFIGURATION_FRAMEWORK})
+ target_link_libraries(os PUBLIC ${SYSTEM_CONFIGURATION_FRAMEWORK})
target_link_libraries(
os PUBLIC dl m iconv
)
elseif(TD_ALPINE)
target_link_libraries(
- os PUBLIC dl m rt unwind
+ os PUBLIC dl m rt unwind
)
else()
target_link_libraries(
@@ -73,15 +85,14 @@ else()
)
endif()
-IF (JEMALLOC_ENABLED)
- target_link_libraries(os PUBLIC -L${CMAKE_BINARY_DIR}/build/lib -ljemalloc)
-ENDIF ()
-
-#if(NOT ${TD_WINDOWS})
-# find_library(tz libtz.a "${TD_SOURCE_DIR}/contrib/tz")
-# target_link_libraries(os PUBLIC ${tz})
-#endif(NOT ${TD_WINDOWS})
+if(JEMALLOC_ENABLED)
+ add_dependencies(os jemalloc)
+endif()
+# if(NOT ${TD_WINDOWS})
+# find_library(tz libtz.a "${TD_SOURCE_DIR}/contrib/tz")
+# target_link_libraries(os PUBLIC ${tz})
+# endif(NOT ${TD_WINDOWS})
if(${BUILD_TEST})
add_subdirectory(test)
endif(${BUILD_TEST})
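
Because `${LINK_JEMALLOC}` is now linked `PUBLIC` into the `os` library (with an explicit dependency on the `jemalloc` target), consumers of `os` such as `taosd` and the shell should pick up jemalloc transitively. A hedged way to verify that on a Linux build; the binary paths assume the default `debug/build/bin` layout:

```bash
# Check whether the freshly built binaries resolve libjemalloc at runtime.
for bin in debug/build/bin/taosd debug/build/bin/taos; do
  echo "== $bin =="
  ldd "$bin" | grep -i jemalloc || echo "no jemalloc dependency found"
done
```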
diff --git a/tools/shell/CMakeLists.txt b/tools/shell/CMakeLists.txt
index 2301f33803..ac901f5ca2 100644
--- a/tools/shell/CMakeLists.txt
+++ b/tools/shell/CMakeLists.txt
@@ -2,14 +2,6 @@ aux_source_directory(src SHELL_SRC)
add_executable(shell ${SHELL_SRC})
-IF(TD_LINUX_64 AND JEMALLOC_ENABLED)
- ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib -ljemalloc)
- SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc")
- ADD_DEPENDENCIES(shell jemalloc)
-ELSE()
- SET(LINK_JEMALLOC "")
-ENDIF()
-
IF(TD_LINUX AND TD_WEBSOCKET)
ADD_DEFINITIONS(-DWEBSOCKET -I${CMAKE_BINARY_DIR}/build/include -ltaosws)
SET(LINK_WEBSOCKET "-L${CMAKE_BINARY_DIR}/build/lib -ltaosws")
@@ -33,13 +25,14 @@ ELSE()
ENDIF()
if(TD_WINDOWS)
- target_link_libraries(shell PUBLIC ${TAOS_LIB_STATIC} ${LINK_WEBSOCKET})
+ target_link_libraries(shell PUBLIC ${TAOS_LIB_STATIC})
else()
- target_link_libraries(shell PUBLIC ${TAOS_LIB} ${LINK_WEBSOCKET} ${LINK_JEMALLOC} ${LINK_ARGP})
+ target_link_libraries(shell PUBLIC ${TAOS_LIB} ${LINK_ARGP})
endif()
target_link_libraries(
shell
+ PUBLIC ${LINK_WEBSOCKET}
PRIVATE os common transport geometry util
)
@@ -53,16 +46,17 @@ SET_TARGET_PROPERTIES(shell PROPERTIES OUTPUT_NAME taos)
#
# generate library shell_ut for unit test
#
-
IF(TD_LINUX)
# include
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/inc)
+
# shell_ut library
add_library(shell_ut STATIC ${SHELL_SRC})
IF(TD_WEBSOCKET)
ADD_DEPENDENCIES(shell_ut taosws-rs)
ENDIF()
+
target_link_libraries(shell_ut PUBLIC ${TAOS_LIB} ${LINK_WEBSOCKET} ${LINK_JEMALLOC} ${LINK_ARGP})
target_link_libraries(shell_ut PRIVATE os common transport geometry util)
@@ -76,5 +70,5 @@ IF(TD_LINUX)
# unit test
IF(${BUILD_TEST})
ADD_SUBDIRECTORY(test)
- ENDIF(${BUILD_TEST})
+ ENDIF(${BUILD_TEST})
ENDIF()
From 41fe7c55531b3299ff69692cc5e121b24aa85d15 Mon Sep 17 00:00:00 2001
From: Feng Chao
Date: Fri, 7 Mar 2025 10:56:44 +0800
Subject: [PATCH 101/105] ci: Update ci workflow to prepare the migration
---
.github/workflows/taosd-ci.yml | 22 +++++++++++-----------
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/.github/workflows/taosd-ci.yml b/.github/workflows/taosd-ci.yml
index c4f18303ee..36576eb187 100644
--- a/.github/workflows/taosd-ci.yml
+++ b/.github/workflows/taosd-ci.yml
@@ -1,14 +1,14 @@
name: TDengine CI Test
on:
- # pull_request:
- # branches:
- # - 'main'
- # - '3.0'
- # - '3.1'
- # paths-ignore:
- # - 'packaging/**'
- # - 'docs/**'
+ pull_request:
+ branches:
+ - 'main'
+ - '3.0'
+ - '3.1'
+ paths-ignore:
+ - 'packaging/**'
+ - 'docs/**'
repository_dispatch:
types: [run-tests]
@@ -57,14 +57,14 @@ jobs:
cd ${{ env.WKC }}
changed_files_non_doc=$(git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD $target_branch`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | tr '\n' ' ' || :)
- if [[ "$changed_files_non_doc" != '' && "$changed_files_non_doc" =~ /forecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics/ ]]; then
+ if [[ "$changed_files_non_doc" != '' && "$changed_files_non_doc" =~ /forecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics|tdgpt/ ]]; then
run_tdgpt_test="true"
else
run_tdgpt_test="false"
fi
# check whether to run function test cases
- changed_files_non_tdgpt=$(git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD $target_branch`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | grep -Ev "forecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics" | tr '\n' ' ' ||:)
+ changed_files_non_tdgpt=$(git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD $target_branch`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | grep -Ev "forecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics|tdgpt" | tr '\n' ' ' ||:)
if [ "$changed_files_non_tdgpt" != '' ]; then
run_function_test="true"
else
@@ -231,7 +231,7 @@ jobs:
if: ${{ needs.fetch-parameters.outputs.run_function_test == 'true' }}
runs-on:
group: CI
- labels: [self-hosted, macOS, ARM64, testing]
+ labels: [self-hosted, macOS, testing]
timeout-minutes: 60
env:
IS_TDINTERNAL: ${{ needs.fetch-parameters.outputs.tdinternal }}
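
The widened regex above means any change touching `analytics` or `tdgpt` paths now triggers the tdgpt cases. A hedged sketch of reproducing the same classification locally before pushing; it assumes the comparison branch is reachable as `origin/main`:

```bash
# Approximate the workflow's tdgpt gating against a local branch.
target_branch=origin/main
base=$(git merge-base HEAD "$target_branch")
changed=$(git --no-pager diff --name-only HEAD "$base" \
  | grep -v "^docs/en/" | grep -v "^docs/zh/" | grep -v ".md$" || :)
if echo "$changed" | grep -Eq "forecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics|tdgpt"; then
  echo "run_tdgpt_test=true"
else
  echo "run_tdgpt_test=false"
fi
```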
From 1c2072eb80307cc4d286d7128256a7d78a620556 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Fri, 7 Mar 2025 13:53:05 +0800
Subject: [PATCH 102/105] feat: support zlib compression on windows platform
---
tools/taos-tools/README-CN.md | 2 +-
tools/taos-tools/inc/benchCsv.h | 4 ----
tools/taos-tools/src/CMakeLists.txt | 6 +++++-
tools/taos-tools/src/benchCsv.c | 15 +++------------
4 files changed, 9 insertions(+), 18 deletions(-)
diff --git a/tools/taos-tools/README-CN.md b/tools/taos-tools/README-CN.md
index 3def035f68..da14e81cd1 100644
--- a/tools/taos-tools/README-CN.md
+++ b/tools/taos-tools/README-CN.md
@@ -18,7 +18,7 @@ taosdump 是用于备份 TDengine 数据到本地目录和从本地目录恢复
#### 对于 Ubuntu/Debian 系统
```shell
-sudo apt install libjansson-dev libsnappy-dev liblzma-dev libz-dev zlib1g pkg-config libssl-dev
+sudo apt install libjansson-dev libsnappy-dev liblzma-dev libz-dev zlib1g zlib1g-dev pkg-config libssl-dev
```
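
The added `zlib1g-dev` package supplies the zlib development headers that taosBenchmark needs now that it links `z` for gzip-compressed CSV export. A hedged sanity check on Debian/Ubuntu before configuring the build:

```bash
# Confirm the zlib development files are visible to the build.
dpkg -s zlib1g-dev >/dev/null 2>&1 || echo "zlib1g-dev is not installed"
pkg-config --exists zlib && pkg-config --modversion zlib
```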
#### 对于 CentOS 7/RHEL 系统
diff --git a/tools/taos-tools/inc/benchCsv.h b/tools/taos-tools/inc/benchCsv.h
index f944600ecb..6bf531cf14 100644
--- a/tools/taos-tools/inc/benchCsv.h
+++ b/tools/taos-tools/inc/benchCsv.h
@@ -16,9 +16,7 @@
#ifndef INC_BENCHCSV_H_
#define INC_BENCHCSV_H_
-#ifndef _WIN32
#include <zlib.h>
-#endif
#include "bench.h"
@@ -41,9 +39,7 @@ typedef struct {
CsvCompressionLevel compress_level;
CsvIoError result;
union {
-#ifndef _WIN32
gzFile gf;
-#endif
FILE* fp;
} handle;
} CsvFileHandle;
diff --git a/tools/taos-tools/src/CMakeLists.txt b/tools/taos-tools/src/CMakeLists.txt
index 93b1530020..320fb1f413 100644
--- a/tools/taos-tools/src/CMakeLists.txt
+++ b/tools/taos-tools/src/CMakeLists.txt
@@ -317,7 +317,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux" OR ${CMAKE_SYSTEM_NAME} MATCHES "Darwin
ENDIF ()
- target_link_libraries(taosBenchmark z)
+ TARGET_LINK_LIBRARIES(taosBenchmark z)
ELSE ()
ADD_DEFINITIONS(-DWINDOWS)
@@ -334,6 +334,7 @@ ELSE ()
ADD_DEPENDENCIES(taosdump deps-snappy)
ADD_DEPENDENCIES(taosdump deps-libargp)
ADD_DEPENDENCIES(taosdump apache-avro)
+ ADD_DEPENDENCIES(taosBenchmark tools-zlib)
IF (${WEBSOCKET})
INCLUDE_DIRECTORIES(/usr/local/include/)
@@ -365,5 +366,8 @@ ELSE ()
ENDIF ()
TARGET_LINK_LIBRARIES(taosBenchmark taos msvcregex pthread toolscJson ${WEBSOCKET_LINK_FLAGS})
+
+ TARGET_LINK_LIBRARIES(taosBenchmark zlibstatic)
+
ENDIF ()
diff --git a/tools/taos-tools/src/benchCsv.c b/tools/taos-tools/src/benchCsv.c
index d08b9d19b0..f8c43dbb97 100644
--- a/tools/taos-tools/src/benchCsv.c
+++ b/tools/taos-tools/src/benchCsv.c
@@ -953,15 +953,12 @@ static CsvFileHandle* csvOpen(const char* filename, CsvCompressionLevel compress
if (compress_level == CSV_COMPRESS_NONE) {
fhdl->handle.fp = fopen(filename, "w");
failed = (!fhdl->handle.fp);
- }
-#ifndef _WIN32
- else {
+ } else {
char mode[TINY_BUFF_LEN];
(void)snprintf(mode, sizeof(mode), "wb%d", compress_level);
fhdl->handle.gf = gzopen(filename, mode);
failed = (!fhdl->handle.gf);
}
-#endif
if (failed) {
tmfree(fhdl);
@@ -989,9 +986,7 @@ static CsvIoError csvWrite(CsvFileHandle* fhdl, const char* buf, size_t size) {
fhdl->result = CSV_ERR_WRITE_FAILED;
return CSV_ERR_WRITE_FAILED;
}
- }
-#ifndef _WIN32
- else {
+ } else {
int ret = gzwrite(fhdl->handle.gf, buf, size);
if (ret != size) {
errorPrint("Failed to write csv file: %s. expected written %zu but %d.\n",
@@ -1003,7 +998,6 @@ static CsvIoError csvWrite(CsvFileHandle* fhdl, const char* buf, size_t size) {
return CSV_ERR_WRITE_FAILED;
}
}
-#endif
return CSV_ERR_OK;
}
@@ -1019,15 +1013,12 @@ static void csvClose(CsvFileHandle* fhdl) {
fclose(fhdl->handle.fp);
fhdl->handle.fp = NULL;
}
- }
-#ifndef _WIN32
- else {
+ } else {
if (fhdl->handle.gf) {
gzclose(fhdl->handle.gf);
fhdl->handle.gf = NULL;
}
}
-#endif
tmfree(fhdl);
}
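
With the `_WIN32` guards removed, gzip-compressed CSV export follows the same code path on every platform; on Linux/macOS the resulting archives can still be sanity-checked from a shell. A hedged sketch; the output location and file names depend on your `output_path`/`csv_file_prefix` settings:

```bash
# Verify that compressed exports are valid gzip archives and peek at the rows.
for f in ./csv/*.csv.gz; do
  gzip -t "$f" && echo "OK: $f"
done
zcat ./csv/*.csv.gz | head -n 5
```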
From 8292b8df252f7bbbd2a9e2b0ad12cd72a24327d1 Mon Sep 17 00:00:00 2001
From: Yaming Pei
Date: Fri, 7 Mar 2025 18:19:57 +0800
Subject: [PATCH 103/105] docs: optimize document description
---
.../14-reference/02-tools/10-taosbenchmark.md | 15 +++++-
.../14-reference/02-tools/10-taosbenchmark.md | 17 ++++--
.../army/tools/benchmark/basic/csv-export.py | 3 +-
.../benchmark/basic/json/csv-export.json | 8 +--
tools/taos-tools/example/csv-export.json | 54 +++++++++++++++++++
tools/taos-tools/src/benchJsonOpt.c | 4 +-
6 files changed, 89 insertions(+), 12 deletions(-)
create mode 100644 tools/taos-tools/example/csv-export.json
diff --git a/docs/en/14-reference/02-tools/10-taosbenchmark.md b/docs/en/14-reference/02-tools/10-taosbenchmark.md
index 3c1401de68..19f498eab1 100644
--- a/docs/en/14-reference/02-tools/10-taosbenchmark.md
+++ b/docs/en/14-reference/02-tools/10-taosbenchmark.md
@@ -298,11 +298,11 @@ Parameters related to supertable creation are configured in the `super_tables` s
- **csv_ts_interval**: String type, sets the time interval for splitting generated csv file names. Supports daily, hourly, minute, and second intervals such as 1d/2h/30m/40s. The default value is "1d".
-- **csv_output_header**: String type, sets whether the generated csv files should contain column header descriptions. The default value is "true".
+- **csv_output_header**: String type, sets whether the generated csv files should contain column header descriptions. The default value is "yes".
- **csv_tbname_alias**: String type, sets the alias for the tbname field in the column header descriptions of csv files. The default value is "device_id".
-- **csv_compress_level**: String type, sets the compression level when generating csv files and automatically compressing them into gzip format. Possible values are:
+- **csv_compress_level**: String type, sets the compression level used when generating csv-encoded data and automatically compressing it into a gzip file. This process encodes and compresses the data directly, rather than first generating a csv file and then compressing it. Possible values are:
- none: No compression
- fast: gzip level 1 compression
- balance: gzip level 6 compression
@@ -502,6 +502,17 @@ Note: Data types in the taosBenchmark configuration file must be in lowercase to
+### Export CSV File Example
+
+
+csv-export.json
+
+```json
+{{#include /TDengine/tools/taos-tools/example/csv-export.json}}
+```
+
+
+
Other json examples see [here](https://github.com/taosdata/TDengine/tree/main/tools/taos-tools/example)
## Output Performance Indicators
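
A hedged usage sketch of the new example section: run taosBenchmark in `csvfile` mode against the bundled configuration and inspect what lands in the configured `output_path` (`./csv/`); the working directory is assumed to be the repository root:

```bash
# Generate CSV exports from the example configuration added by this patch.
taosBenchmark -f tools/taos-tools/example/csv-export.json
ls -lh ./csv/
```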
diff --git a/docs/zh/14-reference/02-tools/10-taosbenchmark.md b/docs/zh/14-reference/02-tools/10-taosbenchmark.md
index 9902fa56c9..1f97b0702a 100644
--- a/docs/zh/14-reference/02-tools/10-taosbenchmark.md
+++ b/docs/zh/14-reference/02-tools/10-taosbenchmark.md
@@ -93,7 +93,7 @@ taosBenchmark -f
本节所列参数适用于所有功能模式。
-- **filetype**:功能分类,可选值为 `insert`、`query`、`subscribe` 和 `csvfile`。分别对应插入、查询、订阅和生成csv文件功能。每个配置文件中只能指定其中之一。
+- **filetype**:功能分类,可选值为 `insert`、`query`、`subscribe` 和 `csvfile`。分别对应插入、查询、订阅和生成 csv 文件功能。每个配置文件中只能指定其中之一。
- **cfgdir**:TDengine 客户端配置文件所在的目录,默认路径是 /etc/taos 。
@@ -206,11 +206,11 @@ taosBenchmark -f
- **csv_ts_interval**:字符串类型,设置生成的 csv 文件名称中时间段间隔,支持天、小时、分钟、秒级间隔,如 1d/2h/30m/40s,默认值为 1d 。
-- **csv_output_header**:字符串类型,设置生成的 csv 文件是否包含列头描述,默认值为 true 。
+- **csv_output_header**:字符串类型,设置生成的 csv 文件是否包含列头描述,默认值为 yes 。
- **csv_tbname_alias**:字符串类型,设置 csv 文件列头描述中 tbname 字段的别名,默认值为 device_id 。
-- **csv_compress_level**:字符串类型,设置生成 csv 并自动压缩成 gzip 格式文件的压缩等级。可选值为:
+- **csv_compress_level**:字符串类型,设置生成 csv 编码数据并自动压缩成 gzip 格式文件的压缩等级。此过程直接编码并压缩,而非先生成 csv 文件再压缩。可选值为:
- none:不压缩
- fast:gzip 1级压缩
- balance:gzip 6级压缩
@@ -410,6 +410,17 @@ interval 控制休眠时间,避免持续查询慢查询消耗 CPU,单位为
+### 生成 CSV 文件 JSON 示例
+
+
+csv-export.json
+
+```json
+{{#include /TDengine/tools/taos-tools/example/csv-export.json}}
+```
+
+
+
查看更多 json 配置文件示例可 [点击这里](https://github.com/taosdata/TDengine/tree/main/tools/taos-tools/example)
## 输出性能指标
diff --git a/tests/army/tools/benchmark/basic/csv-export.py b/tests/army/tools/benchmark/basic/csv-export.py
index 702490d6ed..65ffb3e541 100644
--- a/tests/army/tools/benchmark/basic/csv-export.py
+++ b/tests/army/tools/benchmark/basic/csv-export.py
@@ -16,6 +16,7 @@ import csv
import datetime
import frame
+import frame.eos
import frame.etool
from frame.log import *
from frame.cases import *
@@ -213,7 +214,7 @@ class TDTestCase(TBase):
# exec
cmd = f"{benchmark} {options} -f {jsonFile}"
- os.system(cmd)
+ eos.exe(cmd)
# check result
self.check_result(jsonFile)
diff --git a/tests/army/tools/benchmark/basic/json/csv-export.json b/tests/army/tools/benchmark/basic/json/csv-export.json
index 2d6f7b7022..88beab0de1 100644
--- a/tests/army/tools/benchmark/basic/json/csv-export.json
+++ b/tests/army/tools/benchmark/basic/json/csv-export.json
@@ -19,7 +19,7 @@
"childtable_from": 1000,
"childtable_to": 1010,
"csv_file_prefix": "data",
- "csv_output_header": "true",
+ "csv_output_header": "yes",
"csv_tbname_alias": "device_id",
"csv_compress_level": "none",
"columns": [
@@ -59,7 +59,7 @@
"csv_file_prefix": "data",
"csv_ts_format": "%Y%m%d",
"csv_ts_interval": "1d",
- "csv_output_header": "true",
+ "csv_output_header": "yes",
"csv_tbname_alias": "device_id",
"csv_compress_level": "none",
"columns": [
@@ -98,7 +98,7 @@
"childtable_from": 1000,
"childtable_to": 1010,
"csv_file_prefix": "data",
- "csv_output_header": "true",
+ "csv_output_header": "yes",
"csv_tbname_alias": "device_id",
"csv_compress_level": "none",
"columns": [
@@ -139,7 +139,7 @@
"csv_file_prefix": "data",
"csv_ts_format": "%Y%m%d",
"csv_ts_interval": "1d",
- "csv_output_header": "true",
+ "csv_output_header": "yes",
"csv_tbname_alias": "device_id",
"csv_compress_level": "none",
"columns": [
diff --git a/tools/taos-tools/example/csv-export.json b/tools/taos-tools/example/csv-export.json
new file mode 100644
index 0000000000..7fa3e96f2f
--- /dev/null
+++ b/tools/taos-tools/example/csv-export.json
@@ -0,0 +1,54 @@
+{
+ "filetype": "csvfile",
+ "output_path": "./csv/",
+ "databases": [
+ {
+ "dbinfo": {
+ "name": "csvdb",
+ "precision": "ms"
+ },
+ "super_tables": [
+ {
+ "name": "table",
+ "childtable_count": 1010,
+ "insert_rows": 1000,
+ "interlace_rows": 1,
+ "childtable_prefix": "d",
+ "timestamp_step": 1000000,
+ "start_timestamp": "2020-10-01 00:00:00.000",
+ "childtable_from": 1000,
+ "childtable_to": 1010,
+ "csv_file_prefix": "data",
+ "csv_ts_format": "%Y%m%d",
+ "csv_ts_interval": "1d",
+                    "csv_output_header": "yes",
+ "csv_tbname_alias": "device_id",
+ "csv_compress_level": "none",
+ "columns": [
+ { "type": "bool", "name": "bc"},
+ { "type": "float", "name": "fc", "min": 1},
+ { "type": "double", "name": "dc", "min":10, "max":10},
+ { "type": "tinyint", "name": "ti"},
+ { "type": "smallint", "name": "si"},
+ { "type": "int", "name": "ic", "fillNull":"false"},
+ { "type": "bigint", "name": "bi"},
+ { "type": "utinyint", "name": "uti"},
+ { "type": "usmallint", "name": "usi", "min":100, "max":120},
+ { "type": "uint", "name": "ui"},
+ { "type": "ubigint", "name": "ubi"},
+ { "type": "binary", "name": "bin", "len": 16},
+ { "type": "nchar", "name": "nch", "len": 16}
+ ],
+ "tags": [
+ {"type": "tinyint", "name": "groupid","max": 10,"min": 1},
+ {"type": "binary", "name": "location", "len": 16,
+ "values": ["San Francisco", "Los Angles", "San Diego",
+ "San Jose", "Palo Alto", "Campbell", "Mountain View",
+ "Sunnyvale", "Santa Clara", "Cupertino"]
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
diff --git a/tools/taos-tools/src/benchJsonOpt.c b/tools/taos-tools/src/benchJsonOpt.c
index 49b5a6529d..5b992b388e 100644
--- a/tools/taos-tools/src/benchJsonOpt.c
+++ b/tools/taos-tools/src/benchJsonOpt.c
@@ -1434,9 +1434,9 @@ static int getStableInfo(tools_cJSON *dbinfos, int index) {
superTable->csv_output_header = true;
tools_cJSON* oph = tools_cJSON_GetObjectItem(stbInfo, "csv_output_header");
if (oph && oph->type == tools_cJSON_String && oph->valuestring != NULL) {
- if (0 == strcasecmp(oph->valuestring, "yes") || 0 == strcasecmp(oph->valuestring, "true")) {
+ if (0 == strcasecmp(oph->valuestring, "yes")) {
superTable->csv_output_header = true;
- } else if (0 == strcasecmp(oph->valuestring, "no") || 0 == strcasecmp(oph->valuestring, "false")) {
+ } else if (0 == strcasecmp(oph->valuestring, "no")) {
superTable->csv_output_header = false;
}
}
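
Since the parser now recognizes only `yes`/`no` for `csv_output_header` (defaulting to writing the header), configurations that still say `"true"` or `"false"` silently fall back to the default. A hedged one-liner to locate such leftovers in a working tree:

```bash
# Find csv_output_header values that the updated parser no longer interprets.
grep -rn --include='*.json' -E '"csv_output_header"[[:space:]]*:[[:space:]]*"(true|false)"' .
```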
From cc0fa151f34b2b7409d93f6f79cc5ec363ccc936 Mon Sep 17 00:00:00 2001
From: Feng Chao
Date: Fri, 7 Mar 2025 23:16:13 +0800
Subject: [PATCH 104/105] ci: update ci workflow to fix path issue
---
.github/workflows/taosd-ci.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/workflows/taosd-ci.yml b/.github/workflows/taosd-ci.yml
index 36576eb187..e2ad890105 100644
--- a/.github/workflows/taosd-ci.yml
+++ b/.github/workflows/taosd-ci.yml
@@ -164,6 +164,7 @@ jobs:
if: ${{ env.IS_TDINTERNAL == 'false' && env.TARGET_BRANCH != '3.1' }}
run: |
mkdir -p ${{ env.WKDIR }}/tmp/${{ env.PR_NUMBER }}_${{ github.run_number }}
+ cd ${{ env.WKC }}
changed_files_non_doc=$(git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD ${{ env.TARGET_BRANCH }}`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | tr '\n' ' ' || :)
echo $changed_files_non_doc > ${{ env.WKDIR }}/tmp/${{ env.PR_NUMBER }}_${{ github.run_number }}/docs_changed.txt
- name: Check assert testing
From cbfb1111fd29842c667d96a8613465df32f34725 Mon Sep 17 00:00:00 2001
From: Feng Chao
Date: Sat, 8 Mar 2025 12:06:25 +0800
Subject: [PATCH 105/105] ci: update ci workflow to debug tdgpt and function
test condition
---
.github/workflows/taosd-ci.yml | 20 ++++++++++++++------
1 file changed, 14 insertions(+), 6 deletions(-)
diff --git a/.github/workflows/taosd-ci.yml b/.github/workflows/taosd-ci.yml
index e2ad890105..7ce0a5ce48 100644
--- a/.github/workflows/taosd-ci.yml
+++ b/.github/workflows/taosd-ci.yml
@@ -10,7 +10,7 @@ on:
- 'packaging/**'
- 'docs/**'
repository_dispatch:
- types: [run-tests]
+ types: [trigger-tests-from-tdinternal]
concurrency:
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && github.ref || github.event.client_payload.ref}}-${{ github.event_name == 'repository_dispatch' && 'dispatch' || ''}}
@@ -55,21 +55,29 @@ jobs:
# check whether to run tdgpt test cases
cd ${{ env.WKC }}
- changed_files_non_doc=$(git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD $target_branch`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | tr '\n' ' ' || :)
-
- if [[ "$changed_files_non_doc" != '' && "$changed_files_non_doc" =~ /forecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics|tdgpt/ ]]; then
+ changed_files_non_doc=$(git --no-pager diff --name-only FETCH_HEAD $(git merge-base FETCH_HEAD $target_branch) | grep -v "^docs/en/" | grep -v "^docs/zh/" | grep -v ".md$" | tr '\n' ' ' || :)
+          echo "changed files excluding docs: ${changed_files_non_doc}"
+ if [[ -n "$changed_files_non_doc" && "$changed_files_non_doc" =~ (forecastoperator\.c|anomalywindowoperator\.c|tanalytics\.h|tanalytics\.c|tdgpt_cases\.task|analytics|tdgpt) ]]; then
run_tdgpt_test="true"
else
run_tdgpt_test="false"
fi
+ echo "run tdgpt test: ${run_tdgpt_test}"
# check whether to run function test cases
- changed_files_non_tdgpt=$(git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD $target_branch`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | grep -Ev "forecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics|tdgpt" | tr '\n' ' ' ||:)
- if [ "$changed_files_non_tdgpt" != '' ]; then
+ changed_files_non_tdgpt=$(git --no-pager diff --name-only FETCH_HEAD $(git merge-base FETCH_HEAD $target_branch) | \
+ grep -v "^docs/en/" | \
+ grep -v "^docs/zh/" | \
+ grep -v ".md$" | \
+ grep -Ev "forecastoperator\.c|anomalywindowoperator\.c|tanalytics\.h|tanalytics\.c|tdgpt_cases\.task|analytics|tdgpt" | \
+ tr '\n' ' ' || :)
+          echo "changed files excluding tdgpt: ${changed_files_non_tdgpt}"
+ if [ -n "$changed_files_non_tdgpt" ]; then
run_function_test="true"
else
run_function_test="false"
fi
+ echo "run function test: ${run_function_test}"
fi
echo "tdinternal=$tdinternal" >> $GITHUB_OUTPUT