From 671f79fbcd4eabb87754ed3b960742628f095a03 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 25 Apr 2022 10:08:59 +0800 Subject: [PATCH 01/33] fix sum case --- tests/system-test/2-query/sum.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/system-test/2-query/sum.py b/tests/system-test/2-query/sum.py index 757f5c3c90..467d90ca8d 100644 --- a/tests/system-test/2-query/sum.py +++ b/tests/system-test/2-query/sum.py @@ -61,6 +61,7 @@ class TDTestCase: tdSql.checkData(0, 0, sum_data) tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + tdSql.query(f"select sum( {condition} ) from {tbname} {where_condition} {group_condition} ") def __sum_err_check(self,tbanme): sqls = [] From dd3674cae05f74f6e0b4b44a004c18785794e400 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 25 Apr 2022 18:53:51 +0800 Subject: [PATCH 02/33] add upper case --- tests/system-test/2-query/upper.py | 240 +++++++++++++++++++++++++++++ tests/system-test/fulltest.sh | 2 +- 2 files changed, 241 insertions(+), 1 deletion(-) create mode 100644 tests/system-test/2-query/upper.py diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py new file mode 100644 index 0000000000..afb34cc36a --- /dev/null +++ b/tests/system-test/2-query/upper.py @@ -0,0 +1,240 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +TS_TYPE_COL = [TS_COL] + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __upper_condition(self): + upper_condition = [] + for char_col in CHAR_COL: + upper_condition.extend( + ( + char_col, + f"lower( {char_col} )", + ) + ) + upper_condition.extend(f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL) + upper_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) + upper_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) + + upper_condition.append('test1234!@#$%^&*():"> 0 " + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __upper_current_check(self, tbname): + upper_condition = self.__upper_condition() + for condition in upper_condition: + where_condition = self.__where_condition(condition) + group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + group_no_having= self.__group_condition(condition ) + groups = ["", group_having, group_no_having] + + for group_condition in groups: + tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] + upper_data = [ str(data).upper() if data else None for data in datas ] + tdSql.query(f"select upper( {condition} ) from {tbname} {where_condition} ") + for i in range(len(upper_data)): + tdSql.checkData(i, 0, upper_data[i] ) if upper_data[i] else tdSql.checkData(i, 0, None) + + tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + + def __upper_err_check(self,tbanme): + sqls = [] + + for un_char_col in UN_CHAR_COL: + sqls.extend( + ( + 
f"select upper( {un_char_col} ) from {tbanme} ", + f"select upper(ceil( {un_char_col} )) from {tbanme} ", + ) + ) + sqls.extend( f"select upper( {un_char_col} + {un_char_col_2} ) from {tbanme} " for un_char_col_2 in UN_CHAR_COL ) + sqls.extend( f"select upper( {un_char_col} + {ts_col} ) from {tbanme} " for ts_col in TS_TYPE_COL ) + + sqls.extend( f"select upper( {ts_col} ) from {tbanme} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select upper( {char_col} + {ts_col} ) from {tbanme} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select upper( {char_col} + {char_col_2} ) from {tbanme} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( + ( + f"select upper() from {tbanme} ", + f"select upper(*) from {tbanme} ", + f"select upper(ccccccc) from {tbanme} ", + f"select upper(111) from {tbanme} ", + ) + ) + + return sqls + + def __test_current(self): + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + for tb in tbname: + self.__upper_current_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + + for tb in tbname: + for errsql in self.__upper_err_check(tb): + tdSql.error(sql=errsql) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + for i in range(9): + tdSql.execute( + f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + '''insert into ct1 values + ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) + ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * 
pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", now()-{i}s ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.__insert_data(100) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) diff --git a/tests/system-test/fulltest.sh b/tests/system-test/fulltest.sh index 2954358feb..10e8399ff4 100755 --- a/tests/system-test/fulltest.sh +++ b/tests/system-test/fulltest.sh @@ -2,6 +2,6 @@ set -e #python3 ./test.py -f 2-query/between.py -#python3 ./test.py -f 2-query/distinct.py +python3 ./test.py -f 2-query/distinct.py python3 ./test.py -f 2-query/varchar.py #python3 ./test.py -f 2-query/cast.py From b5ac628aa0dcee4cb3165e5e226f22d3518bd0d8 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 25 Apr 2022 19:09:21 +0800 Subject: [PATCH 03/33] fix case --- tests/system-test/2-query/upper.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index afb34cc36a..d3f786a7ba 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -44,7 +44,8 @@ class TDTestCase: return upper_condition def 
__where_condition(self, col): - return f" where count({col}) > 0 " + # return f" where count({col}) > 0 " + return "" def __group_condition(self, col, having = ""): return f" group by {col} having {having}" if having else f" group by {col} " From 31b1521a8bc4a97aaf63f0b4031f604bf2dea7a7 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 25 Apr 2022 19:13:36 +0800 Subject: [PATCH 04/33] fix --- tests/system-test/2-query/upper.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index d3f786a7ba..a4fd3cf661 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -62,12 +62,10 @@ class TDTestCase: tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] upper_data = [ str(data).upper() if data else None for data in datas ] - tdSql.query(f"select upper( {condition} ) from {tbname} {where_condition} ") + tdSql.query(f"select upper( {condition} ) from {tbname} {where_condition} {group_condition}") for i in range(len(upper_data)): tdSql.checkData(i, 0, upper_data[i] ) if upper_data[i] else tdSql.checkData(i, 0, None) - tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") - def __upper_err_check(self,tbanme): sqls = [] From e8b16c6c7f395dd214cf1d69b079f0773bff58dd Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 25 Apr 2022 19:16:26 +0800 Subject: [PATCH 05/33] fix --- tests/system-test/2-query/upper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index a4fd3cf661..ea490304ac 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -39,7 +39,7 @@ class TDTestCase: upper_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) upper_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) - upper_condition.append('test1234!@#$%^&*():"> Date: Mon, 25 Apr 2022 19:18:46 +0800 Subject: [PATCH 06/33] fix --- tests/system-test/2-query/upper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index ea490304ac..5d7dc08db0 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -39,7 +39,7 @@ class TDTestCase: upper_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) upper_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) - upper_condition.append('"test1234!@#$%^&*():"> Date: Mon, 25 Apr 2022 19:19:46 +0800 Subject: [PATCH 07/33] fix case --- tests/system-test/2-query/upper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index 5d7dc08db0..399ead54e9 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -39,7 +39,7 @@ class TDTestCase: upper_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) upper_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) - upper_condition.append('''test1234!@#$%^&*():'> Date: Tue, 26 Apr 2022 10:36:33 +0800 Subject: [PATCH 08/33] add error cse --- 
tests/system-test/2-query/upper.py | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index 399ead54e9..8ece046492 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -66,28 +66,31 @@ class TDTestCase: for i in range(len(upper_data)): tdSql.checkData(i, 0, upper_data[i] ) if upper_data[i] else tdSql.checkData(i, 0, None) - def __upper_err_check(self,tbanme): + def __upper_err_check(self,tbname): sqls = [] for un_char_col in UN_CHAR_COL: sqls.extend( ( - f"select upper( {un_char_col} ) from {tbanme} ", - f"select upper(ceil( {un_char_col} )) from {tbanme} ", + f"select upper( {un_char_col} ) from {tbname} ", + f"select upper(ceil( {un_char_col} )) from {tbname} ", + f"select {un_char_col} from {tbname} group by upper( {un_char_col} ) ", ) ) - sqls.extend( f"select upper( {un_char_col} + {un_char_col_2} ) from {tbanme} " for un_char_col_2 in UN_CHAR_COL ) - sqls.extend( f"select upper( {un_char_col} + {ts_col} ) from {tbanme} " for ts_col in TS_TYPE_COL ) - sqls.extend( f"select upper( {ts_col} ) from {tbanme} " for ts_col in TS_TYPE_COL ) - sqls.extend( f"select upper( {char_col} + {ts_col} ) from {tbanme} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) - sqls.extend( f"select upper( {char_col} + {char_col_2} ) from {tbanme} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select upper( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL ) + sqls.extend( f"select upper( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + + sqls.extend( f"select {char_col} from {tbname} group by upper( {char_col} ) " for char_col in CHAR_COL) + sqls.extend( f"select upper( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select upper( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select upper( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) sqls.extend( ( - f"select upper() from {tbanme} ", - f"select upper(*) from {tbanme} ", - f"select upper(ccccccc) from {tbanme} ", - f"select upper(111) from {tbanme} ", + f"select upper() from {tbname} ", + f"select upper(*) from {tbname} ", + f"select upper(ccccccc) from {tbname} ", + f"select upper(111) from {tbname} ", ) ) From b08a3bad5ada772b5ba3904ade3d1a39d81d3120 Mon Sep 17 00:00:00 2001 From: cpwu Date: Tue, 26 Apr 2022 17:02:53 +0800 Subject: [PATCH 09/33] fix case --- tests/system-test/2-query/cast.py | 38 ++++++++++++++++++++++++++++--- 1 file changed, 35 insertions(+), 3 deletions(-) diff --git a/tests/system-test/2-query/cast.py b/tests/system-test/2-query/cast.py index fb0f22eaf8..0e849410b7 100644 --- a/tests/system-test/2-query/cast.py +++ b/tests/system-test/2-query/cast.py @@ -35,9 +35,7 @@ class TDTestCase: for i in range(len(tdSql.queryRows)): if data_tb_col[i] is None: tdSql.checkData( i, 0 , None ) - if (col_name == "c2" or col_name == "double" ) and tbname == "t1" and i == 10: - continue - else: + if col_name not in ["c2", "double"] or tbname != "t1" or i != 10: utc_zone = datetime.timezone.utc utc_8 = datetime.timezone(datetime.timedelta(hours=8)) date_init_stamp = datetime.datetime.utcfromtimestamp(data_tb_col[i]/1000) @@ -597,7 +595,41 @@ class TDTestCase: time2str = str(int(datetime.datetime.timestamp(data_t1_c10[i])*1000)) tdSql.checkData( i, 0, time2str ) + 
tdLog.printNoPrefix("==========step39: cast constant operation to bigint, expect change to int ") + tdSql.query("select cast(12121.23323131 as bigint) as b from ct4") + ( tdSql.checkData(i, 0, 12121) for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 as binary(16)) as b from ct4") + ( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows)) ) + tdSql.query("select cast(12121.23323131 as binary(2)) as b from ct4") + ( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 as nchar(16)) as b from ct4") + ( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 as nchar(2)) as b from ct4") + ( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 + 321.876897998 as bigint) as b from ct4") + ( tdSql.checkData(i, 0, 12443) for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 + 321.876897998 as binary(16)) as b from ct4") + ( tdSql.checkData(i, 0, '12443.110129') for i in range(len(tdSql.queryRows)) ) + tdSql.query("select cast(12121.23323131 + 321.876897998 as binary(3)) as b from ct4") + ( tdSql.checkData(i, 0, '124') for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 + 321.876897998 as nchar(16)) as b from ct4") + ( tdSql.checkData(i, 0, '12443.110129') for i in range(len(tdSql.queryRows)) ) + tdSql.query("select cast(12121.23323131 + 321.876897998 as nchar(3)) as b from ct4") + ( tdSql.checkData(i, 0, '124') for i in range(len(tdSql.queryRows) ) ) + + tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as bigint) as b from ct4") + ( tdSql.checkData(i, 0, 12121) for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as binary(16)) as b from ct4") + ( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows)) ) + tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as binary(2)) as b from ct4") + ( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as nchar(16)) as b from ct4") + ( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows) ) ) + tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as nchar(2)) as b from ct4") + ( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) ) + + tdLog.printNoPrefix("==========step40: error cast condition, should return error ") tdSql.error("select cast(c1 as int) as b from ct4") tdSql.error("select cast(c1 as bool) as b from ct4") tdSql.error("select cast(c1 as tinyint) as b from ct4") From 24f8d83d057ffa3752574f9da9de1b640b624c36 Mon Sep 17 00:00:00 2001 From: cpwu Date: Wed, 27 Apr 2022 15:20:25 +0800 Subject: [PATCH 10/33] add case --- tests/system-test/2-query/join.py | 248 ++++++++++++++++++++++++++++ tests/system-test/2-query/length.py | 245 +++++++++++++++++++++++++++ tests/system-test/2-query/lower.py | 245 +++++++++++++++++++++++++++ tests/system-test/2-query/upper.py | 2 + 4 files changed, 740 insertions(+) create mode 100644 tests/system-test/2-query/join.py create mode 100644 tests/system-test/2-query/length.py create mode 100644 tests/system-test/2-query/lower.py diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py new file mode 100644 index 0000000000..e2dadbb5a6 --- /dev/null +++ 
b/tests/system-test/2-query/join.py @@ -0,0 +1,248 @@ +import datetime + +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +TS_TYPE_COL = [TS_COL] + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __length_condition(self): + length_condition = [] + for char_col in CHAR_COL: + length_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL) + length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) + length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) + + length_condition.append('''"test1234!@#$%^&*():'> 0 " + return "" + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __length_current_check(self, tbname): + length_condition = self.__length_condition() + for condition in length_condition: + where_condition = self.__where_condition(condition) + group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + group_no_having= self.__group_condition(condition ) + groups = ["", group_having, group_no_having] + + for group_condition in groups: + tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] + length_data = [ len(str(data)) if data else None for data in datas ] + tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") + for i in range(len(length_data)): + tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) + + def __length_err_check(self,tbname): + sqls = [] + + for un_char_col in UN_CHAR_COL: + sqls.extend( + ( + f"select length( {un_char_col} ) from {tbname} ", + f"select length(ceil( {un_char_col} )) from {tbname} ", + f"select {un_char_col} from {tbname} group by length( {un_char_col} ) ", + ) + ) + + sqls.extend( f"select length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL ) + sqls.extend( f"select length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + + sqls.extend( f"select {char_col} from {tbname} group by length( {char_col} ) " for char_col in CHAR_COL) + sqls.extend( f"select length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select length() from {tbname} ", + f"select length(*) from {tbname} ", + f"select length(ccccccc) from {tbname} ", + f"select length(111) from {tbname} ", + f"select length(c8, 11) 
from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + for tb in tbname: + self.__length_current_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + + for tb in tbname: + for errsql in self.__length_err_check(tb): + tdSql.error(sql=errsql) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + now_time = datetime.datetime.timestamp(datetime.datetime.now()) * 1000 + for i in range(rows): + tdSql.execute( + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", { now_time - 86400000} + ) + ( + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", { now_time - 172800000} + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 
NULL, NULL, NULL ) + ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", { now_time - 172800000 } + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( { now_time - i * 3600000 }, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", { now_time - 1000 * i } ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nachar_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nachar_limit-2", { now_time - 172800000 } + ) + ''' + ) + + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.__insert_data(100) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py new file mode 100644 index 0000000000..803cc3fcf4 --- /dev/null +++ b/tests/system-test/2-query/length.py @@ -0,0 +1,245 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +TS_TYPE_COL = [TS_COL] + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __length_condition(self): + length_condition = [] + for char_col in CHAR_COL: + length_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL) + length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) + length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in 
UN_CHAR_COL ) + + length_condition.append('''"test1234!@#$%^&*():'> 0 " + return "" + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __length_current_check(self, tbname): + length_condition = self.__length_condition() + for condition in length_condition: + where_condition = self.__where_condition(condition) + group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + group_no_having= self.__group_condition(condition ) + groups = ["", group_having, group_no_having] + + for group_condition in groups: + tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] + length_data = [ len(str(data)) if data else None for data in datas ] + tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") + for i in range(len(length_data)): + tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) + + def __length_err_check(self,tbname): + sqls = [] + + for un_char_col in UN_CHAR_COL: + sqls.extend( + ( + f"select length( {un_char_col} ) from {tbname} ", + f"select length(ceil( {un_char_col} )) from {tbname} ", + f"select {un_char_col} from {tbname} group by length( {un_char_col} ) ", + ) + ) + + sqls.extend( f"select length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL ) + sqls.extend( f"select length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + + sqls.extend( f"select {char_col} from {tbname} group by length( {char_col} ) " for char_col in CHAR_COL) + sqls.extend( f"select length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select length() from {tbname} ", + f"select length(*) from {tbname} ", + f"select length(ccccccc) from {tbname} ", + f"select length(111) from {tbname} ", + f"select length(c8, 11) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + for tb in tbname: + self.__length_current_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + + for tb in tbname: + for errsql in self.__length_err_check(tb): + tdSql.error(sql=errsql) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} 
timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + for i in range(9): + tdSql.execute( + f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + '''insert into ct1 values + ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) + ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", now()-{i}s ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 
2 }, + "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.__insert_data(100) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py new file mode 100644 index 0000000000..010e33d738 --- /dev/null +++ b/tests/system-test/2-query/lower.py @@ -0,0 +1,245 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +TS_TYPE_COL = [TS_COL] + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __lower_condition(self): + lower_condition = [] + for char_col in CHAR_COL: + lower_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + lower_condition.extend(f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL) + lower_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) + lower_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) + + lower_condition.append('''"test1234!@#$%^&*():'> 0 " + return "" + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __lower_current_check(self, tbname): + lower_condition = self.__lower_condition() + for condition in lower_condition: + where_condition = self.__where_condition(condition) + group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + group_no_having= self.__group_condition(condition ) + groups = ["", group_having, group_no_having] + + for group_condition in groups: + tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] + lower_data = [ str(data).lower() if data else None for data in datas ] + tdSql.query(f"select lower( {condition} ) from {tbname} {where_condition} {group_condition}") + for i in range(len(lower_data)): + tdSql.checkData(i, 0, lower_data[i] ) if lower_data[i] else tdSql.checkData(i, 0, None) + + def __lower_err_check(self,tbname): + sqls = [] + + for un_char_col in UN_CHAR_COL: + sqls.extend( + ( + f"select lower( {un_char_col} ) from {tbname} ", + f"select lower(ceil( {un_char_col} )) from {tbname} ", + f"select {un_char_col} from {tbname} group by lower( {un_char_col} ) ", + ) + ) + + sqls.extend( f"select lower( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL ) + sqls.extend( f"select lower( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + + sqls.extend( f"select {char_col} from {tbname} group by 
lower( {char_col} ) " for char_col in CHAR_COL) + sqls.extend( f"select lower( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select lower( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select lower( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select lower() from {tbname} ", + f"select lower(*) from {tbname} ", + f"select lower(ccccccc) from {tbname} ", + f"select lower(111) from {tbname} ", + f"select lower(c8, 11) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + for tb in tbname: + self.__lower_current_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1"] + + for tb in tbname: + for errsql in self.__lower_err_check(tb): + tdSql.error(sql=errsql) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + for i in range(9): + tdSql.execute( + f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + '''insert into ct1 values + ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) + ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 
}, "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", now()-{i}s ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nachar_limit-1", now()-1d + ) + ( + now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nachar_limit-2", now()-2d + ) + ''' + ) + + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.__insert_data(100) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index 8ece046492..d2aa3c5560 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -85,6 +85,8 @@ class TDTestCase: sqls.extend( f"select upper( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) sqls.extend( f"select upper( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) sqls.extend( f"select upper( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) sqls.extend( ( f"select upper() from {tbname} ", From 17e2cd959e84ce2bf479f13aa9e0569a1259b44a Mon Sep 17 00:00:00 2001 From: cpwu Date: Wed, 27 Apr 2022 17:04:25 +0800 Subject: [PATCH 11/33] fix case --- 
tests/system-test/2-query/join.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index e2dadbb5a6..740dbff6f4 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -146,7 +146,7 @@ class TDTestCase: tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') def __insert_data(self, rows): - now_time = datetime.datetime.timestamp(datetime.datetime.now()) * 1000 + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) for i in range(rows): tdSql.execute( f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" From fc463a30aa12ed102f06450e9b6104717c0c400d Mon Sep 17 00:00:00 2001 From: cpwu Date: Wed, 27 Apr 2022 17:09:39 +0800 Subject: [PATCH 12/33] fix case --- tests/system-test/2-query/join.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index 740dbff6f4..6e85ce4e49 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -144,18 +144,19 @@ class TDTestCase: for i in range(4): tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2} def __insert_data(self, rows): now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) for i in range(rows): tdSql.execute( - f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( f'''insert into ct1 values @@ -198,7 +199,7 @@ class TDTestCase: for i in range(rows): insert_data = f'''insert into t1 values - ( { now_time - i * 3600000 }, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, "binary_{i}", "nchar_{i}", { now_time - 1000 * i } ) ''' tdSql.execute(insert_data) From 813b55779dbea453369233f7c25b5aa676e71238 Mon Sep 17 00:00:00 2001 From: cpwu Date: Wed, 27 Apr 2022 18:01:14 +0800 Subject: [PATCH 13/33] fix case --- tests/system-test/2-query/join.py | 12 ++++++------ tests/system-test/2-query/length.py | 24 +++++++++++++++++------- tests/system-test/2-query/lower.py | 12 ++++++------ tests/system-test/2-query/sum.py | 12 ++++++------ 
tests/system-test/2-query/upper.py | 12 ++++++------ 5 files changed, 41 insertions(+), 31 deletions(-) diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index 6e85ce4e49..af4e2b93c1 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -172,11 +172,11 @@ class TDTestCase: ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", { now_time - 86400000} + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000} ) ( { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", { now_time - 172800000} + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000} ) ''' ) @@ -188,11 +188,11 @@ class TDTestCase: ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", { now_time - 86400000 } + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", { now_time - 172800000 } + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) @@ -210,12 +210,12 @@ class TDTestCase: ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nachar_limit-1", { now_time - 86400000 } + "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nachar_limit-2", { now_time - 172800000 } + "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 803cc3fcf4..2bf3f22b56 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -61,7 +61,17 @@ class TDTestCase: for group_condition in groups: tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] - length_data = [ len(str(data)) if data else None for data in datas ] + # length_data = [ len(str(data)) if data else None for data in datas ] + length_data = [] + for data in datas : + if not datas: + length_data.append(None) + elif "nchar" in length_condition or NCHAR_COL in length_condition: + length_data.append(len(str(data)) * 4) + else: + length_data.append(len(str(data))) + + tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} 
{group_condition}") for i in range(len(length_data)): tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) @@ -168,11 +178,11 @@ class TDTestCase: ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -184,11 +194,11 @@ class TDTestCase: ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -206,12 +216,12 @@ class TDTestCase: ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nachar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", now()-1d ) ( now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nachar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py index 010e33d738..322a7f73fd 100644 --- a/tests/system-test/2-query/lower.py +++ b/tests/system-test/2-query/lower.py @@ -168,11 +168,11 @@ class TDTestCase: ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -184,11 +184,11 @@ class TDTestCase: ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { -1 * 3.2 * pow(10,38) }, { -1.2 * 
pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -206,12 +206,12 @@ class TDTestCase: ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nachar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", now()-1d ) ( now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nachar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) diff --git a/tests/system-test/2-query/sum.py b/tests/system-test/2-query/sum.py index 467d90ca8d..5b78550c2d 100644 --- a/tests/system-test/2-query/sum.py +++ b/tests/system-test/2-query/sum.py @@ -156,11 +156,11 @@ class TDTestCase: ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -172,11 +172,11 @@ class TDTestCase: ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -194,12 +194,12 @@ class TDTestCase: ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nachar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", now()-1d ) ( now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nachar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index d2aa3c5560..e075f3a111 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -167,11 +167,11 @@ class TDTestCase: ( 
now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -183,11 +183,11 @@ class TDTestCase: ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d ) ( now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) @@ -205,12 +205,12 @@ class TDTestCase: ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nachar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", now()-1d ) ( now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nachar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", now()-2d ) ''' ) From 0dd7ce47ac168b45e8ad4d0702ecc6fc569ee824 Mon Sep 17 00:00:00 2001 From: cpwu Date: Wed, 27 Apr 2022 19:06:54 +0800 Subject: [PATCH 14/33] fix case --- tests/system-test/2-query/length.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 2bf3f22b56..b9ad6b9944 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -66,7 +66,7 @@ class TDTestCase: for data in datas : if not datas: length_data.append(None) - elif "nchar" in length_condition or NCHAR_COL in length_condition: + elif "nchar" in condition or NCHAR_COL in condition: length_data.append(len(str(data)) * 4) else: length_data.append(len(str(data))) From cdbce2980afa9bdd162a579cac60c57649ed3c63 Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 16:39:10 +0800 Subject: [PATCH 15/33] fix case --- tests/system-test/2-query/length.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index b9ad6b9944..67742709ff 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -64,14 +64,13 @@ class TDTestCase: # length_data = [ len(str(data)) if data else None for data in datas ] length_data = [] for data in datas : - if not datas: + if not data: length_data.append(None) - elif "nchar" in condition or NCHAR_COL in condition: + elif "as nchar" in 
condition or (NCHAR_COL in condition and "as binary" not in condition): length_data.append(len(str(data)) * 4) else: length_data.append(len(str(data))) - tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") for i in range(len(length_data)): tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) @@ -111,14 +110,14 @@ class TDTestCase: def __test_current(self): tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") - tbname = ["ct1", "ct2", "ct4", "t1"] + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: self.__length_current_check(tb) tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") - tbname = ["ct1", "ct2", "ct4", "t1"] + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: for errsql in self.__length_err_check(tb): From 1dd478d1391f7a5c62e72077d3eeed09ae5f01d8 Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 17:21:25 +0800 Subject: [PATCH 16/33] add char_length case --- tests/system-test/2-query/char_length.py | 245 +++++++++++++++++++++++ tests/system-test/2-query/lower.py | 4 +- tests/system-test/2-query/upper.py | 4 +- 3 files changed, 249 insertions(+), 4 deletions(-) create mode 100644 tests/system-test/2-query/char_length.py diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py new file mode 100644 index 0000000000..26c1572584 --- /dev/null +++ b/tests/system-test/2-query/char_length.py @@ -0,0 +1,245 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +TS_TYPE_COL = [TS_COL] + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __length_condition(self): + length_condition = [] + for char_col in CHAR_COL: + length_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL) + length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) + length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) + + length_condition.append('''"test1234!@#$%^&*():'> 0 " + return "" + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __length_current_check(self, tbname): + length_condition = self.__length_condition() + for condition in length_condition: + where_condition = self.__where_condition(condition) + group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + group_no_having= self.__group_condition(condition ) + groups = ["", group_having, group_no_having] + + for group_condition in groups: + tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] + length_data = [ len(str(data)) if data else None for data in datas ] + 
tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") + for i in range(len(length_data)): + tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) + + def __length_err_check(self,tbname): + sqls = [] + + for un_char_col in UN_CHAR_COL: + sqls.extend( + ( + f"select length( {un_char_col} ) from {tbname} ", + f"select length(ceil( {un_char_col} )) from {tbname} ", + f"select {un_char_col} from {tbname} group by length( {un_char_col} ) ", + ) + ) + + sqls.extend( f"select length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL ) + sqls.extend( f"select length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + + sqls.extend( f"select {char_col} from {tbname} group by length( {char_col} ) " for char_col in CHAR_COL) + sqls.extend( f"select length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select length() from {tbname} ", + f"select length(*) from {tbname} ", + f"select length(ccccccc) from {tbname} ", + f"select length(111) from {tbname} ", + f"select length(c8, 11) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + for tb in tbname: + self.__length_current_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + + for tb in tbname: + for errsql in self.__length_err_check(tb): + tdSql.error(sql=errsql) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + for i in range(9): + tdSql.execute( + f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', 
now()+{1*i}a )" + ) + tdSql.execute( + f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + ) + tdSql.execute( + '''insert into ct1 values + ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) + ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + ) + ( + now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", now()-{i}s ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nchar_limit-1", now()-1d + ) + ( + now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nchar_limit-2", now()-2d + ) + ''' + ) + + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.__insert_data(100) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py index 322a7f73fd..c45e48c0c1 100644 --- 
a/tests/system-test/2-query/lower.py +++ b/tests/system-test/2-query/lower.py @@ -101,14 +101,14 @@ class TDTestCase: def __test_current(self): tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") - tbname = ["ct1", "ct2", "ct4", "t1"] + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: self.__lower_current_check(tb) tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") - tbname = ["ct1", "ct2", "ct4", "t1"] + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: for errsql in self.__lower_err_check(tb): diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index e075f3a111..cdeedc9307 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -100,14 +100,14 @@ class TDTestCase: def __test_current(self): tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") - tbname = ["ct1", "ct2", "ct4", "t1"] + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: self.__upper_current_check(tb) tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") - tbname = ["ct1", "ct2", "ct4", "t1"] + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: for errsql in self.__upper_err_check(tb): From 2598bb81022213ea28111c68b80b28fa2047b9fc Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 17:29:29 +0800 Subject: [PATCH 17/33] fix case --- tests/system-test/2-query/char_length.py | 64 ++++++++++++------------ 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index 26c1572584..5452dcfbfc 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -26,22 +26,22 @@ class TDTestCase: tdLog.debug(f"start to excute {__file__}") tdSql.init(conn.cursor()) - def __length_condition(self): - length_condition = [] + def __char_char_length_condition(self): + char_length_condition = [] for char_col in CHAR_COL: - length_condition.extend( + char_length_condition.extend( ( char_col, f"upper( {char_col} )", ) ) - length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL) - length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) - length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) + char_length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL) + char_length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL ) + char_length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL ) - length_condition.append('''"test1234!@#$%^&*():'> 0 " @@ -50,9 +50,9 @@ class TDTestCase: def __group_condition(self, col, having = ""): return f" group by {col} having {having}" if having else f" group by {col} " - def __length_current_check(self, tbname): - length_condition = self.__length_condition() - for condition in length_condition: + def __char_length_current_check(self, tbname): + char_length_condition = self.__char_length_condition() + for condition in 
char_length_condition: where_condition = self.__where_condition(condition) group_having = self.__group_condition(condition, having=f"{condition} is not null " ) group_no_having= self.__group_condition(condition ) @@ -61,39 +61,39 @@ class TDTestCase: for group_condition in groups: tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] - length_data = [ len(str(data)) if data else None for data in datas ] - tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") - for i in range(len(length_data)): - tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) + char_length_data = [ len(str(data)) if data else None for data in datas ] + tdSql.query(f"select char_length( {condition} ) from {tbname} {where_condition} {group_condition}") + for i in range(len(char_length_data)): + tdSql.checkData(i, 0, char_length_data[i] ) if char_length_data[i] else tdSql.checkData(i, 0, None) - def __length_err_check(self,tbname): + def __char_length_err_check(self,tbname): sqls = [] for un_char_col in UN_CHAR_COL: sqls.extend( ( - f"select length( {un_char_col} ) from {tbname} ", - f"select length(ceil( {un_char_col} )) from {tbname} ", - f"select {un_char_col} from {tbname} group by length( {un_char_col} ) ", + f"select char_length( {un_char_col} ) from {tbname} ", + f"select char_length(ceil( {un_char_col} )) from {tbname} ", + f"select {un_char_col} from {tbname} group by char_length( {un_char_col} ) ", ) ) - sqls.extend( f"select length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL ) - sqls.extend( f"select length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select char_length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL ) + sqls.extend( f"select char_length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) - sqls.extend( f"select {char_col} from {tbname} group by length( {char_col} ) " for char_col in CHAR_COL) - sqls.extend( f"select length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) - sqls.extend( f"select length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) - sqls.extend( f"select length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select {char_col} from {tbname} group by char_length( {char_col} ) " for char_col in CHAR_COL) + sqls.extend( f"select char_length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select char_length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select char_length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) sqls.extend( ( - f"select length() from {tbname} ", - f"select length(*) from {tbname} ", - f"select length(ccccccc) from {tbname} ", - f"select length(111) from {tbname} ", - f"select length(c8, 11) from {tbname} ", + f"select char_length() from {tbname} ", + f"select char_length(*) from {tbname} ", + f"select char_length(ccccccc) from {tbname} ", + f"select char_length(111) from {tbname} ", + f"select 
char_length(c8, 11) from {tbname} ", ) ) @@ -103,7 +103,7 @@ class TDTestCase: tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: - self.__length_current_check(tb) + self.__char_length_current_check(tb) tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") def __test_error(self): @@ -111,7 +111,7 @@ class TDTestCase: tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: - for errsql in self.__length_err_check(tb): + for errsql in self.__char_length_err_check(tb): tdSql.error(sql=errsql) tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") From 3bbc422bd0227cfee202dc32bd4557d4504ba0cb Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 17:29:39 +0800 Subject: [PATCH 18/33] fix case --- tests/system-test/2-query/char_length.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index 5452dcfbfc..814c208217 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -26,7 +26,7 @@ class TDTestCase: tdLog.debug(f"start to excute {__file__}") tdSql.init(conn.cursor()) - def __char_char_length_condition(self): + def __char_length_condition(self): char_length_condition = [] for char_col in CHAR_COL: char_length_condition.extend( From 2d43f3e56cdd9ac7db9ba473f75214251f1fdb18 Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 17:59:39 +0800 Subject: [PATCH 19/33] fix case --- tests/system-test/2-query/length.py | 37 ++++++++++++++++++----------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 67742709ff..acd47da16c 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -59,21 +59,30 @@ class TDTestCase: groups = ["", group_having, group_no_having] for group_condition in groups: - tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") - datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] - # length_data = [ len(str(data)) if data else None for data in datas ] - length_data = [] - for data in datas : - if not data: - length_data.append(None) - elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition): - length_data.append(len(str(data)) * 4) - else: - length_data.append(len(str(data))) + # tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") + # datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] + # # length_data = [ len(str(data)) if data else None for data in datas ] + # length_data = [] + # for data in datas : + # if not data: + # length_data.append(None) + # elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition): + # length_data.append(len(str(data)) * 4) + # else: + # length_data.append(len(str(data))) + + # tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") + # for i in range(len(length_data)): + # tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) + tdSql.query(f"select {condition}, length( {condition} ) from {tbname} {where_condition} {group_condition} ") + for i in range(tdSql.queryRows): + if not tdSql.getData(i,1): + tdSql.checkData(i, 1, None) + elif "as nchar" in condition or (NCHAR_COL in condition and 
"as binary" not in condition): + tdSql.checkData(i, 1, len(tdSql.getData(i,1)) * 4 ) + else: + tdSql.checkData(i, 1, len(tdSql.getData(i,1))) - tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") - for i in range(len(length_data)): - tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) def __length_err_check(self,tbname): sqls = [] From 081310bb39b1152f4b9166e70cb56dff1086f4f8 Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 18:05:14 +0800 Subject: [PATCH 20/33] fix case --- tests/system-test/2-query/length.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index acd47da16c..3dc41ef2e0 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -79,9 +79,9 @@ class TDTestCase: if not tdSql.getData(i,1): tdSql.checkData(i, 1, None) elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition): - tdSql.checkData(i, 1, len(tdSql.getData(i,1)) * 4 ) + tdSql.checkData(i, 1, len(str(tdSql.getData(i,1) ) ) * 4 ) else: - tdSql.checkData(i, 1, len(tdSql.getData(i,1))) + tdSql.checkData(i, 1, len(str(tdSql.getData(i,1) ) ) ) def __length_err_check(self,tbname): From 0dfd9dc03f95edbc7ceaf974ff8cc8623ecd4a8d Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 18:16:27 +0800 Subject: [PATCH 21/33] fix case --- tests/system-test/2-query/length.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 3dc41ef2e0..7f88be9180 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -79,9 +79,9 @@ class TDTestCase: if not tdSql.getData(i,1): tdSql.checkData(i, 1, None) elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition): - tdSql.checkData(i, 1, len(str(tdSql.getData(i,1) ) ) * 4 ) + tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) * 4 ) else: - tdSql.checkData(i, 1, len(str(tdSql.getData(i,1) ) ) ) + tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) ) def __length_err_check(self,tbname): From a95a5b106baf7bb560f2947807397f463e519fad Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 18:21:00 +0800 Subject: [PATCH 22/33] fix case --- tests/system-test/2-query/char_length.py | 14 ++++++++------ tests/system-test/2-query/length.py | 15 --------------- 2 files changed, 8 insertions(+), 21 deletions(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index 814c208217..fe9f5807ef 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -59,12 +59,14 @@ class TDTestCase: groups = ["", group_having, group_no_having] for group_condition in groups: - tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") - datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] - char_length_data = [ len(str(data)) if data else None for data in datas ] - tdSql.query(f"select char_length( {condition} ) from {tbname} {where_condition} {group_condition}") - for i in range(len(char_length_data)): - tdSql.checkData(i, 0, char_length_data[i] ) if char_length_data[i] else tdSql.checkData(i, 0, None) + tdSql.query(f"select {condition}, length( {condition} ) from {tbname} {where_condition} {group_condition} ") + for i in range(tdSql.queryRows): + if not tdSql.getData(i,1): + 
tdSql.checkData(i, 1, None) + # elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition): + # tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) * 4 ) + else: + tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) ) def __char_length_err_check(self,tbname): sqls = [] diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 7f88be9180..a55c9a5bb7 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -59,21 +59,6 @@ class TDTestCase: groups = ["", group_having, group_no_having] for group_condition in groups: - # tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ") - # datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)] - # # length_data = [ len(str(data)) if data else None for data in datas ] - # length_data = [] - # for data in datas : - # if not data: - # length_data.append(None) - # elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition): - # length_data.append(len(str(data)) * 4) - # else: - # length_data.append(len(str(data))) - - # tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}") - # for i in range(len(length_data)): - # tdSql.checkData(i, 0, length_data[i] ) if length_data[i] else tdSql.checkData(i, 0, None) tdSql.query(f"select {condition}, length( {condition} ) from {tbname} {where_condition} {group_condition} ") for i in range(tdSql.queryRows): if not tdSql.getData(i,1): From cfd94e6dc2a80b20ec49281492522f21dc8214c0 Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 19:18:51 +0800 Subject: [PATCH 23/33] fix case --- tests/system-test/2-query/char_length.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index fe9f5807ef..a27c11ed82 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -59,7 +59,7 @@ class TDTestCase: groups = ["", group_having, group_no_having] for group_condition in groups: - tdSql.query(f"select {condition}, length( {condition} ) from {tbname} {where_condition} {group_condition} ") + tdSql.query(f"select {condition}, char_length( {condition} ) from {tbname} {where_condition} {group_condition} ") for i in range(tdSql.queryRows): if not tdSql.getData(i,1): tdSql.checkData(i, 1, None) From 0618e8a34ca8607405630d0a512ddeb581bcf086 Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 19:22:27 +0800 Subject: [PATCH 24/33] fix case --- tests/system-test/2-query/char_length.py | 61 ++++++++++++------------ tests/system-test/2-query/length.py | 61 ++++++++++++------------ tests/system-test/2-query/lower.py | 61 ++++++++++++------------ tests/system-test/2-query/sum.py | 61 ++++++++++++------------ 4 files changed, 124 insertions(+), 120 deletions(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index a27c11ed82..b2998a4d1d 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -146,74 +146,75 @@ class TDTestCase: tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') def __insert_data(self, rows): - for i in range(9): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): tdSql.execute( - f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', 
now()+{1*i}a )" + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - '''insert into ct1 values - ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) - ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } ) ''' ) tdSql.execute( f'''insert into ct4 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000} ) ( - now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000} ) ''' ) tdSql.execute( f'''insert into ct2 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d 
+ { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) for i in range(rows): insert_data = f'''insert into t1 values - ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, - "binary_{i}", "nchar_{i}", now()-{i}s ) + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", { now_time - 1000 * i } ) ''' tdSql.execute(insert_data) tdSql.execute( f'''insert into t1 values - ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nchar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nchar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index a55c9a5bb7..1262030305 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -147,74 +147,75 @@ class TDTestCase: tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') def __insert_data(self, rows): - for i in range(9): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): tdSql.execute( - f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 
'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - '''insert into ct1 values - ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) - ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } ) ''' ) tdSql.execute( f'''insert into ct4 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000} ) ( - now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000} ) ''' ) tdSql.execute( f'''insert into ct2 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * 
pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) for i in range(rows): insert_data = f'''insert into t1 values - ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, - "binary_{i}", "nchar_{i}", now()-{i}s ) + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", { now_time - 1000 * i } ) ''' tdSql.execute(insert_data) tdSql.execute( f'''insert into t1 values - ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nchar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nchar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py index c45e48c0c1..3e7a9a59c9 100644 --- a/tests/system-test/2-query/lower.py +++ b/tests/system-test/2-query/lower.py @@ -144,74 +144,75 @@ class TDTestCase: tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') def __insert_data(self, rows): - for i in range(9): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): tdSql.execute( - f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - '''insert into ct1 values - ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) - ( 
now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } ) ''' ) tdSql.execute( f'''insert into ct4 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000} ) ( - now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000} ) ''' ) tdSql.execute( f'''insert into ct2 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) for i in range(rows): insert_data = f'''insert into t1 values - ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, - "binary_{i}", "nchar_{i}", now()-{i}s ) + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", { 
now_time - 1000 * i } ) ''' tdSql.execute(insert_data) tdSql.execute( f'''insert into t1 values - ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nchar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nchar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) diff --git a/tests/system-test/2-query/sum.py b/tests/system-test/2-query/sum.py index 5b78550c2d..c867204fbf 100644 --- a/tests/system-test/2-query/sum.py +++ b/tests/system-test/2-query/sum.py @@ -132,74 +132,75 @@ class TDTestCase: tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') def __insert_data(self, rows): - for i in range(9): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): tdSql.execute( - f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - '''insert into ct1 values - ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) - ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } ) ''' ) tdSql.execute( f'''insert into ct4 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, 
NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000} ) ( - now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000} ) ''' ) tdSql.execute( f'''insert into ct2 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) for i in range(rows): insert_data = f'''insert into t1 values - ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, - "binary_{i}", "nchar_{i}", now()-{i}s ) + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", { now_time - 1000 * i } ) ''' tdSql.execute(insert_data) tdSql.execute( f'''insert into t1 values - ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( 
rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nchar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nchar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) From dbf17d0db5ed6829336d3e37419c3ea8f0cf9fb7 Mon Sep 17 00:00:00 2001 From: cpwu Date: Thu, 28 Apr 2022 19:25:17 +0800 Subject: [PATCH 25/33] add case --- tests/system-test/2-query/char_length.py | 2 +- tests/system-test/2-query/join.py | 2 +- tests/system-test/2-query/length.py | 2 +- tests/system-test/2-query/lower.py | 2 +- tests/system-test/2-query/sum.py | 2 +- tests/system-test/2-query/upper.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index b2998a4d1d..31a58c2b26 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -227,7 +227,7 @@ class TDTestCase: self.__create_tb() tdLog.printNoPrefix("==========step2:insert data") - self.__insert_data(100) + self.__insert_data(10) tdLog.printNoPrefix("==========step3:all check") self.all_test() diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index af4e2b93c1..14b2c3978d 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -228,7 +228,7 @@ class TDTestCase: self.__create_tb() tdLog.printNoPrefix("==========step2:insert data") - self.__insert_data(100) + self.__insert_data(10) tdLog.printNoPrefix("==========step3:all check") self.all_test() diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 1262030305..57a8bf71f3 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -228,7 +228,7 @@ class TDTestCase: self.__create_tb() tdLog.printNoPrefix("==========step2:insert data") - self.__insert_data(100) + self.__insert_data(10) tdLog.printNoPrefix("==========step3:all check") self.all_test() diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py index 3e7a9a59c9..0af328fb32 100644 --- a/tests/system-test/2-query/lower.py +++ b/tests/system-test/2-query/lower.py @@ -225,7 +225,7 @@ class TDTestCase: self.__create_tb() tdLog.printNoPrefix("==========step2:insert data") - self.__insert_data(100) + self.__insert_data(10) tdLog.printNoPrefix("==========step3:all check") self.all_test() diff --git a/tests/system-test/2-query/sum.py b/tests/system-test/2-query/sum.py index c867204fbf..e9728abe63 100644 --- a/tests/system-test/2-query/sum.py +++ b/tests/system-test/2-query/sum.py @@ -213,7 +213,7 @@ class TDTestCase: self.__create_tb() tdLog.printNoPrefix("==========step2:insert data") - self.__insert_data(100) + self.__insert_data(10) tdLog.printNoPrefix("==========step3:all check") self.all_test() diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index cdeedc9307..52cc8d4ca9 100644 --- 
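The hunks in the patch above replace relative `now()±N{d,h,m,s}` expressions with absolute arithmetic on a fixed `now_time` in epoch milliseconds, so the large literals are plain unit conversions. The sketch below only spells those conversions out for readers decoding the hunks; it is an illustrative aid with an arbitrary base timestamp, not code from the patch series.

```c
#include <inttypes.h>
#include <stdio.h>

/* Unit conversions behind the millisecond literals used in the rewritten
 * INSERT statements (epoch-millisecond timestamps relative to now_time). */
#define MS_PER_MIN  60000LL
#define MS_PER_HOUR 3600000LL      /* e.g. now_time - i * 3600000  ~ i hours ago */
#define MS_PER_DAY  86400000LL     /* e.g. now_time - 86400000     ~ 1 day ago   */
#define MS_PER_30D  2592000000LL   /* 30 * MS_PER_DAY                            */
#define MS_PER_90D  7776000000LL   /* 90 * MS_PER_DAY                            */

int main(void) {
  /* now_time here is an arbitrary fixed epoch-ms base, standing in for the
   * value the test suite computes once at setup time. */
  int64_t now_time     = 1651190400000LL;
  int64_t one_hour_ago = now_time - MS_PER_HOUR;
  int64_t one_day_ago  = now_time - MS_PER_DAY;
  int64_t ninety_days  = now_time - MS_PER_90D;

  printf("1h  ago: %" PRId64 "\n", one_hour_ago);
  printf("1d  ago: %" PRId64 "\n", one_day_ago);
  printf("90d ago: %" PRId64 "\n", ninety_days);
  return 0;
}
```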
a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -223,7 +223,7 @@ class TDTestCase: self.__create_tb() tdLog.printNoPrefix("==========step2:insert data") - self.__insert_data(100) + self.__insert_data(10) tdLog.printNoPrefix("==========step3:all check") self.all_test() From 640a26524a731ba270dc9e2dd34c319075b7e3b5 Mon Sep 17 00:00:00 2001 From: plum-lihui Date: Thu, 28 Apr 2022 19:50:06 +0800 Subject: [PATCH 26/33] [test: add test case for check network by taos] --- tests/system-test/fulltest.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/system-test/fulltest.sh b/tests/system-test/fulltest.sh index 94ac666272..d7c493e870 100755 --- a/tests/system-test/fulltest.sh +++ b/tests/system-test/fulltest.sh @@ -3,6 +3,8 @@ set -e set -x python3 ./test.py -f 0-others/taosShell.py +python3 ./test.py -f 0-others/taosShellError.py +python3 ./test.py -f 0-others/taosShellNetChk.py #python3 ./test.py -f 2-query/between.py From d8f8e075e105add4c17412cbc4935c452941e451 Mon Sep 17 00:00:00 2001 From: Ganlin Zhao Date: Thu, 28 Apr 2022 21:27:46 +0800 Subject: [PATCH 27/33] feat(query): add spread function TD-14297 --- include/libs/function/functionMgt.h | 1 + source/libs/function/inc/builtinsimpl.h | 5 + source/libs/function/src/builtins.c | 977 +++++++++++++----------- source/libs/function/src/builtinsimpl.c | 105 +++ 4 files changed, 657 insertions(+), 431 deletions(-) diff --git a/include/libs/function/functionMgt.h b/include/libs/function/functionMgt.h index 0572262bfc..f6304401fc 100644 --- a/include/libs/function/functionMgt.h +++ b/include/libs/function/functionMgt.h @@ -40,6 +40,7 @@ typedef enum EFunctionType { FUNCTION_TYPE_STDDEV, FUNCTION_TYPE_SUM, FUNCTION_TYPE_TWA, + FUNCTION_TYPE_HISTOGRAM, // nonstandard SQL function FUNCTION_TYPE_BOTTOM = 500, diff --git a/source/libs/function/inc/builtinsimpl.h b/source/libs/function/inc/builtinsimpl.h index d419feca45..87cd3e24a8 100644 --- a/source/libs/function/inc/builtinsimpl.h +++ b/source/libs/function/inc/builtinsimpl.h @@ -68,6 +68,11 @@ bool getTopBotFuncEnv(SFunctionNode* UNUSED_PARAM(pFunc), SFuncExecEnv* pEnv); int32_t topFunction(SqlFunctionCtx *pCtx); int32_t topBotFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock); +bool getSpreadFuncEnv(struct SFunctionNode* pFunc, SFuncExecEnv* pEnv); +bool spreadFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo); +int32_t spreadFunction(SqlFunctionCtx* pCtx); +int32_t spreadFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock); + #ifdef __cplusplus } #endif diff --git a/source/libs/function/src/builtins.c b/source/libs/function/src/builtins.c index 45cfbae1ad..6d590662df 100644 --- a/source/libs/function/src/builtins.c +++ b/source/libs/function/src/builtins.c @@ -212,7 +212,16 @@ static int32_t translateBottom(SFunctionNode* pFunc, char* pErrBuf, int32_t len) } static int32_t translateSpread(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // todo + if (1 != LIST_LENGTH(pFunc->pParameterList)) { + return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); + } + + uint8_t paraType = ((SExprNode*)nodesListGetNode(pFunc->pParameterList, 0))->resType.type; + if (!IS_NUMERIC_TYPE(paraType) && TSDB_DATA_TYPE_TIMESTAMP != paraType) { + return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); + } + + pFunc->node.resType = (SDataType) { .bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE }; return TSDB_CODE_SUCCESS; } @@ -431,435 +440,541 @@ static int32_t translateToJson(SFunctionNode* pFunc, 
char* pErrBuf, int32_t len) } const SBuiltinFuncDefinition funcMgtBuiltins[] = { - {.name = "count", - .type = FUNCTION_TYPE_COUNT, - .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, - .translateFunc = translateCount, - .dataRequiredFunc = countDataRequired, - .getEnvFunc = getCountFuncEnv, - .initFunc = functionSetup, - .processFunc = countFunction, - .finalizeFunc = functionFinalize}, - {.name = "sum", - .type = FUNCTION_TYPE_SUM, - .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, - .translateFunc = translateSum, - .dataRequiredFunc = statisDataRequired, - .getEnvFunc = getSumFuncEnv, - .initFunc = functionSetup, - .processFunc = sumFunction, - .finalizeFunc = functionFinalize}, - {.name = "min", - .type = FUNCTION_TYPE_MIN, - .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, - .translateFunc = translateInOutNum, - .dataRequiredFunc = statisDataRequired, - .getEnvFunc = getMinmaxFuncEnv, - .initFunc = minFunctionSetup, - .processFunc = minFunction, - .finalizeFunc = functionFinalize}, - {.name = "max", - .type = FUNCTION_TYPE_MAX, - .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, - .translateFunc = translateInOutNum, - .dataRequiredFunc = statisDataRequired, - .getEnvFunc = getMinmaxFuncEnv, - .initFunc = maxFunctionSetup, - .processFunc = maxFunction, - .finalizeFunc = functionFinalize}, - {.name = "stddev", - .type = FUNCTION_TYPE_STDDEV, - .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = getStddevFuncEnv, - .initFunc = stddevFunctionSetup, - .processFunc = stddevFunction, - .finalizeFunc = stddevFinalize}, - {.name = "avg", - .type = FUNCTION_TYPE_AVG, - .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = getAvgFuncEnv, - .initFunc = avgFunctionSetup, - .processFunc = avgFunction, - .finalizeFunc = avgFinalize}, - {.name = "percentile", - .type = FUNCTION_TYPE_PERCENTILE, - .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translatePercentile, - .getEnvFunc = getPercentileFuncEnv, - .initFunc = percentileFunctionSetup, - .processFunc = percentileFunction, - .finalizeFunc = percentileFinalize}, - {.name = "apercentile", - .type = FUNCTION_TYPE_APERCENTILE, - .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateApercentile, - .getEnvFunc = getMinmaxFuncEnv, - .initFunc = maxFunctionSetup, - .processFunc = maxFunction, - .finalizeFunc = functionFinalize}, - { - .name = "top", - .type = FUNCTION_TYPE_TOP, - .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateTop, - .getEnvFunc = getTopBotFuncEnv, - .initFunc = functionSetup, - .processFunc = topFunction, - .finalizeFunc = topBotFinalize, - }, - {.name = "bottom", - .type = FUNCTION_TYPE_BOTTOM, - .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateBottom, - .getEnvFunc = getMinmaxFuncEnv, - .initFunc = maxFunctionSetup, - .processFunc = maxFunction, - .finalizeFunc = functionFinalize}, - {.name = "spread", - .type = FUNCTION_TYPE_SPREAD, - .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateSpread, - .getEnvFunc = getMinmaxFuncEnv, - .initFunc = maxFunctionSetup, - .processFunc = maxFunction, - .finalizeFunc = functionFinalize}, - {.name = "last_row", - .type = FUNCTION_TYPE_LAST_ROW, - .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC, - .translateFunc = translateLastRow, - .getEnvFunc = getMinmaxFuncEnv, - .initFunc = maxFunctionSetup, - .processFunc = maxFunction, - .finalizeFunc = functionFinalize}, 
- {.name = "first", - .type = FUNCTION_TYPE_FIRST, - .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC, - .translateFunc = translateFirstLast, - .getEnvFunc = getFirstLastFuncEnv, - .initFunc = functionSetup, - .processFunc = firstFunction, - .finalizeFunc = functionFinalize}, - {.name = "last", - .type = FUNCTION_TYPE_LAST, - .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC, - .translateFunc = translateFirstLast, - .getEnvFunc = getFirstLastFuncEnv, - .initFunc = functionSetup, - .processFunc = lastFunction, - .finalizeFunc = functionFinalize}, - {.name = "diff", - .type = FUNCTION_TYPE_DIFF, - .classification = FUNC_MGT_NONSTANDARD_SQL_FUNC, - .translateFunc = translateInOutNum, - .getEnvFunc = getDiffFuncEnv, - .initFunc = diffFunctionSetup, - .processFunc = diffFunction, - .finalizeFunc = functionFinalize}, - {.name = "abs", - .type = FUNCTION_TYPE_ABS, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = absFunction, - .finalizeFunc = NULL}, - {.name = "log", - .type = FUNCTION_TYPE_LOG, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateIn2NumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = logFunction, - .finalizeFunc = NULL}, - {.name = "pow", - .type = FUNCTION_TYPE_POW, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateIn2NumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = powFunction, - .finalizeFunc = NULL}, - {.name = "sqrt", - .type = FUNCTION_TYPE_SQRT, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = sqrtFunction, - .finalizeFunc = NULL}, - {.name = "ceil", - .type = FUNCTION_TYPE_CEIL, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = ceilFunction, - .finalizeFunc = NULL}, - {.name = "floor", - .type = FUNCTION_TYPE_FLOOR, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = floorFunction, - .finalizeFunc = NULL}, - {.name = "round", - .type = FUNCTION_TYPE_ROUND, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = roundFunction, - .finalizeFunc = NULL}, - {.name = "sin", - .type = FUNCTION_TYPE_SIN, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = sinFunction, - .finalizeFunc = NULL}, - {.name = "cos", - .type = FUNCTION_TYPE_COS, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = cosFunction, - .finalizeFunc = NULL}, - {.name = "tan", - .type = FUNCTION_TYPE_TAN, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = tanFunction, - .finalizeFunc = NULL}, - {.name = "asin", - .type = FUNCTION_TYPE_ASIN, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = asinFunction, - .finalizeFunc = NULL}, - {.name = "acos", - .type = FUNCTION_TYPE_ACOS, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - 
.sprocessFunc = acosFunction, - .finalizeFunc = NULL}, - {.name = "atan", - .type = FUNCTION_TYPE_ATAN, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = atanFunction, - .finalizeFunc = NULL}, - {.name = "length", - .type = FUNCTION_TYPE_LENGTH, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateLength, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = lengthFunction, - .finalizeFunc = NULL}, - {.name = "char_length", - .type = FUNCTION_TYPE_CHAR_LENGTH, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateLength, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = charLengthFunction, - .finalizeFunc = NULL}, - {.name = "concat", - .type = FUNCTION_TYPE_CONCAT, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateConcat, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = concatFunction, - .finalizeFunc = NULL}, - {.name = "concat_ws", - .type = FUNCTION_TYPE_CONCAT_WS, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateConcatWs, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = concatWsFunction, - .finalizeFunc = NULL}, - {.name = "lower", - .type = FUNCTION_TYPE_LOWER, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateInOutStr, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = lowerFunction, - .finalizeFunc = NULL}, - {.name = "upper", - .type = FUNCTION_TYPE_UPPER, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateInOutStr, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = upperFunction, - .finalizeFunc = NULL}, - {.name = "ltrim", - .type = FUNCTION_TYPE_LTRIM, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateInOutStr, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = ltrimFunction, - .finalizeFunc = NULL}, - {.name = "rtrim", - .type = FUNCTION_TYPE_RTRIM, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateInOutStr, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = rtrimFunction, - .finalizeFunc = NULL}, - {.name = "substr", - .type = FUNCTION_TYPE_SUBSTR, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateSubstr, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = substrFunction, - .finalizeFunc = NULL}, - {.name = "cast", - .type = FUNCTION_TYPE_CAST, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateCast, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = castFunction, - .finalizeFunc = NULL}, - {.name = "to_iso8601", - .type = FUNCTION_TYPE_TO_ISO8601, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateToIso8601, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = toISO8601Function, - .finalizeFunc = NULL}, - {.name = "to_unixtimestamp", - .type = FUNCTION_TYPE_TO_UNIXTIMESTAMP, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateToUnixtimestamp, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = toUnixtimestampFunction, - .finalizeFunc = NULL}, - {.name = "timetruncate", - .type = FUNCTION_TYPE_TIMETRUNCATE, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateTimeTruncate, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = 
timeTruncateFunction, - .finalizeFunc = NULL}, - {.name = "timediff", - .type = FUNCTION_TYPE_TIMEDIFF, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateTimeDiff, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = timeDiffFunction, - .finalizeFunc = NULL}, - {.name = "now", - .type = FUNCTION_TYPE_NOW, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC, - .translateFunc = translateTimePseudoColumn, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = nowFunction, - .finalizeFunc = NULL}, - {.name = "today", - .type = FUNCTION_TYPE_TODAY, - .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC, - .translateFunc = translateTimePseudoColumn, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = todayFunction, - .finalizeFunc = NULL}, - {.name = "timezone", - .type = FUNCTION_TYPE_TIMEZONE, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateTimezone, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = timezoneFunction, - .finalizeFunc = NULL}, - {.name = "_rowts", - .type = FUNCTION_TYPE_ROWTS, - .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC, - .translateFunc = translateTimePseudoColumn, - .getEnvFunc = getTimePseudoFuncEnv, - .initFunc = NULL, - .sprocessFunc = NULL, - .finalizeFunc = NULL}, - {.name = "tbname", - .type = FUNCTION_TYPE_TBNAME, - .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC, - .translateFunc = translateTbnameColumn, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = NULL, - .finalizeFunc = NULL}, - {.name = "_qstartts", - .type = FUNCTION_TYPE_QSTARTTS, - .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, - .translateFunc = translateTimePseudoColumn, - .getEnvFunc = getTimePseudoFuncEnv, - .initFunc = NULL, - .sprocessFunc = qStartTsFunction, - .finalizeFunc = NULL}, - {.name = "_qendts", - .type = FUNCTION_TYPE_QENDTS, - .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, - .translateFunc = translateTimePseudoColumn, - .getEnvFunc = getTimePseudoFuncEnv, - .initFunc = NULL, - .sprocessFunc = qEndTsFunction, - .finalizeFunc = NULL}, - {.name = "_wstartts", - .type = FUNCTION_TYPE_WSTARTTS, - .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, - .translateFunc = translateTimePseudoColumn, - .getEnvFunc = getTimePseudoFuncEnv, - .initFunc = NULL, - .sprocessFunc = winStartTsFunction, - .finalizeFunc = NULL}, - {.name = "_wendts", - .type = FUNCTION_TYPE_WENDTS, - .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, - .translateFunc = translateTimePseudoColumn, - .getEnvFunc = getTimePseudoFuncEnv, - .initFunc = NULL, - .sprocessFunc = winEndTsFunction, - .finalizeFunc = NULL}, - {.name = "_wduration", - .type = FUNCTION_TYPE_WDURATION, - .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, - .translateFunc = translateWduration, - .getEnvFunc = getTimePseudoFuncEnv, - .initFunc = NULL, - .sprocessFunc = winDurFunction, - .finalizeFunc = NULL}, - {.name = "to_json", - .type = FUNCTION_TYPE_TO_JSON, - .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateToJson, - .getEnvFunc = NULL, - .initFunc = NULL, - .sprocessFunc = toJsonFunction, - .finalizeFunc = NULL}}; + { + .name = "count", + .type = FUNCTION_TYPE_COUNT, + .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, + .translateFunc = translateCount, + .dataRequiredFunc = countDataRequired, + .getEnvFunc = getCountFuncEnv, + .initFunc = functionSetup, + .processFunc = 
countFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "sum", + .type = FUNCTION_TYPE_SUM, + .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, + .translateFunc = translateSum, + .dataRequiredFunc = statisDataRequired, + .getEnvFunc = getSumFuncEnv, + .initFunc = functionSetup, + .processFunc = sumFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "min", + .type = FUNCTION_TYPE_MIN, + .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, + .translateFunc = translateInOutNum, + .dataRequiredFunc = statisDataRequired, + .getEnvFunc = getMinmaxFuncEnv, + .initFunc = minFunctionSetup, + .processFunc = minFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "max", + .type = FUNCTION_TYPE_MAX, + .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, + .translateFunc = translateInOutNum, + .dataRequiredFunc = statisDataRequired, + .getEnvFunc = getMinmaxFuncEnv, + .initFunc = maxFunctionSetup, + .processFunc = maxFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "stddev", + .type = FUNCTION_TYPE_STDDEV, + .classification = FUNC_MGT_AGG_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = getStddevFuncEnv, + .initFunc = stddevFunctionSetup, + .processFunc = stddevFunction, + .finalizeFunc = stddevFinalize + }, + { + .name = "avg", + .type = FUNCTION_TYPE_AVG, + .classification = FUNC_MGT_AGG_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = getAvgFuncEnv, + .initFunc = avgFunctionSetup, + .processFunc = avgFunction, + .finalizeFunc = avgFinalize + }, + { + .name = "percentile", + .type = FUNCTION_TYPE_PERCENTILE, + .classification = FUNC_MGT_AGG_FUNC, + .translateFunc = translatePercentile, + .getEnvFunc = getPercentileFuncEnv, + .initFunc = percentileFunctionSetup, + .processFunc = percentileFunction, + .finalizeFunc = percentileFinalize + }, + { + .name = "apercentile", + .type = FUNCTION_TYPE_APERCENTILE, + .classification = FUNC_MGT_AGG_FUNC, + .translateFunc = translateApercentile, + .getEnvFunc = getMinmaxFuncEnv, + .initFunc = maxFunctionSetup, + .processFunc = maxFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "top", + .type = FUNCTION_TYPE_TOP, + .classification = FUNC_MGT_AGG_FUNC, + .translateFunc = translateTop, + .getEnvFunc = getTopBotFuncEnv, + .initFunc = functionSetup, + .processFunc = topFunction, + .finalizeFunc = topBotFinalize, + }, + { + .name = "bottom", + .type = FUNCTION_TYPE_BOTTOM, + .classification = FUNC_MGT_AGG_FUNC, + .translateFunc = translateBottom, + .getEnvFunc = getMinmaxFuncEnv, + .initFunc = maxFunctionSetup, + .processFunc = maxFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "spread", + .type = FUNCTION_TYPE_SPREAD, + .classification = FUNC_MGT_AGG_FUNC, + .translateFunc = translateSpread, + .dataRequiredFunc = statisDataRequired, + .getEnvFunc = getSpreadFuncEnv, + .initFunc = spreadFunctionSetup, + .processFunc = spreadFunction, + .finalizeFunc = spreadFinalize + }, + { + .name = "last_row", + .type = FUNCTION_TYPE_LAST_ROW, + .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC, + .translateFunc = translateLastRow, + .getEnvFunc = getMinmaxFuncEnv, + .initFunc = maxFunctionSetup, + .processFunc = maxFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "first", + .type = FUNCTION_TYPE_FIRST, + .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC, + .translateFunc = translateFirstLast, + .getEnvFunc = getFirstLastFuncEnv, + .initFunc = functionSetup, + .processFunc = 
firstFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "last", + .type = FUNCTION_TYPE_LAST, + .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC, + .translateFunc = translateFirstLast, + .getEnvFunc = getFirstLastFuncEnv, + .initFunc = functionSetup, + .processFunc = lastFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "diff", + .type = FUNCTION_TYPE_DIFF, + .classification = FUNC_MGT_NONSTANDARD_SQL_FUNC, + .translateFunc = translateInOutNum, + .getEnvFunc = getDiffFuncEnv, + .initFunc = diffFunctionSetup, + .processFunc = diffFunction, + .finalizeFunc = functionFinalize + }, + { + .name = "abs", + .type = FUNCTION_TYPE_ABS, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInOutNum, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = absFunction, + .finalizeFunc = NULL + }, + { + .name = "log", + .type = FUNCTION_TYPE_LOG, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateIn2NumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = logFunction, + .finalizeFunc = NULL + }, + { + .name = "pow", + .type = FUNCTION_TYPE_POW, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateIn2NumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = powFunction, + .finalizeFunc = NULL + }, + { + .name = "sqrt", + .type = FUNCTION_TYPE_SQRT, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = sqrtFunction, + .finalizeFunc = NULL + }, + { + .name = "ceil", + .type = FUNCTION_TYPE_CEIL, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInOutNum, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = ceilFunction, + .finalizeFunc = NULL + }, + { + .name = "floor", + .type = FUNCTION_TYPE_FLOOR, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInOutNum, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = floorFunction, + .finalizeFunc = NULL + }, + { + .name = "round", + .type = FUNCTION_TYPE_ROUND, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInOutNum, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = roundFunction, + .finalizeFunc = NULL + }, + { + .name = "sin", + .type = FUNCTION_TYPE_SIN, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = sinFunction, + .finalizeFunc = NULL + }, + { + .name = "cos", + .type = FUNCTION_TYPE_COS, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = cosFunction, + .finalizeFunc = NULL + }, + { + .name = "tan", + .type = FUNCTION_TYPE_TAN, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = tanFunction, + .finalizeFunc = NULL + }, + { + .name = "asin", + .type = FUNCTION_TYPE_ASIN, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = asinFunction, + .finalizeFunc = NULL + }, + { + .name = "acos", + .type = FUNCTION_TYPE_ACOS, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateInNumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = acosFunction, + .finalizeFunc = NULL + }, + { + .name = "atan", + .type = FUNCTION_TYPE_ATAN, + .classification = FUNC_MGT_SCALAR_FUNC, + 
.translateFunc = translateInNumOutDou, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = atanFunction, + .finalizeFunc = NULL + }, + { + .name = "length", + .type = FUNCTION_TYPE_LENGTH, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateLength, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = lengthFunction, + .finalizeFunc = NULL + }, + { + .name = "char_length", + .type = FUNCTION_TYPE_CHAR_LENGTH, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateLength, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = charLengthFunction, + .finalizeFunc = NULL + }, + { + .name = "concat", + .type = FUNCTION_TYPE_CONCAT, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateConcat, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = concatFunction, + .finalizeFunc = NULL + }, + { + .name = "concat_ws", + .type = FUNCTION_TYPE_CONCAT_WS, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateConcatWs, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = concatWsFunction, + .finalizeFunc = NULL + }, + { + .name = "lower", + .type = FUNCTION_TYPE_LOWER, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateInOutStr, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = lowerFunction, + .finalizeFunc = NULL + }, + { + .name = "upper", + .type = FUNCTION_TYPE_UPPER, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateInOutStr, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = upperFunction, + .finalizeFunc = NULL + }, + { + .name = "ltrim", + .type = FUNCTION_TYPE_LTRIM, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateInOutStr, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = ltrimFunction, + .finalizeFunc = NULL + }, + { + .name = "rtrim", + .type = FUNCTION_TYPE_RTRIM, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateInOutStr, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = rtrimFunction, + .finalizeFunc = NULL + }, + { + .name = "substr", + .type = FUNCTION_TYPE_SUBSTR, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .translateFunc = translateSubstr, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = substrFunction, + .finalizeFunc = NULL + }, + { + .name = "cast", + .type = FUNCTION_TYPE_CAST, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateCast, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = castFunction, + .finalizeFunc = NULL + }, + { + .name = "to_iso8601", + .type = FUNCTION_TYPE_TO_ISO8601, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateToIso8601, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = toISO8601Function, + .finalizeFunc = NULL + }, + { + .name = "to_unixtimestamp", + .type = FUNCTION_TYPE_TO_UNIXTIMESTAMP, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateToUnixtimestamp, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = toUnixtimestampFunction, + .finalizeFunc = NULL + }, + { + .name = "timetruncate", + .type = FUNCTION_TYPE_TIMETRUNCATE, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateTimeTruncate, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = timeTruncateFunction, + .finalizeFunc = NULL + }, + { + .name = 
"timediff", + .type = FUNCTION_TYPE_TIMEDIFF, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateTimeDiff, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = timeDiffFunction, + .finalizeFunc = NULL + }, + { + .name = "now", + .type = FUNCTION_TYPE_NOW, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC, + .translateFunc = translateTimePseudoColumn, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = nowFunction, + .finalizeFunc = NULL + }, + { + .name = "today", + .type = FUNCTION_TYPE_TODAY, + .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC, + .translateFunc = translateTimePseudoColumn, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = todayFunction, + .finalizeFunc = NULL + }, + { + .name = "timezone", + .type = FUNCTION_TYPE_TIMEZONE, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateTimezone, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = timezoneFunction, + .finalizeFunc = NULL + }, + { + .name = "_rowts", + .type = FUNCTION_TYPE_ROWTS, + .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC, + .translateFunc = translateTimePseudoColumn, + .getEnvFunc = getTimePseudoFuncEnv, + .initFunc = NULL, + .sprocessFunc = NULL, + .finalizeFunc = NULL + }, + { + .name = "tbname", + .type = FUNCTION_TYPE_TBNAME, + .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC, + .translateFunc = translateTbnameColumn, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = NULL, + .finalizeFunc = NULL + }, + { + .name = "_qstartts", + .type = FUNCTION_TYPE_QSTARTTS, + .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, + .translateFunc = translateTimePseudoColumn, + .getEnvFunc = getTimePseudoFuncEnv, + .initFunc = NULL, + .sprocessFunc = qStartTsFunction, + .finalizeFunc = NULL + }, + { + .name = "_qendts", + .type = FUNCTION_TYPE_QENDTS, + .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, + .translateFunc = translateTimePseudoColumn, + .getEnvFunc = getTimePseudoFuncEnv, + .initFunc = NULL, + .sprocessFunc = qEndTsFunction, + .finalizeFunc = NULL + }, + { + .name = "_wstartts", + .type = FUNCTION_TYPE_WSTARTTS, + .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, + .translateFunc = translateTimePseudoColumn, + .getEnvFunc = getTimePseudoFuncEnv, + .initFunc = NULL, + .sprocessFunc = winStartTsFunction, + .finalizeFunc = NULL + }, + { + .name = "_wendts", + .type = FUNCTION_TYPE_WENDTS, + .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, + .translateFunc = translateTimePseudoColumn, + .getEnvFunc = getTimePseudoFuncEnv, + .initFunc = NULL, + .sprocessFunc = winEndTsFunction, + .finalizeFunc = NULL + }, + { + .name = "_wduration", + .type = FUNCTION_TYPE_WDURATION, + .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC, + .translateFunc = translateWduration, + .getEnvFunc = getTimePseudoFuncEnv, + .initFunc = NULL, + .sprocessFunc = winDurFunction, + .finalizeFunc = NULL + }, + { + .name = "to_json", + .type = FUNCTION_TYPE_TO_JSON, + .classification = FUNC_MGT_SCALAR_FUNC, + .translateFunc = translateToJson, + .getEnvFunc = NULL, + .initFunc = NULL, + .sprocessFunc = toJsonFunction, + .finalizeFunc = NULL + } +}; const int32_t funcMgtBuiltinsNum = (sizeof(funcMgtBuiltins) / sizeof(SBuiltinFuncDefinition)); diff --git a/source/libs/function/src/builtinsimpl.c b/source/libs/function/src/builtinsimpl.c index 43fe6a06ed..f2597bbf0a 100644 --- a/source/libs/function/src/builtinsimpl.c +++ 
b/source/libs/function/src/builtinsimpl.c @@ -71,6 +71,13 @@ typedef struct SDiffInfo { union { int64_t i64; double d64;} prev; } SDiffInfo; +typedef struct SSpreadInfo { + double result; + bool hasResult; + double min; + double max; +} SSpreadInfo; + #define SET_VAL(_info, numOfElem, res) \ do { \ if ((numOfElem) <= 0) { \ @@ -1630,3 +1637,101 @@ int32_t topBotFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock) { return pEntryInfo->numOfRes; } + +bool getSpreadFuncEnv(SFunctionNode* UNUSED_PARAM(pFunc), SFuncExecEnv* pEnv) { + pEnv->calcMemSize = sizeof(SSpreadInfo); + return true; +} + +bool spreadFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo) { + if (!functionSetup(pCtx, pResultInfo)) { + return false; + } + + SSpreadInfo* pInfo = GET_ROWCELL_INTERBUF(pResultInfo); + SET_DOUBLE_VAL(&pInfo->min, DBL_MAX); + SET_DOUBLE_VAL(&pInfo->max, -DBL_MAX); + pInfo->hasResult = false; + return true; +} + +int32_t spreadFunction(SqlFunctionCtx *pCtx) { + int32_t numOfElems = 0; + + // Only the pre-computing information loaded and actual data does not loaded + SInputColumnInfoData* pInput = &pCtx->input; + SColumnDataAgg *pAgg = pInput->pColumnDataAgg[0]; + int32_t type = pInput->pData[0]->info.type; + + SSpreadInfo* pInfo = GET_ROWCELL_INTERBUF(GET_RES_INFO(pCtx)); + + if (pInput->colDataAggIsSet) { + numOfElems = pInput->numOfRows - pAgg->numOfNull; + if (numOfElems == 0) { + goto _spread_over; + } + double tmin = 0.0, tmax = 0.0; + if (IS_SIGNED_NUMERIC_TYPE(type)) { + tmin = (double)GET_INT64_VAL(&pAgg->min); + tmax = (double)GET_INT64_VAL(&pAgg->max); + } else if (IS_FLOAT_TYPE(type)) { + tmin = GET_DOUBLE_VAL(&pAgg->min); + tmax = GET_DOUBLE_VAL(&pAgg->max); + } else if (IS_UNSIGNED_NUMERIC_TYPE(type)) { + tmin = (double)GET_UINT64_VAL(&pAgg->min); + tmax = (double)GET_UINT64_VAL(&pAgg->max); + } + + if (GET_DOUBLE_VAL(&pInfo->min) > tmin) { + SET_DOUBLE_VAL(&pInfo->min, tmin); + } + + if (GET_DOUBLE_VAL(&pInfo->max) < tmax) { + SET_DOUBLE_VAL(&pInfo->max, tmax); + } + + } else { // computing based on the true data block + SColumnInfoData* pCol = pInput->pData[0]; + + int32_t start = pInput->startRowIndex; + int32_t numOfRows = pInput->numOfRows; + + // check the valid data one by one + for (int32_t i = start; i < pInput->numOfRows + start; ++i) { + if (colDataIsNull_f(pCol->nullbitmap, i)) { + continue; + } + + char *data = colDataGetData(pCol, i); + + double v = 0; + GET_TYPED_DATA(v, double, type, data); + if (v < GET_DOUBLE_VAL(&pInfo->min)) { + SET_DOUBLE_VAL(&pInfo->min, v); + } + + if (v > GET_DOUBLE_VAL(&pInfo->max)) { + SET_DOUBLE_VAL(&pInfo->max, v); + } + + numOfElems += 1; + } + } + +_spread_over: + // data in the check operation are all null, not output + SET_VAL(GET_RES_INFO(pCtx), numOfElems, 1); + if (numOfElems > 0) { + pInfo->hasResult = true; + } + + return TSDB_CODE_SUCCESS; +} + +int32_t spreadFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock) { + SSpreadInfo* pInfo = GET_ROWCELL_INTERBUF(GET_RES_INFO(pCtx)); + if (pInfo->hasResult == true) { + SET_DOUBLE_VAL(&pInfo->result, pInfo->max - pInfo->min); + } + return functionFinalize(pCtx, pBlock); +} From 60b05c8a5180ff9b4c0f0667cee750584ac46434 Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Thu, 28 Apr 2022 23:32:14 +0800 Subject: [PATCH 28/33] refactor(index): refactor index code --- source/libs/executor/src/indexoperator.c | 38 +++++++++++------------- 1 file changed, 17 insertions(+), 21 deletions(-) diff --git a/source/libs/executor/src/indexoperator.c b/source/libs/executor/src/indexoperator.c 
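For readers skimming the spread patch above: `spreadFunctionSetup`, `spreadFunction`, and `spreadFinalize` together track a running minimum and maximum and report `max - min`. The standalone sketch below mirrors only that core logic (with NAN standing in for NULL); it is a simplified illustration, not the TDengine implementation, which additionally consumes pre-computed block statistics (`SColumnDataAgg`) and the column NULL bitmap.

```c
#include <float.h>
#include <math.h>
#include <stdbool.h>
#include <stdio.h>

/* Stand-in for the state kept in SSpreadInfo: running min/max plus a flag
 * recording whether any non-NULL value has been seen. */
typedef struct {
  double min;
  double max;
  bool   hasResult;
} SpreadState;

/* Mirrors spreadFunctionSetup(): extreme initial bounds, no result yet. */
static void spreadInit(SpreadState *st) {
  st->min = DBL_MAX;
  st->max = -DBL_MAX;
  st->hasResult = false;
}

/* Mirrors the data-block branch of spreadFunction(); NAN stands in for NULL. */
static void spreadUpdate(SpreadState *st, const double *vals, int n) {
  for (int i = 0; i < n; ++i) {
    if (isnan(vals[i])) continue;          /* skip NULL cells */
    if (vals[i] < st->min) st->min = vals[i];
    if (vals[i] > st->max) st->max = vals[i];
    st->hasResult = true;
  }
}

/* Mirrors spreadFinalize(): the result is only defined if data was seen. */
static bool spreadResult(const SpreadState *st, double *out) {
  if (!st->hasResult) return false;        /* every input was NULL */
  *out = st->max - st->min;
  return true;
}

int main(void) {
  double block[] = {3.0, NAN, 9.5, -2.5, 7.0};
  SpreadState st;
  spreadInit(&st);
  spreadUpdate(&st, block, (int)(sizeof(block) / sizeof(block[0])));

  double spread = 0.0;
  if (spreadResult(&st, &spread)) {
    printf("spread = %.2f\n", spread);     /* 12.00, i.e. 9.5 - (-2.5) */
  }
  return 0;
}
```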
index 34b04c04de..1f8e73b80a 100644 --- a/source/libs/executor/src/indexoperator.c +++ b/source/libs/executor/src/indexoperator.c @@ -54,7 +54,9 @@ typedef struct SIFParam { typedef int32_t (*sif_func_t)(SNode *left, SNode *rigth, SIFParam *output); // construct tag filter operator later -static void destroyTagFilterOperatorInfo(void *param) { STagFilterOperatorInfo *pInfo = (STagFilterOperatorInfo *)param; } +static void destroyTagFilterOperatorInfo(void *param) { + STagFilterOperatorInfo *pInfo = (STagFilterOperatorInfo *)param; +} static void sifFreeParam(SIFParam *param) { if (param == NULL) return; @@ -62,8 +64,9 @@ static void sifFreeParam(SIFParam *param) { } static int32_t sifGetOperParamNum(EOperatorType ty) { - if (OP_TYPE_IS_NULL == ty || OP_TYPE_IS_NOT_NULL == ty || OP_TYPE_IS_TRUE == ty || OP_TYPE_IS_NOT_TRUE == ty || OP_TYPE_IS_FALSE == ty || - OP_TYPE_IS_NOT_FALSE == ty || OP_TYPE_IS_UNKNOWN == ty || OP_TYPE_IS_NOT_UNKNOWN == ty || OP_TYPE_MINUS == ty) { + if (OP_TYPE_IS_NULL == ty || OP_TYPE_IS_NOT_NULL == ty || OP_TYPE_IS_TRUE == ty || OP_TYPE_IS_NOT_TRUE == ty || + OP_TYPE_IS_FALSE == ty || OP_TYPE_IS_NOT_FALSE == ty || OP_TYPE_IS_UNKNOWN == ty || + OP_TYPE_IS_NOT_UNKNOWN == ty || OP_TYPE_MINUS == ty) { return 1; } return 2; @@ -267,7 +270,8 @@ _return: static int32_t sifExecLogic(SLogicConditionNode *node, SIFCtx *ctx, SIFParam *output) { if (NULL == node->pParameterList || node->pParameterList->length <= 0) { - qError("invalid logic parameter list, list:%p, paramNum:%d", node->pParameterList, node->pParameterList ? node->pParameterList->length : 0); + qError("invalid logic parameter list, list:%p, paramNum:%d", node->pParameterList, + node->pParameterList ? node->pParameterList->length : 0); return TSDB_CODE_QRY_INVALID_INPUT; } @@ -341,7 +345,8 @@ static EDealRes sifWalkOper(SNode *pNode, void *context) { } EDealRes sifCalcWalker(SNode *node, void *context) { - if (QUERY_NODE_VALUE == nodeType(node) || QUERY_NODE_NODE_LIST == nodeType(node) || QUERY_NODE_COLUMN == nodeType(node)) { + if (QUERY_NODE_VALUE == nodeType(node) || QUERY_NODE_NODE_LIST == nodeType(node) || + QUERY_NODE_COLUMN == nodeType(node)) { return DEAL_RES_CONTINUE; } SIFCtx *ctx = (SIFCtx *)context; @@ -383,21 +388,20 @@ static int32_t sifCalculate(SNode *pNode, SIFParam *pDst) { return TSDB_CODE_QRY_OUT_OF_MEMORY; } nodesWalkExprPostOrder(pNode, sifCalcWalker, &ctx); - if (ctx.code != TSDB_CODE_SUCCESS) { - return ctx.code; - } + SIF_ERR_RET(ctx.code); + if (pDst) { SIFParam *res = (SIFParam *)taosHashGet(ctx.pRes, (void *)&pNode, POINTER_BYTES); if (res == NULL) { qError("no valid res in hash, node:(%p), type(%d)", (void *)&pNode, nodeType(pNode)); - return TSDB_CODE_QRY_APP_ERROR; + SIF_ERR_RET(TSDB_CODE_QRY_APP_ERROR); } taosArrayAddAll(pDst->result, res->result); sifFreeParam(res); taosHashRemove(ctx.pRes, (void *)&pNode, POINTER_BYTES); } - return TSDB_CODE_SUCCESS; + SIF_RET(code); } int32_t doFilterTag(const SNode *pFilterNode, SArray *result) { @@ -407,22 +411,14 @@ int32_t doFilterTag(const SNode *pFilterNode, SArray *result) { SFilterInfo *filter = NULL; // todo move to the initialization function - int32_t code = filterInitFromNode((SNode *)pFilterNode, &filter, 0); - if (code != TSDB_CODE_SUCCESS) { - return code; - } + SIF_ERR_RET(filterInitFromNode((SNode *)pFilterNode, &filter, 0)); SIFParam param = {0}; - code = sifCalculate((SNode *)pFilterNode, ¶m); - - if (code != TSDB_CODE_SUCCESS) { - return code; - } + SIF_ERR_RET(sifCalculate((SNode *)pFilterNode, ¶m)); taosArrayAddAll(result, 
param.result); sifFreeParam(¶m); - - return code; + SIF_RET(TSDB_CODE_SUCCESS); } SIdxFltStatus idxGetFltStatus(SNode *pFilterNode) { From 35fccc49d8da8b7a49c864e6b428b9d9b8cd8ac3 Mon Sep 17 00:00:00 2001 From: Haojun Liao Date: Fri, 29 Apr 2022 08:48:18 +0800 Subject: [PATCH 29/33] refactor(query): remove redundant attributes in some structs. --- source/dnode/mnode/impl/inc/mndDef.h | 3 -- source/dnode/mnode/impl/src/mndBnode.c | 2 +- source/dnode/mnode/impl/src/mndCluster.c | 2 +- source/dnode/mnode/impl/src/mndDb.c | 7 +-- source/dnode/mnode/impl/src/mndDnode.c | 2 +- source/dnode/mnode/impl/src/mndFunc.c | 8 +-- source/dnode/mnode/impl/src/mndMnode.c | 4 +- source/dnode/mnode/impl/src/mndProfile.c | 65 ++++++++++++------------ source/dnode/mnode/impl/src/mndQnode.c | 2 +- source/dnode/mnode/impl/src/mndShow.c | 10 ---- source/dnode/mnode/impl/src/mndSnode.c | 2 +- source/dnode/mnode/impl/src/mndTrans.c | 8 +-- source/dnode/mnode/impl/src/mndUser.c | 2 +- source/dnode/mnode/impl/src/mndVgroup.c | 2 +- 14 files changed, 54 insertions(+), 65 deletions(-) diff --git a/source/dnode/mnode/impl/inc/mndDef.h b/source/dnode/mnode/impl/inc/mndDef.h index 4f73f2561e..118fbf1a03 100644 --- a/source/dnode/mnode/impl/inc/mndDef.h +++ b/source/dnode/mnode/impl/inc/mndDef.h @@ -388,15 +388,12 @@ typedef struct { int8_t type; int8_t replica; int16_t numOfColumns; - int32_t rowSize; int32_t numOfRows; void* pIter; SMnode* pMnode; STableMetaRsp* pMeta; bool sysDbRsp; char db[TSDB_DB_FNAME_LEN]; - int16_t offset[TSDB_MAX_COLUMNS]; - int32_t bytes[TSDB_MAX_COLUMNS]; } SShowObj; typedef struct { diff --git a/source/dnode/mnode/impl/src/mndBnode.c b/source/dnode/mnode/impl/src/mndBnode.c index 168bd1db76..34d1223c0a 100644 --- a/source/dnode/mnode/impl/src/mndBnode.c +++ b/source/dnode/mnode/impl/src/mndBnode.c @@ -453,7 +453,7 @@ static int32_t mndRetrieveBnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false); char buf[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(buf, pObj->pDnode->ep, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(buf, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, buf, false); diff --git a/source/dnode/mnode/impl/src/mndCluster.c b/source/dnode/mnode/impl/src/mndCluster.c index 57326b26ce..257db0dcd4 100644 --- a/source/dnode/mnode/impl/src/mndCluster.c +++ b/source/dnode/mnode/impl/src/mndCluster.c @@ -194,7 +194,7 @@ static int32_t mndRetrieveClusters(SNodeMsg *pMsg, SShowObj *pShow, SSDataBlock colDataAppend(pColInfo, numOfRows, (const char*) &pCluster->id, false); char buf[tListLen(pCluster->name) + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(buf, pCluster->name, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(buf, pCluster->name, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, buf, false); diff --git a/source/dnode/mnode/impl/src/mndDb.c b/source/dnode/mnode/impl/src/mndDb.c index e615860e68..442bfb5c8a 100644 --- a/source/dnode/mnode/impl/src/mndDb.c +++ b/source/dnode/mnode/impl/src/mndDb.c @@ -1387,12 +1387,13 @@ static void dumpDbInfoData(SSDataBlock *pBlock, SDbObj *pDb, SShowObj *pShow, in bool sysDb) { int32_t cols = 0; - char *buf = taosMemoryMalloc(pShow->bytes[cols]); + int32_t bytes = pShow->pMeta->pSchemas[cols].bytes; + char *buf = taosMemoryMalloc(bytes); const char *name = 
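The index refactor above swaps explicit `if (code != TSDB_CODE_SUCCESS) return code;` blocks for `SIF_ERR_RET`/`SIF_RET`. Their definitions are not shown in this hunk, so the sketch below assumes the conventional check-and-early-return expansion purely to illustrate the control-flow pattern; the placeholder error code and helper functions are likewise invented for the example.

```c
#include <stdint.h>
#include <stdio.h>

#define TSDB_CODE_SUCCESS 0
#define DEMO_ERR_CODE     1   /* placeholder, not a real TDengine error code */

/* Assumed shape of the helpers used in indexoperator.c: evaluate an
 * expression yielding an error code and return early when it fails. */
#define SIF_ERR_RET(c)                             \
  do {                                             \
    int32_t _code = (c);                           \
    if (_code != TSDB_CODE_SUCCESS) return _code;  \
  } while (0)

#define SIF_RET(c) return (c)

/* Hypothetical steps standing in for filterInitFromNode()/sifCalculate(). */
static int32_t stepInit(int fail) { return fail ? DEMO_ERR_CODE : TSDB_CODE_SUCCESS; }
static int32_t stepCalc(void)     { return TSDB_CODE_SUCCESS; }

/* With the macros, the happy path reads straight through and every failure
 * propagates its code to the caller, as in the refactored doFilterTag(). */
static int32_t demoFilterFlow(int fail) {
  SIF_ERR_RET(stepInit(fail));
  SIF_ERR_RET(stepCalc());
  SIF_RET(TSDB_CODE_SUCCESS);
}

int main(void) {
  printf("ok path:   %d\n", (int)demoFilterFlow(0));   /* 0 */
  printf("fail path: %d\n", (int)demoFilterFlow(1));   /* 1 */
  return 0;
}
```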
mndGetDbStr(pDb->name); if (name != NULL) { - STR_WITH_MAXSIZE_TO_VARSTR(buf, name, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(buf, name, bytes); } else { - STR_WITH_MAXSIZE_TO_VARSTR(buf, "NULL", pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(buf, "NULL", bytes); } char *status = "ready"; diff --git a/source/dnode/mnode/impl/src/mndDnode.c b/source/dnode/mnode/impl/src/mndDnode.c index 3eeec61dbb..a55cf41262 100644 --- a/source/dnode/mnode/impl/src/mndDnode.c +++ b/source/dnode/mnode/impl/src/mndDnode.c @@ -705,7 +705,7 @@ static int32_t mndRetrieveDnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p colDataAppend(pColInfo, numOfRows, (const char *)&pDnode->id, false); char buf[tListLen(pDnode->ep) + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(buf, pDnode->ep, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(buf, pDnode->ep, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, buf, false); diff --git a/source/dnode/mnode/impl/src/mndFunc.c b/source/dnode/mnode/impl/src/mndFunc.c index 09d0587019..b9331e6e03 100644 --- a/source/dnode/mnode/impl/src/mndFunc.c +++ b/source/dnode/mnode/impl/src/mndFunc.c @@ -517,14 +517,14 @@ static int32_t mndRetrieveFuncs(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB cols = 0; char b1[tListLen(pFunc->name) + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(b1, pFunc->name, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(b1, pFunc->name, pShow->pMeta->pSchemas[cols].bytes); SColumnInfoData *pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)b1, false); if (pFunc->pComment) { - char *b2 = taosMemoryCalloc(1, pShow->bytes[cols]); - STR_WITH_MAXSIZE_TO_VARSTR(b2, pFunc->pComment, pShow->bytes[cols]); + char *b2 = taosMemoryCalloc(1, pShow->pMeta->pSchemas[cols].bytes); + STR_WITH_MAXSIZE_TO_VARSTR(b2, pFunc->pComment, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)b2, false); @@ -540,7 +540,7 @@ static int32_t mndRetrieveFuncs(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB char b3[TSDB_TYPE_STR_MAX_LEN] = {0}; STR_WITH_MAXSIZE_TO_VARSTR(b3, mnodeGenTypeStr(buf, TSDB_TYPE_STR_MAX_LEN, pFunc->outputType, pFunc->outputLen), - pShow->bytes[cols]); + pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)b3, false); diff --git a/source/dnode/mnode/impl/src/mndMnode.c b/source/dnode/mnode/impl/src/mndMnode.c index b51c545a6d..465bf4d5a1 100644 --- a/source/dnode/mnode/impl/src/mndMnode.c +++ b/source/dnode/mnode/impl/src/mndMnode.c @@ -619,14 +619,14 @@ static int32_t mndRetrieveMnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false); char b1[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(b1, pObj->pDnode->ep, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(b1, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, b1, false); const char *roles = syncStr(pObj->role); char *b2 = taosMemoryCalloc(1, strlen(roles) + VARSTR_HEADER_SIZE); - STR_WITH_MAXSIZE_TO_VARSTR(b2, roles, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(b2, roles, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, 
(const char *)b2, false); diff --git a/source/dnode/mnode/impl/src/mndProfile.c b/source/dnode/mnode/impl/src/mndProfile.c index 7bd22c2de5..59d07fd4aa 100644 --- a/source/dnode/mnode/impl/src/mndProfile.c +++ b/source/dnode/mnode/impl/src/mndProfile.c @@ -570,38 +570,39 @@ static int32_t mndRetrieveConns(SNodeMsg *pReq, SShowObj *pShow, char *data, int if (pConn == NULL) break; cols = 0; - - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; +#if 0 + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(uint32_t *)pWrite = pConn->id; cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->bytes[cols]); + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->pMeta->pSchemas[cols].bytes); cols++; // app name - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->app, pShow->bytes[cols]); + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->app, pShow->pMeta->pSchemas[cols].bytes); cols++; // app pid - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int32_t *)pWrite = pConn->pid; cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; taosIpPort2String(pConn->ip, pConn->port, ipStr); - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, ipStr, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, ipStr, pShow->pMeta->pSchemas[cols].bytes); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int64_t *)pWrite = pConn->loginTimeMs; cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; if (pConn->lastAccessTimeMs < pConn->loginTimeMs) pConn->lastAccessTimeMs = pConn->loginTimeMs; *(int64_t *)pWrite = pConn->lastAccessTimeMs; cols++; +#endif numOfRows++; } @@ -643,67 +644,67 @@ static int32_t mndRetrieveQueries(SNodeMsg *pReq, SShowObj *pShow, char *data, i SQueryDesc *pDesc = pConn->pQueries + i; cols = 0; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int64_t *)pWrite = htobe64(pDesc->queryId); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int64_t *)pWrite = htobe64(pConn->id); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->bytes[cols]); + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->pMeta->pSchemas[cols].bytes); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + 
pShow->pMeta->pSchemas[cols].bytes * numOfRows; snprintf(str, tListLen(str), "%s:%u", taosIpStr(pConn->ip), pConn->port); - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->pMeta->pSchemas[cols].bytes); cols++; char handleBuf[24] = {0}; snprintf(handleBuf, tListLen(handleBuf), "%" PRIu64, htobe64(pDesc->qId)); - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, handleBuf, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, handleBuf, pShow->pMeta->pSchemas[cols].bytes); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int64_t *)pWrite = htobe64(pDesc->stime); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int64_t *)pWrite = htobe64(pDesc->useconds); cols++; snprintf(str, tListLen(str), "0x%" PRIx64, htobe64(pDesc->sqlObjId)); - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->bytes[cols]); + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->pMeta->pSchemas[cols].bytes); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int32_t *)pWrite = htonl(pDesc->pid); cols++; char epBuf[TSDB_EP_LEN + 1] = {0}; snprintf(epBuf, tListLen(epBuf), "%s:%u", pDesc->fqdn, pConn->port); - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, epBuf, pShow->bytes[cols]); + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, epBuf, pShow->pMeta->pSchemas[cols].bytes); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(bool *)pWrite = pDesc->stableQuery; cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; *(int32_t *)pWrite = htonl(pDesc->numOfSub); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->subSqlInfo, pShow->bytes[cols]); + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->subSqlInfo, pShow->pMeta->pSchemas[cols].bytes); cols++; - pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows; - STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->sql, pShow->bytes[cols]); + pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows; + STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->sql, pShow->pMeta->pSchemas[cols].bytes); cols++; numOfRows++; diff --git a/source/dnode/mnode/impl/src/mndQnode.c b/source/dnode/mnode/impl/src/mndQnode.c index cec5933ba3..ae223c34b4 100644 --- a/source/dnode/mnode/impl/src/mndQnode.c +++ 
b/source/dnode/mnode/impl/src/mndQnode.c @@ -517,7 +517,7 @@ static int32_t mndRetrieveQnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false); char ep[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)ep, false); diff --git a/source/dnode/mnode/impl/src/mndShow.c b/source/dnode/mnode/impl/src/mndShow.c index fdc19c656a..be333d154a 100644 --- a/source/dnode/mnode/impl/src/mndShow.c +++ b/source/dnode/mnode/impl/src/mndShow.c @@ -209,16 +209,6 @@ static int32_t mndProcessRetrieveSysTableReq(SNodeMsg *pReq) { pShow->pMeta = pMeta; pShow->numOfColumns = pShow->pMeta->numOfColumns; - int32_t offset = 0; - - for (int32_t i = 0; i < pShow->pMeta->numOfColumns; ++i) { - pShow->offset[i] = offset; - - int32_t bytes = pShow->pMeta->pSchemas[i].bytes; - pShow->rowSize += bytes; - pShow->bytes[i] = bytes; - offset += bytes; - } } else { pShow = mndAcquireShowObj(pMnode, retrieveReq.showId); if (pShow == NULL) { diff --git a/source/dnode/mnode/impl/src/mndSnode.c b/source/dnode/mnode/impl/src/mndSnode.c index b7d0ed8f5a..58db4772e7 100644 --- a/source/dnode/mnode/impl/src/mndSnode.c +++ b/source/dnode/mnode/impl/src/mndSnode.c @@ -463,7 +463,7 @@ static int32_t mndRetrieveSnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false); char ep[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)ep, false); diff --git a/source/dnode/mnode/impl/src/mndTrans.c b/source/dnode/mnode/impl/src/mndTrans.c index 2d10c4a7a5..bbd952b6d0 100644 --- a/source/dnode/mnode/impl/src/mndTrans.c +++ b/source/dnode/mnode/impl/src/mndTrans.c @@ -1351,17 +1351,17 @@ static int32_t mndRetrieveTrans(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB colDataAppend(pColInfo, numOfRows, (const char *)&pTrans->createdTime, false); char stage[TSDB_TRANS_STAGE_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(stage, mndTransStr(pTrans->stage), pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(stage, mndTransStr(pTrans->stage), pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)stage, false); char dbname[TSDB_DB_NAME_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(dbname, mndGetDbStr(pTrans->dbname), pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(dbname, mndGetDbStr(pTrans->dbname), pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)dbname, false); char transType[TSDB_TRANS_TYPE_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(dbname, mndTransType(pTrans->transType), pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(dbname, mndTransType(pTrans->transType), pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)transType, false); @@ -1369,7 +1369,7 @@ static int32_t mndRetrieveTrans(SNodeMsg *pReq, SShowObj *pShow, 
SSDataBlock *pB colDataAppend(pColInfo, numOfRows, (const char *)&pTrans->lastExecTime, false); char lastError[TSDB_TRANS_ERROR_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(dbname, pTrans->lastError, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(dbname, pTrans->lastError, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)lastError, false); diff --git a/source/dnode/mnode/impl/src/mndUser.c b/source/dnode/mnode/impl/src/mndUser.c index b71e5e4f17..2ced813003 100644 --- a/source/dnode/mnode/impl/src/mndUser.c +++ b/source/dnode/mnode/impl/src/mndUser.c @@ -657,7 +657,7 @@ static int32_t mndRetrieveUsers(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB SColumnInfoData *pColInfo = taosArrayGet(pBlock->pDataBlock, cols); char name[TSDB_USER_LEN + VARSTR_HEADER_SIZE] = {0}; - STR_WITH_MAXSIZE_TO_VARSTR(name, pUser->user, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(name, pUser->user, pShow->pMeta->pSchemas[cols].bytes); colDataAppend(pColInfo, numOfRows, (const char *)name, false); diff --git a/source/dnode/mnode/impl/src/mndVgroup.c b/source/dnode/mnode/impl/src/mndVgroup.c index d76549b2ac..31e4a8ea7d 100644 --- a/source/dnode/mnode/impl/src/mndVgroup.c +++ b/source/dnode/mnode/impl/src/mndVgroup.c @@ -540,7 +540,7 @@ static int32_t mndRetrieveVgroups(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock* char buf1[20] = {0}; const char *role = syncStr(pVgroup->vnodeGid[i].role); - STR_WITH_MAXSIZE_TO_VARSTR(buf1, role, pShow->bytes[cols]); + STR_WITH_MAXSIZE_TO_VARSTR(buf1, role, pShow->pMeta->pSchemas[cols].bytes); pColInfo = taosArrayGet(pBlock->pDataBlock, cols++); colDataAppend(pColInfo, numOfRows, (const char *)buf1, false); From 2a696d2790f1a0b0da6cb6df89d5a5ce14b28a42 Mon Sep 17 00:00:00 2001 From: Haojun Liao Date: Fri, 29 Apr 2022 08:49:31 +0800 Subject: [PATCH 30/33] fix(query): add one more attribute in SSDataBlock. --- include/common/tcommon.h | 14 ++++++-------- include/libs/function/function.h | 2 +- source/libs/executor/src/scanoperator.c | 8 -------- 3 files changed, 7 insertions(+), 17 deletions(-) diff --git a/include/common/tcommon.h b/include/common/tcommon.h index fa428a994a..09a821ef06 100644 --- a/include/common/tcommon.h +++ b/include/common/tcommon.h @@ -65,14 +65,12 @@ typedef struct SDataBlockInfo { STimeWindow window; int32_t rows; int32_t rowSize; - union { - int64_t uid; // from which table of uid, comes from this data block - int64_t blockId; - }; - uint64_t groupId; // no need to serialize - int16_t numOfCols; - int16_t hasVarCol; - int16_t capacity; + int64_t uid; // the uid of table, from which current data block comes + int64_t blockId; // block id, generated by physical planner + uint64_t groupId; // no need to serialize + int16_t numOfCols; + int16_t hasVarCol; + int16_t capacity; } SDataBlockInfo; typedef struct SSDataBlock { diff --git a/include/libs/function/function.h b/include/libs/function/function.h index 094ce80106..347303a051 100644 --- a/include/libs/function/function.h +++ b/include/libs/function/function.h @@ -165,7 +165,7 @@ typedef struct SInputColumnInfoData { SColumnInfoData *pPTS; // primary timestamp column SColumnInfoData **pData; SColumnDataAgg **pColumnDataAgg; - uint64_t uid; // table uid + uint64_t uid; // table uid, used to set the tag value when building the final query result for selectivity functions. 
} SInputColumnInfoData; // sql function runtime context diff --git a/source/libs/executor/src/scanoperator.c b/source/libs/executor/src/scanoperator.c index f553af7995..dc87b864f1 100644 --- a/source/libs/executor/src/scanoperator.c +++ b/source/libs/executor/src/scanoperator.c @@ -184,8 +184,6 @@ int32_t loadDataBlock(SOperatorInfo* pOperator, STableScanInfo* pTableScanInfo, qDebug("%s data block skipped, brange:%" PRId64 "-%" PRId64 ", rows:%d", GET_TASKID(pTaskInfo), pBlockInfo->window.skey, pBlockInfo->window.ekey, pBlockInfo->rows); pCost->skipBlocks += 1; - - pBlock->info.blockId = 0; return TSDB_CODE_SUCCESS; } else if (*status == FUNC_DATA_REQUIRED_STATIS_LOAD) { pCost->loadBlockStatis += 1; @@ -206,7 +204,6 @@ int32_t loadDataBlock(SOperatorInfo* pOperator, STableScanInfo* pTableScanInfo, pBlock->pBlockAgg[pColMatchInfo->targetSlotId] = pColAgg[i]; } - pBlock->info.blockId = 0; return TSDB_CODE_SUCCESS; } else { // failed to load the block sma data, data block statistics does not exist, load data block instead *status = FUNC_DATA_REQUIRED_DATA_LOAD; @@ -235,11 +232,6 @@ int32_t loadDataBlock(SOperatorInfo* pOperator, STableScanInfo* pTableScanInfo, } relocateColumnData(pBlock, pTableScanInfo->pColMatchInfo, pCols); - - // reset the block to be 0 by default, this blockId is assigned by physical plan and is used by direct upstream - // operator. - pBlock->info.blockId = 0; - doFilter(pTableScanInfo->pFilterNode, pBlock); if (pBlock->info.rows == 0) { pCost->filterOutBlocks += 1; From 2dbf20514a319633d212bc9a1bc048bcd9b15dab Mon Sep 17 00:00:00 2001 From: cpwu Date: Fri, 29 Apr 2022 10:20:25 +0800 Subject: [PATCH 31/33] fix case --- tests/system-test/2-query/char_length.py | 4 ++-- tests/system-test/2-query/join.py | 4 ++-- tests/system-test/2-query/length.py | 4 ++-- tests/system-test/2-query/lower.py | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index 31a58c2b26..d91b0349cc 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -152,10 +152,10 @@ class TDTestCase: f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( f'''insert into ct1 values diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index 14b2c3978d..671a926cc2 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -153,10 +153,10 @@ class TDTestCase: f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * 
i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( f'''insert into ct1 values diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 57a8bf71f3..88b4236ca8 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -153,10 +153,10 @@ class TDTestCase: f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( f'''insert into ct1 values diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py index 0af328fb32..ec42ed3089 100644 --- a/tests/system-test/2-query/lower.py +++ b/tests/system-test/2-query/lower.py @@ -150,10 +150,10 @@ class TDTestCase: f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( f'''insert into ct1 
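
The interval constants in these hunks are day counts expressed in milliseconds, matching the `now()-{i*90}d`-style literals used elsewhere in the original test data: 7776000000 ms is exactly 90 days, while the old literal 776000000 ms is only about nine days, so the backdated ct2/ct4 rows landed far closer to the current time than the tests intended. The 60- and 30-day constants used for the "binary_limit" boundary rows follow the same rule. A minimal standalone sketch of the arithmetic, assuming plain Python with no test-framework imports:

MS_PER_DAY = 24 * 60 * 60 * 1000      # 86_400_000 ms in one day

assert 90 * MS_PER_DAY == 7776000000  # corrected 90-day spacing for the ct2/ct4 rows
assert 60 * MS_PER_DAY == 5184000000  # 60 days ahead, used for the "*_limit-1" boundary rows
assert 30 * MS_PER_DAY == 2592000000  # 30 days ahead, used for the "*_limit-2" boundary rows

# The old literal covered less than nine days, not ninety:
print(776000000 / MS_PER_DAY)         # ~8.98 days
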
values From b69ac45054f9b646c01ef2d4846c5884e9ff22f0 Mon Sep 17 00:00:00 2001 From: cpwu Date: Fri, 29 Apr 2022 11:00:45 +0800 Subject: [PATCH 32/33] fix case --- tests/system-test/2-query/char_length.py | 2 +- tests/system-test/2-query/join.py | 2 +- tests/system-test/2-query/length.py | 2 +- tests/system-test/2-query/lower.py | 2 +- tests/system-test/2-query/sum.py | 6 +-- tests/system-test/2-query/upper.py | 61 ++++++++++++------------ 6 files changed, 38 insertions(+), 37 deletions(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index d91b0349cc..200c564129 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -184,7 +184,7 @@ class TDTestCase: f'''insert into ct2 values ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index 671a926cc2..c4025c574c 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -185,7 +185,7 @@ class TDTestCase: f'''insert into ct2 values ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index 88b4236ca8..dd868046ad 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -185,7 +185,7 @@ class TDTestCase: f'''insert into ct2 values ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py index ec42ed3089..4643a0d244 100644 --- a/tests/system-test/2-query/lower.py +++ b/tests/system-test/2-query/lower.py @@ -182,7 +182,7 @@ class TDTestCase: f'''insert into ct2 values ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time 
- rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } diff --git a/tests/system-test/2-query/sum.py b/tests/system-test/2-query/sum.py index e9728abe63..dbd17ed509 100644 --- a/tests/system-test/2-query/sum.py +++ b/tests/system-test/2-query/sum.py @@ -138,10 +138,10 @@ class TDTestCase: f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( { now_time - i * 776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( { now_time - i * 776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( f'''insert into ct1 values @@ -170,7 +170,7 @@ class TDTestCase: f'''insert into ct2 values ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time + 777600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index 52cc8d4ca9..850c9d083b 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -143,74 +143,75 @@ class TDTestCase: tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') def __insert_data(self, rows): - for i in range(9): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): tdSql.execute( - f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 
}, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )" + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )" ) tdSql.execute( - '''insert into ct1 values - ( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a ) - ( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a ) + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } ) ''' ) tdSql.execute( f'''insert into ct4 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, - { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000} ) ( - now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, - { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000} ) ''' ) tdSql.execute( f'''insert into ct2 values - ( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( - now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, - { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", now()-1d + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, - { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", now()-2d + { 
now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) for i in range(rows): insert_data = f'''insert into t1 values - ( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, - "binary_{i}", "nchar_{i}", now()-{i}s ) + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_{i}", { now_time - 1000 * i } ) ''' tdSql.execute(insert_data) tdSql.execute( f'''insert into t1 values - ( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, - "binary_limit-1", "nchar_limit-1", now()-1d + "binary_limit-1", "nchar_limit-1", { now_time - 86400000 } ) ( - now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, - "binary_limit-2", "nchar_limit-2", now()-2d + "binary_limit-2", "nchar_limit-2", { now_time - 172800000 } ) ''' ) From 9d78a349fb1307fa503714b830844ca034d078e0 Mon Sep 17 00:00:00 2001 From: cpwu Date: Fri, 29 Apr 2022 11:08:27 +0800 Subject: [PATCH 33/33] fix case --- tests/system-test/2-query/char_length.py | 2 +- tests/system-test/2-query/join.py | 2 +- tests/system-test/2-query/length.py | 2 +- tests/system-test/2-query/lower.py | 2 +- tests/system-test/2-query/sum.py | 2 +- tests/system-test/2-query/upper.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/system-test/2-query/char_length.py b/tests/system-test/2-query/char_length.py index 200c564129..e78db3b8b0 100644 --- a/tests/system-test/2-query/char_length.py +++ b/tests/system-test/2-query/char_length.py @@ -205,7 +205,7 @@ class TDTestCase: tdSql.execute( f'''insert into t1 values ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index c4025c574c..b4878e42c9 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -206,7 +206,7 @@ class TDTestCase: tdSql.execute( f'''insert into t1 
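
The rewritten upper.py loader drops the relative `now()-...` timestamp strings and instead precomputes one absolute epoch value, `now_time`, in milliseconds, then derives every row's timestamp by adding or subtracting millisecond offsets from it. A short sketch of that pattern, assuming only the standard library; the `row_ts` helper and the simplified column values are illustrative, not taken from the test file:

import datetime

# Current time as epoch milliseconds, computed the same way as in the rewritten loader.
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)

MS_PER_HOUR = 3600000

def row_ts(i):
    # illustrative helper: one row per hour, going backwards from now_time
    return now_time - i * MS_PER_HOUR

sql = (
    f"insert into t1 values ( {row_ts(0)}, 0, 0, 0, 0, 0.0, 0.0, 0, "
    f"'binary_0', 'nchar_0', {now_time - 1000} )"
)
print(sql)
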
values ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, diff --git a/tests/system-test/2-query/length.py b/tests/system-test/2-query/length.py index dd868046ad..083bc62c9a 100644 --- a/tests/system-test/2-query/length.py +++ b/tests/system-test/2-query/length.py @@ -206,7 +206,7 @@ class TDTestCase: tdSql.execute( f'''insert into t1 values ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, diff --git a/tests/system-test/2-query/lower.py b/tests/system-test/2-query/lower.py index 4643a0d244..5445c37b8a 100644 --- a/tests/system-test/2-query/lower.py +++ b/tests/system-test/2-query/lower.py @@ -203,7 +203,7 @@ class TDTestCase: tdSql.execute( f'''insert into t1 values ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, diff --git a/tests/system-test/2-query/sum.py b/tests/system-test/2-query/sum.py index dbd17ed509..9521b005dd 100644 --- a/tests/system-test/2-query/sum.py +++ b/tests/system-test/2-query/sum.py @@ -191,7 +191,7 @@ class TDTestCase: tdSql.execute( f'''insert into t1 values ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, diff --git a/tests/system-test/2-query/upper.py b/tests/system-test/2-query/upper.py index 850c9d083b..3c3fddfb45 100644 --- a/tests/system-test/2-query/upper.py +++ b/tests/system-test/2-query/upper.py @@ -202,7 +202,7 @@ class TDTestCase: tdSql.execute( f'''insert into t1 values ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) - ( { now_time - (( rows // 2 ) * 60 + 30) * 1800000 }, NULL, NULL, NULL, 
NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
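
The final patch in the series corrects the remaining unit mix-up: `(( rows // 2 ) * 60 + 30)` is a count of minutes (it replaces the old `now()-{ ( rows // 2 ) * 60 + 30 }m` literal), so converting it to milliseconds needs a factor of 60000, not 1800000; with the old factor every "minute" became half an hour. A small sketch of the conversion; the `rows` value is illustrative, the tests pass their own row count:

MS_PER_MINUTE = 60 * 1000             # 60000

rows = 10                             # illustrative value
minutes_back = (rows // 2) * 60 + 30  # "(rows // 2) hours plus 30 minutes", counted in minutes

correct_offset = minutes_back * 60000     # what the fixed tests subtract from now_time
inflated_offset = minutes_back * 1800000  # old factor: 30 minutes per "minute"

assert inflated_offset == 30 * correct_offset
print(correct_offset, inflated_offset)    # 19_800_000 ms (~5.5 h) vs 594_000_000 ms (~6.9 days)
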