From bf4757ccd48b53f4f86ea7d2d1255df759f2a0ee Mon Sep 17 00:00:00 2001 From: cpwu Date: Sat, 7 May 2022 15:40:43 +0800 Subject: [PATCH 01/25] add concat case --- tests/system-test/2-query/concat.py | 268 ++++++++++++++++++++++++++++ 1 file changed, 268 insertions(+) create mode 100644 tests/system-test/2-query/concat.py diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py new file mode 100644 index 0000000000..2a258506c7 --- /dev/null +++ b/tests/system-test/2-query/concat.py @@ -0,0 +1,268 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +PRIMARY_COL = "ts" + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +BOOLEAN_COL = [ BOOL_COL, ] +TS_TYPE_COL = [ TS_COL, ] + + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __concat_condition(self): # sourcery skip: extract-method + concat_condition = [] + for char_col in CHAR_COL: + concat_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + concat_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL) + concat_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL ) + concat_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + concat_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + concat_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + concat_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + concat_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL ) + + for num_col in NUM_COL: + concat_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + concat_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + + concat_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) + + concat_condition.append('''"test1234!@#$%^&*():'> 0 " + return "" + + def __concat_num(self, concat_lists, num): + concat_list = [] + for i in range(num): + concat_list[i] = concat_lists[i] + return concat_list + + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __concat_current_check(self, tbname, num): + concat_condition = self.__concat_condition() + for i in range(len(concat_condition) - num + 1 ): + condition = self.__concat_num(num) + where_condition = self.__where_condition(condition[0]) + group_having = self.__group_condition(condition[0], having=f"{condition} is not null " ) + group_no_having= self.__group_condition(condition[0] ) + groups = ["", group_having, group_no_having] + + for group_condition in groups: + tdSql.query(f"select concat( {','.join( condition ) } ), {','.join(condition)} from {tbname} {where_condition} {group_condition} ") + for i in range(tdSql.queryRows): + tdSql.checkData(i, 0, "".join(tdSql.queryResult[i][1:])) + + + + + def __concat_err_check(self,tbname): + sqls = [] + + for un_char_col in NUM_COL: + sqls.extend( + ( + 
f"select concat( {un_char_col} ) from {tbname} ", + f"select concat(ceil( {un_char_col} )) from {tbname} ", + f"select {un_char_col} from {tbname} group by concat( {un_char_col} ) ", + ) + ) + + sqls.extend( f"select concat( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in NUM_COL ) + sqls.extend( f"select concat( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + + sqls.extend( f"select {char_col} from {tbname} group by concat( {char_col} ) " for char_col in CHAR_COL) + sqls.extend( f"select concat( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat( {char_col} + {ts_col} ) from {tbname} " for char_col in NUM_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select concat( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select concat() from {tbname} ", + f"select concat(*) from {tbname} ", + f"select concat(ccccccc) from {tbname} ", + f"select concat(111) from {tbname} ", + f"select concat(c8, 11) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + for tb in tbname: + self.__concat_current_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + + for tb in tbname: + for errsql in self.__concat_err_check(tb): + tdSql.error(sql=errsql) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): + tdSql.execute( + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, 
{-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000} + ) + ( + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000} + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.rows = 10 + self.__insert_data(self.rows) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def 
stop(self):
+        tdSql.close()
+        tdLog.success(f"{__file__} successfully executed")
+
+tdCases.addLinux(__file__, TDTestCase())
+tdCases.addWindows(__file__, TDTestCase())

From 65a1cc542ae076e9cf94c2e01ac7cd0f82a0bc42 Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 15:43:24 +0800
Subject: [PATCH 02/25] fix case

---
 tests/system-test/2-query/concat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index 2a258506c7..d3547a317f 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -125,7 +125,7 @@ class TDTestCase:
         tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
         tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
         for tb in tbname:
-            self.__concat_current_check(tb)
+            self.__concat_current_check(tb,2)
             tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========")

     def __test_error(self):

From 1ab622438a2d1a87121d259cf4a40fe62b90bd68 Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 15:46:25 +0800
Subject: [PATCH 03/25] fix case

---
 tests/system-test/2-query/concat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index d3547a317f..940567c6d6 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -74,7 +74,7 @@ class TDTestCase:
     def __concat_current_check(self, tbname, num):
         concat_condition = self.__concat_condition()
         for i in range(len(concat_condition) - num + 1 ):
-            condition = self.__concat_num(num)
+            condition = self.__concat_num(concat_condition[i:], num)
             where_condition = self.__where_condition(condition[0])
             group_having = self.__group_condition(condition[0], having=f"{condition} is not null " )
             group_no_having= self.__group_condition(condition[0] )

From 8052b48c349af05a7094cff218867b22d9291d99 Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 17:26:30 +0800
Subject: [PATCH 04/25] fix case

---
 tests/system-test/2-query/concat.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index 940567c6d6..09bcb43634 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -74,7 +74,9 @@ class TDTestCase:
     def __concat_current_check(self, tbname, num):
         concat_condition = self.__concat_condition()
         for i in range(len(concat_condition) - num + 1 ):
+            print(1111111111,i)
             condition = self.__concat_num(concat_condition[i:], num)
+            print(222222222222)
             where_condition = self.__where_condition(condition[0])
             group_having = self.__group_condition(condition[0], having=f"{condition} is not null " )
             group_no_having= self.__group_condition(condition[0] )
@@ -82,8 +84,9 @@ class TDTestCase:
             for group_condition in groups:
                 tdSql.query(f"select concat( {','.join( condition ) } ), {','.join(condition)} from {tbname} {where_condition} {group_condition} ")
-                for i in range(tdSql.queryRows):
-                    tdSql.checkData(i, 0, "".join(tdSql.queryResult[i][1:]))
+                for j in range(tdSql.queryRows):
+                    print(333333333)
+                    tdSql.checkData(j, 0, "".join(tdSql.queryResult[i][1:]))

From e77bae98689c1429d1d9367ed41f652aef58585e Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 17:36:36 +0800
Subject: [PATCH 05/25] fix case

---
 tests/system-test/2-query/concat.py | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index 09bcb43634..ca62186125 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -62,10 +62,7 @@ class TDTestCase:
         return ""

     def __concat_num(self, concat_lists, num):
-        concat_list = []
-        for i in range(num):
-            concat_list[i] = concat_lists[i]
-        return concat_list
+        return [ concat_lists[i] for i in range(num) ]

     def __group_condition(self, col, having = ""):
@@ -74,9 +71,8 @@ class TDTestCase:
     def __concat_current_check(self, tbname, num):
         concat_condition = self.__concat_condition()
         for i in range(len(concat_condition) - num + 1 ):
-            print(1111111111,i)
+            print(len(concat_condition))
             condition = self.__concat_num(concat_condition[i:], num)
-            print(222222222222)
             where_condition = self.__where_condition(condition[0])
             group_having = self.__group_condition(condition[0], having=f"{condition} is not null " )
             group_no_having= self.__group_condition(condition[0] )
@@ -85,8 +81,7 @@ class TDTestCase:
             for group_condition in groups:
                 tdSql.query(f"select concat( {','.join( condition ) } ), {','.join(condition)} from {tbname} {where_condition} {group_condition} ")
                 for j in range(tdSql.queryRows):
-                    print(333333333)
-                    tdSql.checkData(j, 0, "".join(tdSql.queryResult[i][1:]))
+                    tdSql.checkData(j, 0, "".join(tdSql.queryResult[i][1:])) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None)

From 37d3ed7ded1b68b5fcc85dd71f848b780b84b3da Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 17:37:52 +0800
Subject: [PATCH 06/25] fix case

---
 tests/system-test/2-query/concat.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index ca62186125..e893e28fd3 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -71,7 +71,6 @@ class TDTestCase:
     def __concat_current_check(self, tbname, num):
         concat_condition = self.__concat_condition()
         for i in range(len(concat_condition) - num + 1 ):
-            print(len(concat_condition))
             condition = self.__concat_num(concat_condition[i:], num)
             where_condition = self.__where_condition(condition[0])
             group_having = self.__group_condition(condition[0], having=f"{condition} is not null " )
             group_no_having= self.__group_condition(condition[0] )

From cd075c7739353edbc975e43a13207326c691adc1 Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 17:41:05 +0800
Subject: [PATCH 07/25] fix case

---
 tests/system-test/2-query/concat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index e893e28fd3..6ad3b00962 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -80,7 +80,7 @@ class TDTestCase:
             for group_condition in groups:
                 tdSql.query(f"select concat( {','.join( condition ) } ), {','.join(condition)} from {tbname} {where_condition} {group_condition} ")
                 for j in range(tdSql.queryRows):
-                    tdSql.checkData(j, 0, "".join(tdSql.queryResult[i][1:])) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None)
+                    tdSql.checkData(j, 0, "".join(tdSql.queryResult[j][1:])) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None)

From 4f0ca52adf5142843a6141ef2205648d65d69f0f Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 17:47:26 +0800
Subject: [PATCH 08/25] fix case

---
 tests/system-test/2-query/concat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index 6ad3b00962..bb5441322c 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -73,7 +73,7 @@ class TDTestCase:
         for i in range(len(concat_condition) - num + 1 ):
             condition = self.__concat_num(concat_condition[i:], num)
             where_condition = self.__where_condition(condition[0])
-            group_having = self.__group_condition(condition[0], having=f"{condition} is not null " )
+            group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " )
             group_no_having= self.__group_condition(condition[0] )
             groups = ["", group_having, group_no_having]

From 54d384a5694731923456ac32cb6e79d4e2172bb3 Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 18:31:28 +0800
Subject: [PATCH 09/25] fix case

---
 tests/system-test/2-query/concat.py | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index bb5441322c..49f289af51 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -72,15 +72,23 @@ class TDTestCase:
         concat_condition = self.__concat_condition()
         for i in range(len(concat_condition) - num + 1 ):
             condition = self.__concat_num(concat_condition[i:], num)
+            concat_filter = f"concat({','.join( condition ) })"
             where_condition = self.__where_condition(condition[0])
             group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " )
+            concat_group_having = self.__group_condition(concat_filter, having=f"{concat_filter} is not null " )
             group_no_having= self.__group_condition(condition[0] )
+            concat_group_no_having= self.__group_condition(concat_filter)
             groups = ["", group_having, group_no_having]
+            concat_groups = ["", concat_group_having, concat_group_no_having]

-            for group_condition in groups:
-                tdSql.query(f"select concat( {','.join( condition ) } ), {','.join(condition)} from {tbname} {where_condition} {group_condition} ")
+            for n in range(len(groups)):
+                tdSql.query(f"select {','.join(condition)} from {tbname} {where_condition} {groups[n]} ")
+                concat_data = []
+                for m in range(tdSql.queryRows):
+                    concat_data.append("".join(tdSql.queryResult[m])) if tdSql.getData(m, 0) else concat_data.append(None)
+                tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {concat_groups[n]} ")
                 for j in range(tdSql.queryRows):
-                    tdSql.checkData(j, 0, "".join(tdSql.queryResult[j][1:])) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None)
+                    tdSql.checkData(j, 0, concat_data[j]) if concat_data[j] else tdSql.checkData(j, 0, None)

From 74ce0b61cbc0dd4a304b77e870e20035c06f716a Mon Sep 17 00:00:00 2001
From: cpwu
Date: Sat, 7 May 2022 18:44:06 +0800
Subject: [PATCH 10/25] fix case

---
 tests/system-test/2-query/concat.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py
index 49f289af51..5c1d7289e4 100644
--- a/tests/system-test/2-query/concat.py
+++ b/tests/system-test/2-query/concat.py
@@ -83,14 +83,14 @@ class TDTestCase:
             for n in range(len(groups)):
                 tdSql.query(f"select {','.join(condition)} from {tbname} {where_condition} {groups[n]} ")
+                rows = tdSql.queryRows
                 concat_data = []
-                for m in range(tdSql.queryRows):
+                for m in range(rows):
                     concat_data.append("".join(tdSql.queryResult[m])) if tdSql.getData(m, 0) else concat_data.append(None)
                 tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {concat_groups[n]} ")
+                tdSql.checkRows(rows)
                 for j in range(tdSql.queryRows):
-
tdSql.checkData(j, 0, concat_data[j]) if concat_data[j] else tdSql.checkData(j, 0, None) - - + assert tdSql.getData(j, 0) in concat_data def __concat_err_check(self,tbname): From 39e9024124c79cff73a5b70fab9ed87edab1fbc2 Mon Sep 17 00:00:00 2001 From: cpwu Date: Sat, 7 May 2022 19:21:34 +0800 Subject: [PATCH 11/25] fix case --- tests/system-test/2-query/concat.py | 72 +++++++++++++++++------------ 1 file changed, 42 insertions(+), 30 deletions(-) diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py index 5c1d7289e4..db3d344831 100644 --- a/tests/system-test/2-query/concat.py +++ b/tests/system-test/2-query/concat.py @@ -1,3 +1,4 @@ +from pyparsing import nums from util.log import * from util.sql import * from util.cases import * @@ -68,70 +69,79 @@ class TDTestCase: def __group_condition(self, col, having = ""): return f" group by {col} having {having}" if having else f" group by {col} " - def __concat_current_check(self, tbname, num): + def __concat_check(self, tbname, num): concat_condition = self.__concat_condition() for i in range(len(concat_condition) - num + 1 ): condition = self.__concat_num(concat_condition[i:], num) - concat_filter = f"concat({','.join( condition ) })" where_condition = self.__where_condition(condition[0]) group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " ) - concat_group_having = self.__group_condition(concat_filter, having=f"{concat_filter} is not null " ) group_no_having= self.__group_condition(condition[0] ) - concat_group_no_having= self.__group_condition(concat_filter) groups = ["", group_having, group_no_having] - concat_groups = ["", concat_group_having, concat_group_no_having] - for n in range(len(groups)): - tdSql.query(f"select {','.join(condition)} from {tbname} {where_condition} {groups[n]} ") - rows = tdSql.queryRows - concat_data = [] - for m in range(rows): - concat_data.append("".join(tdSql.queryResult[m])) if tdSql.getData(m, 0) else concat_data.append(None) - tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {concat_groups[n]} ") - tdSql.checkRows(rows) - for j in range(tdSql.queryRows): - assert tdSql.getData(j, 0) in concat_data + if num > 8 or num < 2 : + [tdSql.error(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ] + + + tdSql.query(f"select {','.join(condition)} from {tbname} ") + rows = tdSql.queryRows + concat_data = [] + for m in range(rows): + concat_data.append("".join(tdSql.queryResult[m])) if tdSql.getData(m, 0) else concat_data.append(None) + tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} ") + tdSql.checkRows(rows) + for j in range(tdSql.queryRows): + assert tdSql.getData(j, 0) in concat_data + + [ tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ] def __concat_err_check(self,tbname): sqls = [] - for un_char_col in NUM_COL: + for char_col in CHAR_COL: sqls.extend( ( - f"select concat( {un_char_col} ) from {tbname} ", - f"select concat(ceil( {un_char_col} )) from {tbname} ", - f"select {un_char_col} from {tbname} group by concat( {un_char_col} ) ", + f"select concat( {char_col} ) from {tbname} ", + f"select concat(ceil( {char_col} )) from {tbname} ", + f"select {char_col} from {tbname} group by concat( {char_col} ) ", ) ) - sqls.extend( f"select concat( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in NUM_COL ) - sqls.extend( f"select concat( {un_char_col} 
+ {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat( {char_col} , {num_col} ) from {tbname} " for num_col in NUM_COL ) + sqls.extend( f"select concat( {char_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat( {char_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select concat( {ts_col}, {bool_col} ) from {tbname} " for ts_col in TS_TYPE_COL for bool_col in BOOLEAN_COL ) + sqls.extend( f"select concat( {num_col} , {ts_col} ) from {tbname} " for num_col in NUM_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select concat( {num_col} , {bool_col} ) from {tbname} " for num_col in NUM_COL for bool_col in BOOLEAN_COL) + sqls.extend( f"select concat( {num_col} , {num_col} ) from {tbname} " for num_col in NUM_COL for num_col in NUM_COL) + sqls.extend( f"select concat( {ts_col}, {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat( {bool_col}, {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL for bool_col in BOOLEAN_COL ) - sqls.extend( f"select {char_col} from {tbname} group by concat( {char_col} ) " for char_col in CHAR_COL) - sqls.extend( f"select concat( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) - sqls.extend( f"select concat( {char_col} + {ts_col} ) from {tbname} " for char_col in NUM_COL for ts_col in TS_TYPE_COL) sqls.extend( f"select concat( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) - sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) - sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( f"select concat({char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select concat({num_col}, '1') from {tbname} " for num_col in NUM_COL ) + sqls.extend( f"select concat({ts_col}, '1') from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat({bool_col}, '1') from {tbname} " for bool_col in BOOLEAN_COL ) + sqls.extend( f"select concat({char_col},'1') from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) sqls.extend( ( f"select concat() from {tbname} ", f"select concat(*) from {tbname} ", f"select concat(ccccccc) from {tbname} ", f"select concat(111) from {tbname} ", - f"select concat(c8, 11) from {tbname} ", ) ) return sqls - def __test_current(self): + def __test_current(self): # sourcery skip: use-itertools-product tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: - self.__concat_current_check(tb,2) - tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + for i in range(2,8): + self.__concat_check(tb,i) + tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") @@ -140,6 +150,8 @@ class TDTestCase: for tb in tbname: for errsql in self.__concat_err_check(tb): tdSql.error(sql=errsql) + self.__concat_check(tb,1) + self.__concat_check(tb,9) tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") From 218150288a18d42f08f12090734d5f1babe0d71e Mon Sep 17 00:00:00 2001 From: cpwu Date: Sat, 7 May 2022 19:27:01 +0800 Subject: [PATCH 12/25] fix case --- tests/system-test/2-query/concat.py | 10 
++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py index db3d344831..97425d6556 100644 --- a/tests/system-test/2-query/concat.py +++ b/tests/system-test/2-query/concat.py @@ -1,4 +1,3 @@ -from pyparsing import nums from util.log import * from util.sql import * from util.cases import * @@ -73,10 +72,13 @@ class TDTestCase: concat_condition = self.__concat_condition() for i in range(len(concat_condition) - num + 1 ): condition = self.__concat_num(concat_condition[i:], num) + concat_filter = f"concat( {','.join( condition ) }) " where_condition = self.__where_condition(condition[0]) - group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " ) - group_no_having= self.__group_condition(condition[0] ) - groups = ["", group_having, group_no_having] + # group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " ) + concat_group_having = self.__group_condition(concat_filter, having=f"{concat_filter} is not null " ) + # group_no_having= self.__group_condition(condition[0] ) + concat_group_no_having= self.__group_condition(concat_filter) + groups = ["", concat_group_having, concat_group_no_having] if num > 8 or num < 2 : [tdSql.error(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ] From b9113c82c0fe2865dab5b5c0a6808bad63b23038 Mon Sep 17 00:00:00 2001 From: cpwu Date: Sat, 7 May 2022 19:53:56 +0800 Subject: [PATCH 13/25] fix case --- tests/system-test/2-query/concat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/concat.py b/tests/system-test/2-query/concat.py index 97425d6556..b50484f76f 100644 --- a/tests/system-test/2-query/concat.py +++ b/tests/system-test/2-query/concat.py @@ -44,7 +44,7 @@ class TDTestCase: concat_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) concat_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) concat_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) - concat_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + # concat_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) concat_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL ) for num_col in NUM_COL: From c59361a8b071707ad30021c3ac0b8892bb90c54d Mon Sep 17 00:00:00 2001 From: cpwu Date: Sat, 7 May 2022 20:02:58 +0800 Subject: [PATCH 14/25] add concat_ws case --- tests/system-test/2-query/concat_ws.py | 287 +++++++++++++++++++++++++ 1 file changed, 287 insertions(+) create mode 100644 tests/system-test/2-query/concat_ws.py diff --git a/tests/system-test/2-query/concat_ws.py b/tests/system-test/2-query/concat_ws.py new file mode 100644 index 0000000000..a91dbd635b --- /dev/null +++ b/tests/system-test/2-query/concat_ws.py @@ -0,0 +1,287 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +PRIMARY_COL = "ts" + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +BOOLEAN_COL = [ BOOL_COL, ] +TS_TYPE_COL = [ TS_COL, ] + + +class 
TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __concat_ws_condition(self): # sourcery skip: extract-method + concat_ws_condition = [] + for char_col in CHAR_COL: + concat_ws_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + concat_ws_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL) + concat_ws_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL ) + concat_ws_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + concat_ws_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + concat_ws_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + # concat_ws_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + concat_ws_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL ) + + for num_col in NUM_COL: + concat_ws_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + concat_ws_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + + concat_ws_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) + + concat_ws_condition.append('''"test1234!@#$%^&*():'> 0 " + return "" + + def __concat_ws_num(self, concat_ws_lists, num): + return [ concat_ws_lists[i] for i in range(num) ] + + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __concat_ws_check(self, tbname, num): + concat_ws_condition = self.__concat_ws_condition() + for i in range(len(concat_ws_condition) - num + 1 ): + condition = self.__concat_ws_num(concat_ws_condition[i:], num) + concat_ws_filter = f"concat_ws('_', {','.join( condition ) }) " + where_condition = self.__where_condition(condition[0]) + # group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " ) + concat_ws_group_having = self.__group_condition(concat_ws_filter, having=f"{concat_ws_filter} is not null " ) + # group_no_having= self.__group_condition(condition[0] ) + concat_ws_group_no_having= self.__group_condition(concat_ws_filter) + groups = ["", concat_ws_group_having, concat_ws_group_no_having] + + if num > 8 or num < 2 : + [tdSql.error(f"select concat_ws('_', {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ] + + + tdSql.query(f"select {','.join(condition)} from {tbname} ") + rows = tdSql.queryRows + concat_ws_data = [] + for m in range(rows): + concat_ws_data.append("_".join(tdSql.queryResult[m])) if tdSql.getData(m, 0) else concat_ws_data.append(None) + tdSql.query(f"select concat_ws('_', {','.join( condition ) }) from {tbname} ") + tdSql.checkRows(rows) + for j in range(tdSql.queryRows): + assert tdSql.getData(j, 0) in concat_ws_data + + [ tdSql.query(f"select concat_ws('_', {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ] + + + def __concat_ws_err_check(self,tbname): + sqls = [] + + for char_col in CHAR_COL: + sqls.extend( + ( + f"select concat_ws('_', {char_col} ) from {tbname} ", + f"select concat_ws('_', ceil( {char_col} )) from {tbname} ", + f"select {char_col} from {tbname} group by concat_ws('_', {char_col} ) ", + ) + ) + + sqls.extend( f"select concat_ws('_', {char_col} , 
{num_col} ) from {tbname} " for num_col in NUM_COL ) + sqls.extend( f"select concat_ws('_', {char_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat_ws('_', {char_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select concat_ws('_', {ts_col}, {bool_col} ) from {tbname} " for ts_col in TS_TYPE_COL for bool_col in BOOLEAN_COL ) + sqls.extend( f"select concat_ws('_', {num_col} , {ts_col} ) from {tbname} " for num_col in NUM_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select concat_ws('_', {num_col} , {bool_col} ) from {tbname} " for num_col in NUM_COL for bool_col in BOOLEAN_COL) + sqls.extend( f"select concat_ws('_', {num_col} , {num_col} ) from {tbname} " for num_col in NUM_COL for num_col in NUM_COL) + sqls.extend( f"select concat_ws('_', {ts_col}, {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat_ws('_', {bool_col}, {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select concat_ws('_', {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select concat_ws('_', {char_col}, 11) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select concat_ws('_', {num_col}, '1') from {tbname} " for num_col in NUM_COL ) + sqls.extend( f"select concat_ws('_', {ts_col}, '1') from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select concat_ws('_', {bool_col}, '1') from {tbname} " for bool_col in BOOLEAN_COL ) + sqls.extend( f"select concat_ws('_', {char_col},'1') from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select concat_ws('_', ) from {tbname} ", + f"select concat_ws('_', *) from {tbname} ", + f"select concat_ws('_', ccccccc) from {tbname} ", + f"select concat_ws('_', 111) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): # sourcery skip: use-itertools-product + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + for tb in tbname: + for i in range(2,8): + self.__concat_ws_check(tb,i) + tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + + for tb in tbname: + for errsql in self.__concat_ws_err_check(tb): + tdSql.error(sql=errsql) + self.__concat_ws_check(tb,1) + self.__concat_ws_check(tb,9) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in 
range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): + tdSql.execute( + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000} + ) + ( + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000} + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) 
}, { rows % 2 }, + "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.rows = 10 + self.__insert_data(self.rows) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) From 5149d3695c90b181408577374baa87b47e054f43 Mon Sep 17 00:00:00 2001 From: cpwu Date: Sun, 8 May 2022 23:38:15 +0800 Subject: [PATCH 15/25] fix case --- tests/system-test/2-query/ltrim.py | 271 +++++++++++++++++++++++++++++ 1 file changed, 271 insertions(+) create mode 100644 tests/system-test/2-query/ltrim.py diff --git a/tests/system-test/2-query/ltrim.py b/tests/system-test/2-query/ltrim.py new file mode 100644 index 0000000000..daf1eca7ff --- /dev/null +++ b/tests/system-test/2-query/ltrim.py @@ -0,0 +1,271 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +PRIMARY_COL = "ts" + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +BOOLEAN_COL = [ BOOL_COL, ] +TS_TYPE_COL = [ TS_COL, ] + + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __ltrim_condition(self): # sourcery skip: extract-method + ltrim_condition = [] + for char_col in CHAR_COL: + ltrim_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + ltrim_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL) + ltrim_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL ) + ltrim_condition.extend( f"concat( cast( {char_col} + {num_col} as binary(16) ), {char_col}) " for num_col in NUM_COL ) + ltrim_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + ltrim_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + ltrim_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + # ltrim_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + ltrim_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL ) + ltrim_condition.extend( f"concat( {char_col}, {char_col_2} ) " for char_col_2 in CHAR_COL ) + + for num_col in NUM_COL: + ltrim_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + ltrim_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + + ltrim_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) + + 
ltrim_condition.append(''' " test1234!@#$%^&*() :'> 0 " + return "" + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __ltrim_check(self, tbname, num): + ltrim_condition = self.__ltrim_condition() + for condition in ltrim_condition: + where_condition = self.__where_condition(condition) + ltrim_group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + ltrim_group_no_having= self.__group_condition(condition) + groups = ["", ltrim_group_having, ltrim_group_no_having] + + for group in groups: + tdSql.query(f"select ltrim( {condition}) , {condition} from {tbname} ") + for j in range(tdSql.queryRows): + tdSql.checkData(j,0, tdSql.getData(j,1).lstrip()) + + [ tdSql.query(f"select ltrim({condition}) from {tbname} {where_condition} {group} ") for group in groups ] + + + def __ltrim_err_check(self,tbname): + sqls = [] + + for num_col in NUM_COL: + sqls.extend( + ( + f"select ltrim( {num_col} ) from {tbname} ", + f"select ltrim(ceil( {num_col} )) from {tbname} ", + f"select {num_col} from {tbname} group by ltrim( {num_col} ) ", + ) + ) + + sqls.extend( f"select ltrim( {char_col} , {num_col} ) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select ltrim( {num_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select ltrim( {num_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select ltrim( {ts_col}+{bool_col} ) from {tbname} " for ts_col in TS_TYPE_COL for bool_col in BOOLEAN_COL ) + sqls.extend( f"select ltrim( {num_col}+{ts_col} ) from {tbname} " for num_col in NUM_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select ltrim( {num_col}+ {bool_col} ) from {tbname} " for num_col in NUM_COL for bool_col in BOOLEAN_COL) + sqls.extend( f"select ltrim( {num_col}+ {num_col} ) from {tbname} " for num_col in NUM_COL for num_col in NUM_COL) + sqls.extend( f"select ltrim( {ts_col}+{ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL for ts_col in TS_TYPE_COL ) + sqls.extend( f"select ltrim( {bool_col}+ {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select ltrim( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select ltrim({num_col}, '1') from {tbname} " for num_col in NUM_COL ) + sqls.extend( f"select ltrim({ts_col}, '1') from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select ltrim({bool_col}, '1') from {tbname} " for bool_col in BOOLEAN_COL ) + sqls.extend( f"select ltrim({char_col},'1') from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select ltrim() from {tbname} ", + f"select ltrim(*) from {tbname} ", + f"select ltrim(ccccccc) from {tbname} ", + f"select ltrim(111) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): # sourcery skip: use-itertools-product + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + for tb in tbname: + for i in range(2,8): + self.__ltrim_check(tb,i) + tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + + for tb in tbname: + for errsql in self.__ltrim_err_check(tb): + tdSql.error(sql=errsql) + 
self.__ltrim_check(tb,1) + self.__ltrim_check(tb,9) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): + tdSql.execute( + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000} + ) + ( + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000} + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) 
+ pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.rows = 10 + self.__insert_data(self.rows) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) From fe3a8575b28907ce5e03e4b02de43669b26002d0 Mon Sep 17 00:00:00 2001 From: cpwu Date: Sun, 8 May 2022 23:40:23 +0800 Subject: [PATCH 16/25] fix case --- tests/system-test/2-query/rtrim.py | 271 +++++++++++++++++++++++++++++ 1 file changed, 271 insertions(+) create mode 100644 tests/system-test/2-query/rtrim.py diff --git a/tests/system-test/2-query/rtrim.py b/tests/system-test/2-query/rtrim.py new file mode 100644 index 0000000000..1f8a3e240b --- /dev/null +++ b/tests/system-test/2-query/rtrim.py @@ -0,0 +1,271 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +PRIMARY_COL = "ts" + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +BOOLEAN_COL = [ BOOL_COL, ] +TS_TYPE_COL = [ TS_COL, ] + + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __rtrim_condition(self): # sourcery skip: extract-method + rtrim_condition = [] + for char_col in CHAR_COL: + rtrim_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + rtrim_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL) + rtrim_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL ) + rtrim_condition.extend( f"concat( cast( {char_col} + {num_col} as binary(16) ), {char_col}) " for num_col in NUM_COL ) + 
rtrim_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + rtrim_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + rtrim_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + # rtrim_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + rtrim_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL ) + rtrim_condition.extend( f"concat( {char_col}, {char_col_2} ) " for char_col_2 in CHAR_COL ) + + for num_col in NUM_COL: + rtrim_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + rtrim_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + + rtrim_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) + + rtrim_condition.append(''' " test1234!@#$%^&*() :'> 0 " + return "" + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __rtrim_check(self, tbname, num): + rtrim_condition = self.__rtrim_condition() + for condition in rtrim_condition: + where_condition = self.__where_condition(condition) + rtrim_group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + rtrim_group_no_having= self.__group_condition(condition) + groups = ["", rtrim_group_having, rtrim_group_no_having] + + for group in groups: + tdSql.query(f"select rtrim( {condition}) , {condition} from {tbname} ") + for j in range(tdSql.queryRows): + tdSql.checkData(j,0, tdSql.getData(j,1).rstrip()) + + [ tdSql.query(f"select rtrim({condition}) from {tbname} {where_condition} {group} ") for group in groups ] + + + def __rtrim_err_check(self,tbname): + sqls = [] + + for num_col in NUM_COL: + sqls.extend( + ( + f"select rtrim( {num_col} ) from {tbname} ", + f"select rtrim(ceil( {num_col} )) from {tbname} ", + f"select {num_col} from {tbname} group by rtrim( {num_col} ) ", + ) + ) + + sqls.extend( f"select rtrim( {char_col} , {num_col} ) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select rtrim( {num_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select rtrim( {num_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select rtrim( {ts_col}+{bool_col} ) from {tbname} " for ts_col in TS_TYPE_COL for bool_col in BOOLEAN_COL ) + sqls.extend( f"select rtrim( {num_col}+{ts_col} ) from {tbname} " for num_col in NUM_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select rtrim( {num_col}+ {bool_col} ) from {tbname} " for num_col in NUM_COL for bool_col in BOOLEAN_COL) + sqls.extend( f"select rtrim( {num_col}+ {num_col} ) from {tbname} " for num_col in NUM_COL for num_col in NUM_COL) + sqls.extend( f"select rtrim( {ts_col}+{ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL for ts_col in TS_TYPE_COL ) + sqls.extend( f"select rtrim( {bool_col}+ {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select rtrim( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select rtrim({num_col}, '1') from {tbname} " for num_col in NUM_COL ) + sqls.extend( f"select rtrim({ts_col}, '1') from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select rtrim({bool_col}, '1') from {tbname} " for bool_col in BOOLEAN_COL ) + 
sqls.extend( f"select rtrim({char_col},'1') from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select rtrim() from {tbname} ", + f"select rtrim(*) from {tbname} ", + f"select rtrim(ccccccc) from {tbname} ", + f"select rtrim(111) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): # sourcery skip: use-itertools-product + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + for tb in tbname: + for i in range(2,8): + self.__rtrim_check(tb,i) + tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + + for tb in tbname: + for errsql in self.__rtrim_err_check(tb): + tdSql.error(sql=errsql) + self.__rtrim_check(tb,1) + self.__rtrim_check(tb,9) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for i in range(rows): + tdSql.execute( + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", 
"nchar_测试_limit-1", { now_time - 86400000} + ) + ( + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000} + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.rows = 10 + self.__insert_data(self.rows) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) From 46390534b804a18e4043790147686c584d46f681 Mon Sep 17 00:00:00 2001 From: cpwu Date: Sun, 8 May 2022 23:50:44 +0800 Subject: [PATCH 17/25] fix case --- tests/system-test/2-query/ltrim.py | 16 +- tests/system-test/2-query/rtrim.py | 16 +- tests/system-test/2-query/substr.py | 267 ++++++++++++++++++++++++++++ 3 files changed, 279 insertions(+), 20 deletions(-) create mode 100644 tests/system-test/2-query/substr.py diff --git a/tests/system-test/2-query/ltrim.py b/tests/system-test/2-query/ltrim.py index daf1eca7ff..aeceabd42b 100644 --- a/tests/system-test/2-query/ltrim.py +++ b/tests/system-test/2-query/ltrim.py @@ -66,7 +66,7 @@ class TDTestCase: def __group_condition(self, col, having 
= ""): return f" group by {col} having {having}" if having else f" group by {col} " - def __ltrim_check(self, tbname, num): + def __ltrim_check(self, tbname): ltrim_condition = self.__ltrim_condition() for condition in ltrim_condition: where_condition = self.__where_condition(condition) @@ -74,10 +74,9 @@ class TDTestCase: ltrim_group_no_having= self.__group_condition(condition) groups = ["", ltrim_group_having, ltrim_group_no_having] - for group in groups: - tdSql.query(f"select ltrim( {condition}) , {condition} from {tbname} ") - for j in range(tdSql.queryRows): - tdSql.checkData(j,0, tdSql.getData(j,1).lstrip()) + tdSql.query(f"select ltrim( {condition}) , {condition} from {tbname} ") + for j in range(tdSql.queryRows): + tdSql.checkData(j,0, tdSql.getData(j,1).lstrip()) [ tdSql.query(f"select ltrim({condition}) from {tbname} {where_condition} {group} ") for group in groups ] @@ -125,9 +124,8 @@ class TDTestCase: tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: - for i in range(2,8): - self.__ltrim_check(tb,i) - tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") + self.__ltrim_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") @@ -136,8 +134,6 @@ class TDTestCase: for tb in tbname: for errsql in self.__ltrim_err_check(tb): tdSql.error(sql=errsql) - self.__ltrim_check(tb,1) - self.__ltrim_check(tb,9) tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") diff --git a/tests/system-test/2-query/rtrim.py b/tests/system-test/2-query/rtrim.py index 1f8a3e240b..3cced8dbdb 100644 --- a/tests/system-test/2-query/rtrim.py +++ b/tests/system-test/2-query/rtrim.py @@ -66,7 +66,7 @@ class TDTestCase: def __group_condition(self, col, having = ""): return f" group by {col} having {having}" if having else f" group by {col} " - def __rtrim_check(self, tbname, num): + def __rtrim_check(self, tbname): rtrim_condition = self.__rtrim_condition() for condition in rtrim_condition: where_condition = self.__where_condition(condition) @@ -74,10 +74,9 @@ class TDTestCase: rtrim_group_no_having= self.__group_condition(condition) groups = ["", rtrim_group_having, rtrim_group_no_having] - for group in groups: - tdSql.query(f"select rtrim( {condition}) , {condition} from {tbname} ") - for j in range(tdSql.queryRows): - tdSql.checkData(j,0, tdSql.getData(j,1).rstrip()) + tdSql.query(f"select rtrim( {condition}) , {condition} from {tbname} ") + for j in range(tdSql.queryRows): + tdSql.checkData(j,0, tdSql.getData(j,1).rstrip()) [ tdSql.query(f"select rtrim({condition}) from {tbname} {where_condition} {group} ") for group in groups ] @@ -125,9 +124,8 @@ class TDTestCase: tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: - for i in range(2,8): - self.__rtrim_check(tb,i) - tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") + self.__rtrim_check(tb) + tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") @@ -136,8 +134,6 @@ class TDTestCase: for tb in tbname: for errsql in 
self.__rtrim_err_check(tb): tdSql.error(sql=errsql) - self.__rtrim_check(tb,1) - self.__rtrim_check(tb,9) tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") diff --git a/tests/system-test/2-query/substr.py b/tests/system-test/2-query/substr.py new file mode 100644 index 0000000000..4780939ddf --- /dev/null +++ b/tests/system-test/2-query/substr.py @@ -0,0 +1,267 @@ +from util.log import * +from util.sql import * +from util.cases import * +from util.dnodes import * + + +PRIMARY_COL = "ts" + +INT_COL = "c1" +BINT_COL = "c2" +SINT_COL = "c3" +TINT_COL = "c4" +FLOAT_COL = "c5" +DOUBLE_COL = "c6" +BOOL_COL = "c7" + +BINARY_COL = "c8" +NCHAR_COL = "c9" +TS_COL = "c10" + +NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ] +CHAR_COL = [ BINARY_COL, NCHAR_COL, ] +BOOLEAN_COL = [ BOOL_COL, ] +TS_TYPE_COL = [ TS_COL, ] + + +class TDTestCase: + + def init(self, conn, logSql): + tdLog.debug(f"start to excute {__file__}") + tdSql.init(conn.cursor()) + + def __substr_condition(self): # sourcery skip: extract-method + substr_condition = [] + for char_col in CHAR_COL: + substr_condition.extend( + ( + char_col, + f"upper( {char_col} )", + ) + ) + substr_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL) + substr_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL ) + substr_condition.extend( f"concat( cast( {char_col} + {num_col} as binary(16) ), {char_col}) " for num_col in NUM_COL ) + substr_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + substr_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + substr_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + # substr_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + substr_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL ) + substr_condition.extend( f"concat( {char_col}, {char_col_2} ) " for char_col_2 in CHAR_COL ) + + for num_col in NUM_COL: + substr_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) + substr_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + + substr_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) + + substr_condition.append(''' " test1234!@#$%^&*() :'> 0 " + return "" + + def __group_condition(self, col, having = ""): + return f" group by {col} having {having}" if having else f" group by {col} " + + def __substr_check(self, tbname,pos, lens=2): + substr_condition = self.__substr_condition() + for condition in substr_condition: + where_condition = self.__where_condition(condition) + substr_group_having = self.__group_condition(condition, having=f"{condition} is not null " ) + substr_group_no_having= self.__group_condition(condition) + groups = ["", substr_group_having, substr_group_no_having] + + tdSql.query(f"select substr( {condition}, {pos}, {lens}) , {condition} from {tbname} ") + for j in range(tdSql.queryRows): + tdSql.checkData(j,0, tdSql.getData(j,1)[pos:lens]) + + [ tdSql.query(f"select substr({condition}) from {tbname} {where_condition} {group} ") for group in groups ] + + + def __substr_err_check(self,tbname): + sqls = [] + + for num_col in NUM_COL: + sqls.extend( + ( + f"select substr( {num_col} ) from {tbname} ", + f"select substr(ceil( {num_col} 
)) from {tbname} ", + f"select {num_col} from {tbname} group by substr( {num_col} ) ", + ) + ) + + sqls.extend( f"select substr( {char_col} , {num_col} ) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select substr( {num_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select substr( {num_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select substr( {ts_col}+{bool_col} ) from {tbname} " for ts_col in TS_TYPE_COL for bool_col in BOOLEAN_COL ) + sqls.extend( f"select substr( {num_col}+{ts_col} ) from {tbname} " for num_col in NUM_COL for ts_col in TS_TYPE_COL) + sqls.extend( f"select substr( {num_col}+ {bool_col} ) from {tbname} " for num_col in NUM_COL for bool_col in BOOLEAN_COL) + sqls.extend( f"select substr( {num_col}+ {num_col} ) from {tbname} " for num_col in NUM_COL for num_col in NUM_COL) + sqls.extend( f"select substr( {ts_col}+{ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL for ts_col in TS_TYPE_COL ) + sqls.extend( f"select substr( {bool_col}+ {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL for bool_col in BOOLEAN_COL ) + + sqls.extend( f"select substr( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL ) + sqls.extend( f"select substr({num_col}, '1') from {tbname} " for num_col in NUM_COL ) + sqls.extend( f"select substr({ts_col}, '1') from {tbname} " for ts_col in TS_TYPE_COL ) + sqls.extend( f"select substr({bool_col}, '1') from {tbname} " for bool_col in BOOLEAN_COL ) + sqls.extend( f"select substr({char_col},'1') from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL ) + sqls.extend( + ( + f"select substr() from {tbname} ", + f"select substr(*) from {tbname} ", + f"select substr(ccccccc) from {tbname} ", + f"select substr(111) from {tbname} ", + ) + ) + + return sqls + + def __test_current(self): # sourcery skip: use-itertools-product + tdLog.printNoPrefix("==========current sql condition check , must return query ok==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + for tb in tbname: + self.__substr_check(tb, 1, 6) + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") + + def __test_error(self): + tdLog.printNoPrefix("==========err sql condition check , must return error==========") + tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] + + for tb in tbname: + for errsql in self.__substr_err_check(tb): + tdSql.error(sql=errsql) + tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") + + + def all_test(self): + self.__test_current() + self.__test_error() + + + def __create_tb(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + create_stb_sql = f'''create table stb1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) tags (t1 int) + ''' + create_ntb_sql = f'''create table t1( + ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint, + {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool, + {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp + ) + ''' + tdSql.execute(create_stb_sql) + tdSql.execute(create_ntb_sql) + + for i in range(4): + tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )') + + def __insert_data(self, rows): + now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000) + for 
i in range(rows): + tdSql.execute( + f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )" + ) + tdSql.execute( + f'''insert into ct1 values + ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } ) + ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } ) + ''' + ) + + tdSql.execute( + f'''insert into ct4 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000} + ) + ( + { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126, + { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000} + ) + ''' + ) + + tdSql.execute( + f'''insert into ct2 values + ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( + { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126, + { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127, + { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + for i in range(rows): + insert_data = f'''insert into t1 values + ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}, + "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } ) + ''' + tdSql.execute(insert_data) + tdSql.execute( + f'''insert into t1 values + ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL ) + ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127, + { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, + "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 } + ) + ( + { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126, + { 3.2 * 
pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, + "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 } + ) + ''' + ) + + def run(self): + tdSql.prepare() + + tdLog.printNoPrefix("==========step1:create table") + self.__create_tb() + + tdLog.printNoPrefix("==========step2:insert data") + self.rows = 10 + self.__insert_data(self.rows) + + tdLog.printNoPrefix("==========step3:all check") + self.all_test() + + # tdDnodes.stop(1) + # tdDnodes.start(1) + + # tdSql.execute("use db") + + # tdLog.printNoPrefix("==========step4:after wal, all check again ") + # self.all_test() + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) From 123ac754017e52f8a7abebf17dc47a6731c87e34 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 00:01:42 +0800 Subject: [PATCH 18/25] fix case --- tests/system-test/2-query/substr.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/system-test/2-query/substr.py b/tests/system-test/2-query/substr.py index 4780939ddf..9e36de3fd6 100644 --- a/tests/system-test/2-query/substr.py +++ b/tests/system-test/2-query/substr.py @@ -74,9 +74,12 @@ class TDTestCase: substr_group_no_having= self.__group_condition(condition) groups = ["", substr_group_having, substr_group_no_having] + if pos == 0: + tdSql.error(f"select substr( {condition}, {pos}, {lens}) , {condition} from {tbname} ") + tdSql.query(f"select substr( {condition}, {pos}, {lens}) , {condition} from {tbname} ") for j in range(tdSql.queryRows): - tdSql.checkData(j,0, tdSql.getData(j,1)[pos:lens]) + tdSql.checkData(j,0, tdSql.getData(j,1)[pos-1:lens]) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None) [ tdSql.query(f"select substr({condition}) from {tbname} {where_condition} {group} ") for group in groups ] @@ -134,6 +137,7 @@ class TDTestCase: for tb in tbname: for errsql in self.__substr_err_check(tb): tdSql.error(sql=errsql) + self.__substr_check(tb, 0, 6) tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========") From efc6b96746c4df21adecd6c98586419ba87b0d3c Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 00:01:48 +0800 Subject: [PATCH 19/25] fix case --- tests/system-test/2-query/substr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/substr.py b/tests/system-test/2-query/substr.py index 9e36de3fd6..fb4b54a525 100644 --- a/tests/system-test/2-query/substr.py +++ b/tests/system-test/2-query/substr.py @@ -74,7 +74,7 @@ class TDTestCase: substr_group_no_having= self.__group_condition(condition) groups = ["", substr_group_having, substr_group_no_having] - if pos == 0: + if pos < 1: tdSql.error(f"select substr( {condition}, {pos}, {lens}) , {condition} from {tbname} ") tdSql.query(f"select substr( {condition}, {pos}, {lens}) , {condition} from {tbname} ") From 472ade70623cf022006a21229ce983ef18b104d6 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 00:03:53 +0800 Subject: [PATCH 20/25] fix case --- tests/system-test/2-query/substr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/substr.py b/tests/system-test/2-query/substr.py index fb4b54a525..5fed3762e8 100644 --- a/tests/system-test/2-query/substr.py +++ b/tests/system-test/2-query/substr.py @@ -81,7 +81,7 @@ class TDTestCase: for j in range(tdSql.queryRows): tdSql.checkData(j,0, tdSql.getData(j,1)[pos-1:lens]) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None) - [ 
tdSql.query(f"select substr({condition}) from {tbname} {where_condition} {group} ") for group in groups ] + [ tdSql.query(f"select substr({condition}, {pos}, {lens}) from {tbname} {where_condition} {group} ") for group in groups ] def __substr_err_check(self,tbname): From df0c0037eac8d57d90de8b0922cac494027addf6 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 00:09:32 +0800 Subject: [PATCH 21/25] fix case --- tests/system-test/2-query/ltrim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/ltrim.py b/tests/system-test/2-query/ltrim.py index aeceabd42b..74329566d6 100644 --- a/tests/system-test/2-query/ltrim.py +++ b/tests/system-test/2-query/ltrim.py @@ -45,7 +45,7 @@ class TDTestCase: ltrim_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) ltrim_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) ltrim_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) - # ltrim_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + ltrim_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) ltrim_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL ) ltrim_condition.extend( f"concat( {char_col}, {char_col_2} ) " for char_col_2 in CHAR_COL ) From 96e08600b632d2504ddd505235b01f1dde7dd128 Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 00:09:41 +0800 Subject: [PATCH 22/25] fix case --- tests/system-test/2-query/ltrim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/system-test/2-query/ltrim.py b/tests/system-test/2-query/ltrim.py index 74329566d6..af49811a29 100644 --- a/tests/system-test/2-query/ltrim.py +++ b/tests/system-test/2-query/ltrim.py @@ -51,7 +51,7 @@ class TDTestCase: for num_col in NUM_COL: ltrim_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) - ltrim_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + ltrim_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL if num_col is not FLOAT_COL and num_col is not DOUBLE_COL) ltrim_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) From eb8d630d2ceac2b2dd1785589dd76ddeaf96c8ab Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 00:20:40 +0800 Subject: [PATCH 23/25] fix case --- tests/system-test/2-query/ltrim.py | 6 +++--- tests/system-test/2-query/rtrim.py | 6 +++--- tests/system-test/2-query/substr.py | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/system-test/2-query/ltrim.py b/tests/system-test/2-query/ltrim.py index af49811a29..ef3fd3de21 100644 --- a/tests/system-test/2-query/ltrim.py +++ b/tests/system-test/2-query/ltrim.py @@ -45,7 +45,7 @@ class TDTestCase: ltrim_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) ltrim_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) ltrim_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) - ltrim_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + # ltrim_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) ltrim_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) 
) " for char_col_2 in CHAR_COL ) ltrim_condition.extend( f"concat( {char_col}, {char_col_2} ) " for char_col_2 in CHAR_COL ) @@ -76,7 +76,7 @@ class TDTestCase: tdSql.query(f"select ltrim( {condition}) , {condition} from {tbname} ") for j in range(tdSql.queryRows): - tdSql.checkData(j,0, tdSql.getData(j,1).lstrip()) + tdSql.checkData(j,0, tdSql.getData(j,1).lstrip()) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None) [ tdSql.query(f"select ltrim({condition}) from {tbname} {where_condition} {group} ") for group in groups ] @@ -125,7 +125,7 @@ class TDTestCase: tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: self.__ltrim_check(tb) - tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") diff --git a/tests/system-test/2-query/rtrim.py b/tests/system-test/2-query/rtrim.py index 3cced8dbdb..f99145b7b1 100644 --- a/tests/system-test/2-query/rtrim.py +++ b/tests/system-test/2-query/rtrim.py @@ -51,7 +51,7 @@ class TDTestCase: for num_col in NUM_COL: rtrim_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) - rtrim_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + rtrim_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL if num_col is not FLOAT_COL and num_col is not DOUBLE_COL ) rtrim_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) @@ -76,7 +76,7 @@ class TDTestCase: tdSql.query(f"select rtrim( {condition}) , {condition} from {tbname} ") for j in range(tdSql.queryRows): - tdSql.checkData(j,0, tdSql.getData(j,1).rstrip()) + tdSql.checkData(j,0, tdSql.getData(j,1).rstrip()) if tdSql.getData(j,1) else tdSql.checkData(j, 0, None) [ tdSql.query(f"select rtrim({condition}) from {tbname} {where_condition} {group} ") for group in groups ] @@ -125,7 +125,7 @@ class TDTestCase: tbname = ["ct1", "ct2", "ct4", "t1", "stb1"] for tb in tbname: self.__rtrim_check(tb) - tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========") + tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========") def __test_error(self): tdLog.printNoPrefix("==========err sql condition check , must return error==========") diff --git a/tests/system-test/2-query/substr.py b/tests/system-test/2-query/substr.py index 5fed3762e8..1b3b5895f4 100644 --- a/tests/system-test/2-query/substr.py +++ b/tests/system-test/2-query/substr.py @@ -51,7 +51,7 @@ class TDTestCase: for num_col in NUM_COL: substr_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL ) - substr_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL ) + substr_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL if num_col is not FLOAT_COL and num_col is not DOUBLE_COL) substr_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL ) From b82befef7154e1b677acb87a4f4b9dc497c4984a Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 00:27:52 +0800 Subject: [PATCH 24/25] fix case --- tests/system-test/2-query/ltrim.py | 10 +++++----- tests/system-test/2-query/rtrim.py | 10 +++++----- 
tests/system-test/2-query/substr.py | 14 +++++++------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tests/system-test/2-query/ltrim.py b/tests/system-test/2-query/ltrim.py index ef3fd3de21..15f40a09c3 100644 --- a/tests/system-test/2-query/ltrim.py +++ b/tests/system-test/2-query/ltrim.py @@ -251,13 +251,13 @@ class TDTestCase: tdLog.printNoPrefix("==========step3:all check") self.all_test() - # tdDnodes.stop(1) - # tdDnodes.start(1) + tdDnodes.stop(1) + tdDnodes.start(1) - # tdSql.execute("use db") + tdSql.execute("use db") - # tdLog.printNoPrefix("==========step4:after wal, all check again ") - # self.all_test() + tdLog.printNoPrefix("==========step4:after wal, all check again ") + self.all_test() def stop(self): tdSql.close() diff --git a/tests/system-test/2-query/rtrim.py b/tests/system-test/2-query/rtrim.py index f99145b7b1..30624792cc 100644 --- a/tests/system-test/2-query/rtrim.py +++ b/tests/system-test/2-query/rtrim.py @@ -251,13 +251,13 @@ class TDTestCase: tdLog.printNoPrefix("==========step3:all check") self.all_test() - # tdDnodes.stop(1) - # tdDnodes.start(1) + tdDnodes.stop(1) + tdDnodes.start(1) - # tdSql.execute("use db") + tdSql.execute("use db") - # tdLog.printNoPrefix("==========step4:after wal, all check again ") - # self.all_test() + tdLog.printNoPrefix("==========step4:after wal, all check again ") + self.all_test() def stop(self): tdSql.close() diff --git a/tests/system-test/2-query/substr.py b/tests/system-test/2-query/substr.py index 1b3b5895f4..e78606826b 100644 --- a/tests/system-test/2-query/substr.py +++ b/tests/system-test/2-query/substr.py @@ -66,7 +66,7 @@ class TDTestCase: def __group_condition(self, col, having = ""): return f" group by {col} having {having}" if having else f" group by {col} " - def __substr_check(self, tbname,pos, lens=2): + def __substr_check(self, tbname,pos, lens=None): substr_condition = self.__substr_condition() for condition in substr_condition: where_condition = self.__where_condition(condition) @@ -96,7 +96,7 @@ class TDTestCase: ) ) - sqls.extend( f"select substr( {char_col} , {num_col} ) from {tbname} " for char_col in CHAR_COL ) + sqls.extend( f"select substr( {char_col} + {num_col} ) from {tbname} " for char_col in CHAR_COL ) sqls.extend( f"select substr( {num_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL ) sqls.extend( f"select substr( {num_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL ) @@ -255,13 +255,13 @@ class TDTestCase: tdLog.printNoPrefix("==========step3:all check") self.all_test() - # tdDnodes.stop(1) - # tdDnodes.start(1) + tdDnodes.stop(1) + tdDnodes.start(1) - # tdSql.execute("use db") + tdSql.execute("use db") - # tdLog.printNoPrefix("==========step4:after wal, all check again ") - # self.all_test() + tdLog.printNoPrefix("==========step4:after wal, all check again ") + self.all_test() def stop(self): tdSql.close() From 13dbfb1490e1b94ad566df8bb86c015f89304a8b Mon Sep 17 00:00:00 2001 From: cpwu Date: Mon, 9 May 2022 10:20:11 +0800 Subject: [PATCH 25/25] test: add ltrim add rtrim to CI --- tests/system-test/fulltest.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/system-test/fulltest.sh b/tests/system-test/fulltest.sh index cb9d472116..4eef6a943f 100755 --- a/tests/system-test/fulltest.sh +++ b/tests/system-test/fulltest.sh @@ -11,6 +11,8 @@ python3 ./test.py -f 0-others/telemetry.py #python3 ./test.py -f 2-query/between.py python3 ./test.py -f 2-query/distinct.py python3 ./test.py -f 2-query/varchar.py +python3 ./test.py -f 2-query/ltrim.py 
+python3 ./test.py -f 2-query/rtrim.py
 python3 ./test.py -f 2-query/timezone.py
 python3 ./test.py -f 2-query/Now.py