Merge pull request #13032 from taosdata/cpwu/3.0

test: add hyperloglog case to CI

Commit: 8bf795d8ca
@@ -175,16 +175,17 @@ class TDTestCase:
        tdLog.printNoPrefix("==========step10:invalid query type")

        tdSql.query("select * from supt where location between 'beijing' and 'shanghai'")
        tdSql.checkRows(23)
        # Any non-zero value is parsed as 1, so "between <negative value> and 0" is parsed as "between 1 and 0"
        tdSql.query("select * from supt where isused between 0 and 1")
        tdSql.checkRows(23)
        tdSql.query("select * from supt where isused between -1 and 0")
        tdSql.checkRows(0)
        tdSql.error("select * from supt where isused between false and true")
        tdSql.query("select * from supt where family between '拖拉机' and '自行车'")
        tdSql.checkRows(23)
        # TODO tag is not finished
        # tdSql.query("select * from supt where location between 'beijing' and 'shanghai'")
        # tdSql.checkRows(23)
        # # Any non-zero value is parsed as 1, so "between <negative value> and 0" is parsed as "between 1 and 0"
        # tdSql.query("select * from supt where isused between 0 and 1")
        # tdSql.checkRows(23)
        # tdSql.query("select * from supt where isused between -1 and 0")
        # tdSql.checkRows(0)
        # tdSql.error("select * from supt where isused between false and true")
        # tdSql.query("select * from supt where family between '拖拉机' and '自行车'")
        # tdSql.checkRows(23)

        tdLog.printNoPrefix("==========step11:query HEX/OCT/BIN type")
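The comment in this hunk documents the parsing rule the bool-column assertions rely on: in a BETWEEN filter on a bool column, any non-zero bound is treated as 1. A standalone sketch of that rule (illustrative only, not TDengine code; the helper name is made up for this example):

# Hypothetical helper mirroring the comment above: non-zero bounds collapse to 1.
def normalize_bool_bound(value):
    return 1 if value != 0 else 0

# "between -1 and 0" is parsed as "between 1 and 0", which matches nothing,
# while "between 0 and 1" keeps its bounds and matches every row.
assert (normalize_bool_bound(-1), normalize_bool_bound(0)) == (1, 0)
assert (normalize_bool_bound(0), normalize_bool_bound(1)) == (0, 1)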
@@ -36,19 +36,19 @@ class TDTestCase:
            concat_condition.extend(
                (
                    char_col,
                    f"upper( {char_col} )",
                    # f"upper( {char_col} )",
                )
            )
            concat_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL)
            concat_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL )
            concat_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            concat_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            # concat_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            # concat_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            concat_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
            # concat_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
            concat_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL )

        for num_col in NUM_COL:
            concat_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            # concat_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            concat_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL if num_col is not FLOAT_COL and num_col is not DOUBLE_COL)

        concat_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL )
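For a rough sense of what this block generates (a sketch only, with stand-in values for the module-level column constants CHAR_COL / NUM_COL / TS_TYPE_COL), the resulting list mixes raw char columns with cast(... as binary(16)) expressions over the numeric and timestamp operands:

# Stand-ins for the module-level constants used above.
CHAR_COL = ["c8", "c9"]
NUM_COL = ["c1", "c5"]
TS_TYPE_COL = ["c10"]

concat_condition = []
for char_col in CHAR_COL:
    concat_condition.append(char_col)
    concat_condition.extend(f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL)
    concat_condition.extend(f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL)

print(concat_condition[:4])
# ['c8', 'cast( c1 as binary(16) ) ', 'cast( c5 as binary(16) ) ', 'cast( c10 as binary(16) )']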
@@ -96,7 +96,6 @@ class TDTestCase:

            [ tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ]


    def __concat_err_check(self,tbname):
        sqls = []

@@ -139,7 +138,11 @@ class TDTestCase:

    def __test_current(self):  # sourcery skip: use-itertools-product
        tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
        tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
        tbname = [
            "ct1",
            "ct2",
            "ct4",
        ]
        for tb in tbname:
            for i in range(2,8):
                self.__concat_check(tb,i)
@@ -147,7 +150,10 @@ class TDTestCase:

    def __test_error(self):
        tdLog.printNoPrefix("==========err sql condition check , must return error==========")
        tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
        tbname = [
            "t1",
            "stb1",
        ]

        for tb in tbname:
            for errsql in self.__concat_err_check(tb):
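This hunk is cut off inside the loop; the complete pattern appears in the new test file added below, where every statement produced by the error generator must be rejected and argument counts outside 2–8 are checked as well. Roughly:

# Sketch of the loop body this hunk truncates (the full version is in the new file below).
for tb in tbname:
    for errsql in self.__concat_err_check(tb):
        tdSql.error(sql=errsql)
    self.__concat_check(tb, 1)   # fewer than 2 concat arguments must fail
    self.__concat_check(tb, 9)   # more than 8 concat arguments must fail
    tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")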
@ -0,0 +1,293 @@
|
|||
from util.log import *
|
||||
from util.sql import *
|
||||
from util.cases import *
|
||||
from util.dnodes import *
|
||||
|
||||
|
||||
PRIMARY_COL = "ts"
|
||||
|
||||
INT_COL = "c1"
|
||||
BINT_COL = "c2"
|
||||
SINT_COL = "c3"
|
||||
TINT_COL = "c4"
|
||||
FLOAT_COL = "c5"
|
||||
DOUBLE_COL = "c6"
|
||||
BOOL_COL = "c7"
|
||||
|
||||
BINARY_COL = "c8"
|
||||
NCHAR_COL = "c9"
|
||||
TS_COL = "c10"
|
||||
|
||||
NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
|
||||
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
|
||||
BOOLEAN_COL = [ BOOL_COL, ]
|
||||
TS_TYPE_COL = [ TS_COL, ]
|
||||
|
||||
|
||||
class TDTestCase:
|
||||
|
||||
def init(self, conn, logSql):
|
||||
tdLog.debug(f"start to excute {__file__}")
|
||||
tdSql.init(conn.cursor())
|
||||
|
||||
def __concat_condition(self): # sourcery skip: extract-method
|
||||
concat_condition = []
|
||||
for char_col in CHAR_COL:
|
||||
concat_condition.extend(
|
||||
(
|
||||
char_col,
|
||||
# f"upper( {char_col} )",
|
||||
)
|
||||
)
|
||||
concat_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL)
|
||||
concat_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL )
|
||||
# concat_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
|
||||
# concat_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
|
||||
concat_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
|
||||
# concat_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
|
||||
concat_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL )
|
||||
|
||||
for num_col in NUM_COL:
|
||||
# concat_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
|
||||
concat_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL if num_col is not FLOAT_COL and num_col is not DOUBLE_COL)
|
||||
|
||||
concat_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL )
|
||||
|
||||
concat_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
|
||||
|
||||
return concat_condition
|
||||
|
||||
def __where_condition(self, col):
|
||||
# return f" where count({col}) > 0 "
|
||||
return ""
|
||||
|
||||
def __concat_num(self, concat_lists, num):
|
||||
return [ concat_lists[i] for i in range(num) ]
|
||||
|
||||
|
||||
def __group_condition(self, col, having = ""):
|
||||
return f" group by {col} having {having}" if having else f" group by {col} "
|
||||
|
||||
def __concat_check(self, tbname, num):
|
||||
concat_condition = self.__concat_condition()
|
||||
for i in range(len(concat_condition) - num + 1 ):
|
||||
condition = self.__concat_num(concat_condition[i:], num)
|
||||
concat_filter = f"concat( {','.join( condition ) }) "
|
||||
where_condition = self.__where_condition(condition[0])
|
||||
# group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " )
|
||||
concat_group_having = self.__group_condition(concat_filter, having=f"{concat_filter} is not null " )
|
||||
# group_no_having= self.__group_condition(condition[0] )
|
||||
concat_group_no_having= self.__group_condition(concat_filter)
|
||||
groups = ["", concat_group_having, concat_group_no_having]
|
||||
|
||||
if num > 8 or num < 2 :
|
||||
[tdSql.error(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ]
|
||||
break
|
||||
|
||||
tdSql.query(f"select {','.join(condition)} from {tbname} ")
|
||||
rows = tdSql.queryRows
|
||||
concat_data = []
|
||||
for m in range(rows):
|
||||
concat_data.append("".join(tdSql.queryResult[m])) if tdSql.getData(m, 0) else concat_data.append(None)
|
||||
tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} ")
|
||||
tdSql.checkRows(rows)
|
||||
for j in range(tdSql.queryRows):
|
||||
assert tdSql.getData(j, 0) in concat_data
|
||||
|
||||
[ tdSql.query(f"select concat( {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ]
|
||||
|
||||
|
||||
def __concat_err_check(self,tbname):
|
||||
sqls = []
|
||||
|
||||
for char_col in CHAR_COL:
|
||||
sqls.extend(
|
||||
(
|
||||
f"select concat( {char_col} ) from {tbname} ",
|
||||
f"select concat(ceil( {char_col} )) from {tbname} ",
|
||||
f"select {char_col} from {tbname} group by concat( {char_col} ) ",
|
||||
)
|
||||
)
|
||||
|
||||
sqls.extend( f"select concat( {char_col} , {num_col} ) from {tbname} " for num_col in NUM_COL )
|
||||
sqls.extend( f"select concat( {char_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
|
||||
sqls.extend( f"select concat( {char_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL )
|
||||
|
||||
sqls.extend( f"select concat( {ts_col}, {bool_col} ) from {tbname} " for ts_col in TS_TYPE_COL for bool_col in BOOLEAN_COL )
|
||||
sqls.extend( f"select concat( {num_col} , {ts_col} ) from {tbname} " for num_col in NUM_COL for ts_col in TS_TYPE_COL)
|
||||
sqls.extend( f"select concat( {num_col} , {bool_col} ) from {tbname} " for num_col in NUM_COL for bool_col in BOOLEAN_COL)
|
||||
sqls.extend( f"select concat( {num_col} , {num_col} ) from {tbname} " for num_col in NUM_COL for num_col in NUM_COL)
|
||||
sqls.extend( f"select concat( {ts_col}, {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL for ts_col in TS_TYPE_COL )
|
||||
sqls.extend( f"select concat( {bool_col}, {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL for bool_col in BOOLEAN_COL )
|
||||
|
||||
sqls.extend( f"select concat( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL )
|
||||
sqls.extend( f"select concat({char_col}, 11) from {tbname} " for char_col in CHAR_COL )
|
||||
sqls.extend( f"select concat({num_col}, '1') from {tbname} " for num_col in NUM_COL )
|
||||
sqls.extend( f"select concat({ts_col}, '1') from {tbname} " for ts_col in TS_TYPE_COL )
|
||||
sqls.extend( f"select concat({bool_col}, '1') from {tbname} " for bool_col in BOOLEAN_COL )
|
||||
sqls.extend( f"select concat({char_col},'1') from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL )
|
||||
sqls.extend(
|
||||
(
|
||||
f"select concat() from {tbname} ",
|
||||
f"select concat(*) from {tbname} ",
|
||||
f"select concat(ccccccc) from {tbname} ",
|
||||
f"select concat(111) from {tbname} ",
|
||||
)
|
||||
)
|
||||
|
||||
return sqls
|
||||
|
||||
def __test_current(self): # sourcery skip: use-itertools-product
|
||||
tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
|
||||
tbname = [
|
||||
"t1",
|
||||
"stb1",
|
||||
]
|
||||
for tb in tbname:
|
||||
for i in range(2,8):
|
||||
self.__concat_check(tb,i)
|
||||
tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========")
|
||||
|
||||
def __test_error(self):
|
||||
tdLog.printNoPrefix("==========err sql condition check , must return error==========")
|
||||
tbname = [
|
||||
"ct1",
|
||||
"ct4",
|
||||
]
|
||||
|
||||
for tb in tbname:
|
||||
for errsql in self.__concat_err_check(tb):
|
||||
tdSql.error(sql=errsql)
|
||||
self.__concat_check(tb,1)
|
||||
self.__concat_check(tb,9)
|
||||
tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")
|
||||
|
||||
|
||||
def all_test(self):
|
||||
self.__test_current()
|
||||
self.__test_error()
|
||||
|
||||
|
||||
def __create_tb(self):
|
||||
tdSql.prepare()
|
||||
|
||||
tdLog.printNoPrefix("==========step1:create table")
|
||||
create_stb_sql = f'''create table stb1(
|
||||
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
|
||||
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
|
||||
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
|
||||
) tags (t1 int)
|
||||
'''
|
||||
create_ntb_sql = f'''create table t1(
|
||||
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
|
||||
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
|
||||
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
|
||||
)
|
||||
'''
|
||||
tdSql.execute(create_stb_sql)
|
||||
tdSql.execute(create_ntb_sql)
|
||||
|
||||
for i in range(4):
|
||||
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
|
||||
|
||||
def __insert_data(self, rows):
|
||||
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
|
||||
for i in range(rows):
|
||||
tdSql.execute(
|
||||
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
|
||||
)
|
||||
tdSql.execute(
|
||||
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
|
||||
)
|
||||
tdSql.execute(
|
||||
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
|
||||
)
|
||||
tdSql.execute(
|
||||
f'''insert into ct1 values
|
||||
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
|
||||
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
|
||||
'''
|
||||
)
|
||||
|
||||
tdSql.execute(
|
||||
f'''insert into ct4 values
|
||||
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
(
|
||||
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
|
||||
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
|
||||
)
|
||||
(
|
||||
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
|
||||
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
|
||||
)
|
||||
'''
|
||||
)
|
||||
|
||||
tdSql.execute(
|
||||
f'''insert into ct2 values
|
||||
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
(
|
||||
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
|
||||
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
|
||||
)
|
||||
(
|
||||
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
|
||||
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
|
||||
)
|
||||
'''
|
||||
)
|
||||
|
||||
for i in range(rows):
|
||||
insert_data = f'''insert into t1 values
|
||||
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
|
||||
"binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
|
||||
'''
|
||||
tdSql.execute(insert_data)
|
||||
tdSql.execute(
|
||||
f'''insert into t1 values
|
||||
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
|
||||
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
|
||||
"binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
|
||||
)
|
||||
(
|
||||
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
|
||||
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
|
||||
"binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
|
||||
)
|
||||
'''
|
||||
)
|
||||
|
||||
def run(self):
|
||||
tdSql.prepare()
|
||||
|
||||
tdLog.printNoPrefix("==========step1:create table")
|
||||
self.__create_tb()
|
||||
|
||||
tdLog.printNoPrefix("==========step2:insert data")
|
||||
self.rows = 10
|
||||
self.__insert_data(self.rows)
|
||||
|
||||
tdLog.printNoPrefix("==========step3:all check")
|
||||
self.all_test()
|
||||
|
||||
tdDnodes.stop(1)
|
||||
tdDnodes.start(1)
|
||||
|
||||
tdSql.execute("use db")
|
||||
|
||||
tdLog.printNoPrefix("==========step4:after wal, all check again ")
|
||||
self.all_test()
|
||||
|
||||
def stop(self):
|
||||
tdSql.close()
|
||||
tdLog.success(f"{__file__} successfully executed")
|
||||
|
||||
tdCases.addLinux(__file__, TDTestCase())
|
||||
tdCases.addWindows(__file__, TDTestCase())
|
|
@@ -36,22 +36,22 @@ class TDTestCase:
            concat_ws_condition.extend(
                (
                    char_col,
                    f"upper( {char_col} )",
                    # f"upper( {char_col} )",
                )
            )
            concat_ws_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL)
            concat_ws_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL )
            concat_ws_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            concat_ws_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            # concat_ws_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            # concat_ws_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            concat_ws_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
            # concat_ws_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
            concat_ws_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL )

        for num_col in NUM_COL:
            concat_ws_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            # concat_ws_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
            concat_ws_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL if num_col is not FLOAT_COL and num_col is not DOUBLE_COL)

        concat_ws_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL )
        # concat_ws_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL )

        concat_ws_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
@@ -139,7 +139,10 @@ class TDTestCase:

    def __test_current(self):  # sourcery skip: use-itertools-product
        tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
        tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
        tbname = [
            "t1",
            "stb1"
        ]
        for tb in tbname:
            for i in range(2,8):
                self.__concat_ws_check(tb,i)
@@ -147,7 +150,11 @@ class TDTestCase:

    def __test_error(self):
        tdLog.printNoPrefix("==========err sql condition check , must return error==========")
        tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
        tbname = [
            "ct1",
            "ct2",
            "ct4",
        ]

        for tb in tbname:
            for errsql in self.__concat_ws_err_check(tb):
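For context on what the concat_ws cases below add over the plain concat ones: concat_ws takes a separator as its first argument, so the expected per-row value in __concat_ws_check is assembled with "_".join(...) instead of "".join(...). A small illustration (values are made up; c8/c9 are this suite's binary/nchar columns):

# Illustrative only: how one row's expected value differs between the two checks.
row = ("binary0", "nchar_0")
expected_concat = "".join(row)       # concat(c8, c9)         -> 'binary0nchar_0'
expected_concat_ws = "_".join(row)   # concat_ws('_', c8, c9) -> 'binary0_nchar_0'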
@ -0,0 +1,294 @@
|
|||
from util.log import *
|
||||
from util.sql import *
|
||||
from util.cases import *
|
||||
from util.dnodes import *
|
||||
|
||||
|
||||
PRIMARY_COL = "ts"
|
||||
|
||||
INT_COL = "c1"
|
||||
BINT_COL = "c2"
|
||||
SINT_COL = "c3"
|
||||
TINT_COL = "c4"
|
||||
FLOAT_COL = "c5"
|
||||
DOUBLE_COL = "c6"
|
||||
BOOL_COL = "c7"
|
||||
|
||||
BINARY_COL = "c8"
|
||||
NCHAR_COL = "c9"
|
||||
TS_COL = "c10"
|
||||
|
||||
NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
|
||||
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
|
||||
BOOLEAN_COL = [ BOOL_COL, ]
|
||||
TS_TYPE_COL = [ TS_COL, ]
|
||||
|
||||
|
||||
class TDTestCase:
|
||||
|
||||
def init(self, conn, logSql):
|
||||
tdLog.debug(f"start to excute {__file__}")
|
||||
tdSql.init(conn.cursor())
|
||||
|
||||
def __concat_ws_condition(self): # sourcery skip: extract-method
|
||||
concat_ws_condition = []
|
||||
for char_col in CHAR_COL:
|
||||
concat_ws_condition.extend(
|
||||
(
|
||||
char_col,
|
||||
# f"upper( {char_col} )",
|
||||
)
|
||||
)
|
||||
concat_ws_condition.extend( f"cast( {num_col} as binary(16) ) " for num_col in NUM_COL)
|
||||
concat_ws_condition.extend( f"cast( {char_col} + {num_col} as binary(16) ) " for num_col in NUM_COL )
|
||||
# concat_ws_condition.extend( f"cast( {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
|
||||
# concat_ws_condition.extend( f"cast( {char_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
|
||||
concat_ws_condition.extend( f"cast( {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
|
||||
# concat_ws_condition.extend( f"cast( {char_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL )
|
||||
concat_ws_condition.extend( f"cast( {char_col} + {char_col_2} as binary(16) ) " for char_col_2 in CHAR_COL )
|
||||
|
||||
for num_col in NUM_COL:
|
||||
# concat_ws_condition.extend( f"cast( {num_col} + {bool_col} as binary(16) )" for bool_col in BOOLEAN_COL )
|
||||
concat_ws_condition.extend( f"cast( {num_col} + {ts_col} as binary(16) )" for ts_col in TS_TYPE_COL if num_col is not FLOAT_COL and num_col is not DOUBLE_COL)
|
||||
|
||||
# concat_ws_condition.extend( f"cast( {bool_col} + {ts_col} as binary(16) )" for bool_col in BOOLEAN_COL for ts_col in TS_TYPE_COL )
|
||||
|
||||
concat_ws_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
|
||||
|
||||
return concat_ws_condition
|
||||
|
||||
def __where_condition(self, col):
|
||||
# return f" where count({col}) > 0 "
|
||||
return ""
|
||||
|
||||
def __concat_ws_num(self, concat_ws_lists, num):
|
||||
return [ concat_ws_lists[i] for i in range(num) ]
|
||||
|
||||
|
||||
def __group_condition(self, col, having = ""):
|
||||
return f" group by {col} having {having}" if having else f" group by {col} "
|
||||
|
||||
def __concat_ws_check(self, tbname, num):
|
||||
concat_ws_condition = self.__concat_ws_condition()
|
||||
for i in range(len(concat_ws_condition) - num + 1 ):
|
||||
condition = self.__concat_ws_num(concat_ws_condition[i:], num)
|
||||
concat_ws_filter = f"concat_ws('_', {','.join( condition ) }) "
|
||||
where_condition = self.__where_condition(condition[0])
|
||||
# group_having = self.__group_condition(condition[0], having=f"{condition[0]} is not null " )
|
||||
concat_ws_group_having = self.__group_condition(concat_ws_filter, having=f"{concat_ws_filter} is not null " )
|
||||
# group_no_having= self.__group_condition(condition[0] )
|
||||
concat_ws_group_no_having= self.__group_condition(concat_ws_filter)
|
||||
groups = ["", concat_ws_group_having, concat_ws_group_no_having]
|
||||
|
||||
if num > 8 or num < 2 :
|
||||
[tdSql.error(f"select concat_ws('_', {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ]
|
||||
break
|
||||
|
||||
tdSql.query(f"select {','.join(condition)} from {tbname} ")
|
||||
rows = tdSql.queryRows
|
||||
concat_ws_data = []
|
||||
for m in range(rows):
|
||||
concat_ws_data.append("_".join(tdSql.queryResult[m])) if tdSql.getData(m, 0) else concat_ws_data.append(None)
|
||||
tdSql.query(f"select concat_ws('_', {','.join( condition ) }) from {tbname} ")
|
||||
tdSql.checkRows(rows)
|
||||
for j in range(tdSql.queryRows):
|
||||
assert tdSql.getData(j, 0) in concat_ws_data
|
||||
|
||||
[ tdSql.query(f"select concat_ws('_', {','.join( condition ) }) from {tbname} {where_condition} {group} ") for group in groups ]
|
||||
|
||||
|
||||
def __concat_ws_err_check(self,tbname):
|
||||
sqls = []
|
||||
|
||||
for char_col in CHAR_COL:
|
||||
sqls.extend(
|
||||
(
|
||||
f"select concat_ws('_', {char_col} ) from {tbname} ",
|
||||
f"select concat_ws('_', ceil( {char_col} )) from {tbname} ",
|
||||
f"select {char_col} from {tbname} group by concat_ws('_', {char_col} ) ",
|
||||
)
|
||||
)
|
||||
|
||||
sqls.extend( f"select concat_ws('_', {char_col} , {num_col} ) from {tbname} " for num_col in NUM_COL )
|
||||
sqls.extend( f"select concat_ws('_', {char_col} , {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
|
||||
sqls.extend( f"select concat_ws('_', {char_col} , {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL )
|
||||
|
||||
sqls.extend( f"select concat_ws('_', {ts_col}, {bool_col} ) from {tbname} " for ts_col in TS_TYPE_COL for bool_col in BOOLEAN_COL )
|
||||
sqls.extend( f"select concat_ws('_', {num_col} , {ts_col} ) from {tbname} " for num_col in NUM_COL for ts_col in TS_TYPE_COL)
|
||||
sqls.extend( f"select concat_ws('_', {num_col} , {bool_col} ) from {tbname} " for num_col in NUM_COL for bool_col in BOOLEAN_COL)
|
||||
sqls.extend( f"select concat_ws('_', {num_col} , {num_col} ) from {tbname} " for num_col in NUM_COL for num_col in NUM_COL)
|
||||
sqls.extend( f"select concat_ws('_', {ts_col}, {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL for ts_col in TS_TYPE_COL )
|
||||
sqls.extend( f"select concat_ws('_', {bool_col}, {bool_col} ) from {tbname} " for bool_col in BOOLEAN_COL for bool_col in BOOLEAN_COL )
|
||||
|
||||
sqls.extend( f"select concat_ws('_', {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL )
|
||||
sqls.extend( f"select concat_ws('_', {char_col}, 11) from {tbname} " for char_col in CHAR_COL )
|
||||
sqls.extend( f"select concat_ws('_', {num_col}, '1') from {tbname} " for num_col in NUM_COL )
|
||||
sqls.extend( f"select concat_ws('_', {ts_col}, '1') from {tbname} " for ts_col in TS_TYPE_COL )
|
||||
sqls.extend( f"select concat_ws('_', {bool_col}, '1') from {tbname} " for bool_col in BOOLEAN_COL )
|
||||
sqls.extend( f"select concat_ws('_', {char_col},'1') from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL )
|
||||
sqls.extend(
|
||||
(
|
||||
f"select concat_ws('_', ) from {tbname} ",
|
||||
f"select concat_ws('_', *) from {tbname} ",
|
||||
f"select concat_ws('_', ccccccc) from {tbname} ",
|
||||
f"select concat_ws('_', 111) from {tbname} ",
|
||||
)
|
||||
)
|
||||
|
||||
return sqls
|
||||
|
||||
def __test_current(self): # sourcery skip: use-itertools-product
|
||||
tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
|
||||
tbname = [
|
||||
"ct1",
|
||||
"ct2",
|
||||
"ct4",
|
||||
]
|
||||
for tb in tbname:
|
||||
for i in range(2,8):
|
||||
self.__concat_ws_check(tb,i)
|
||||
tdLog.printNoPrefix(f"==========current sql condition check in {tb}, col num: {i} over==========")
|
||||
|
||||
def __test_error(self):
|
||||
tdLog.printNoPrefix("==========err sql condition check , must return error==========")
|
||||
tbname = [
|
||||
"t1",
|
||||
"stb1"
|
||||
]
|
||||
|
||||
for tb in tbname:
|
||||
for errsql in self.__concat_ws_err_check(tb):
|
||||
tdSql.error(sql=errsql)
|
||||
self.__concat_ws_check(tb,1)
|
||||
self.__concat_ws_check(tb,9)
|
||||
tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")
|
||||
|
||||
|
||||
def all_test(self):
|
||||
self.__test_current()
|
||||
self.__test_error()
|
||||
|
||||
|
||||
def __create_tb(self):
|
||||
tdSql.prepare()
|
||||
|
||||
tdLog.printNoPrefix("==========step1:create table")
|
||||
create_stb_sql = f'''create table stb1(
|
||||
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
|
||||
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
|
||||
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
|
||||
) tags (t1 int)
|
||||
'''
|
||||
create_ntb_sql = f'''create table t1(
|
||||
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
|
||||
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
|
||||
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
|
||||
)
|
||||
'''
|
||||
tdSql.execute(create_stb_sql)
|
||||
tdSql.execute(create_ntb_sql)
|
||||
|
||||
for i in range(4):
|
||||
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
|
||||
|
||||
def __insert_data(self, rows):
|
||||
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
|
||||
for i in range(rows):
|
||||
tdSql.execute(
|
||||
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
|
||||
)
|
||||
tdSql.execute(
|
||||
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
|
||||
)
|
||||
tdSql.execute(
|
||||
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
|
||||
)
|
||||
tdSql.execute(
|
||||
f'''insert into ct1 values
|
||||
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
|
||||
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
|
||||
'''
|
||||
)
|
||||
|
||||
tdSql.execute(
|
||||
f'''insert into ct4 values
|
||||
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
(
|
||||
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
|
||||
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
|
||||
)
|
||||
(
|
||||
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
|
||||
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
|
||||
)
|
||||
'''
|
||||
)
|
||||
|
||||
tdSql.execute(
|
||||
f'''insert into ct2 values
|
||||
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
(
|
||||
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
|
||||
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
|
||||
)
|
||||
(
|
||||
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
|
||||
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
|
||||
)
|
||||
'''
|
||||
)
|
||||
|
||||
for i in range(rows):
|
||||
insert_data = f'''insert into t1 values
|
||||
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
|
||||
"binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
|
||||
'''
|
||||
tdSql.execute(insert_data)
|
||||
tdSql.execute(
|
||||
f'''insert into t1 values
|
||||
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
|
||||
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
|
||||
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
|
||||
"binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
|
||||
)
|
||||
(
|
||||
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
|
||||
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
|
||||
"binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
|
||||
)
|
||||
'''
|
||||
)
|
||||
|
||||
def run(self):
|
||||
tdSql.prepare()
|
||||
|
||||
tdLog.printNoPrefix("==========step1:create table")
|
||||
self.__create_tb()
|
||||
|
||||
tdLog.printNoPrefix("==========step2:insert data")
|
||||
self.rows = 10
|
||||
self.__insert_data(self.rows)
|
||||
|
||||
tdLog.printNoPrefix("==========step3:all check")
|
||||
self.all_test()
|
||||
|
||||
tdDnodes.stop(1)
|
||||
tdDnodes.start(1)
|
||||
|
||||
tdSql.execute("use db")
|
||||
|
||||
tdLog.printNoPrefix("==========step4:after wal, all check again ")
|
||||
self.all_test()
|
||||
|
||||
def stop(self):
|
||||
tdSql.close()
|
||||
tdLog.success(f"{__file__} successfully executed")
|
||||
|
||||
tdCases.addLinux(__file__, TDTestCase())
|
||||
tdCases.addWindows(__file__, TDTestCase())
|
File diff suppressed because it is too large
@@ -0,0 +1,361 @@
import datetime

from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *

PRIMARY_COL = "ts"

INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"

BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"

NUM_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
BOOLEAN_COL = [ BOOL_COL, ]
TS_TYPE_COL = [ TS_COL, ]

ALL_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, BINARY_COL, NCHAR_COL, TS_COL ]

class TDTestCase:

    def init(self, conn, logSql):
        tdLog.debug(f"start to execute {__file__}")
        tdSql.init(conn.cursor())

    def __query_condition(self,tbname):
        query_condition = [f"cast({col} as bigint)" for col in ALL_COL]
        for num_col in NUM_COL:
            query_condition.extend(
                (
                    f"{tbname}.{num_col}",
                    f"abs( {tbname}.{num_col} )",
                    f"acos( {tbname}.{num_col} )",
                    f"asin( {tbname}.{num_col} )",
                    f"atan( {tbname}.{num_col} )",
                    f"avg( {tbname}.{num_col} )",
                    f"ceil( {tbname}.{num_col} )",
                    f"cos( {tbname}.{num_col} )",
                    f"count( {tbname}.{num_col} )",
                    f"floor( {tbname}.{num_col} )",
                    f"log( {tbname}.{num_col}, {tbname}.{num_col})",
                    f"max( {tbname}.{num_col} )",
                    f"min( {tbname}.{num_col} )",
                    f"pow( {tbname}.{num_col}, 2)",
                    f"round( {tbname}.{num_col} )",
                    f"sum( {tbname}.{num_col} )",
                    f"sin( {tbname}.{num_col} )",
                    f"sqrt( {tbname}.{num_col} )",
                    f"tan( {tbname}.{num_col} )",
                    f"cast( {tbname}.{num_col} as timestamp)",
                )
            )
            query_condition.extend((f"{num_col} + {any_col}" for any_col in ALL_COL))
        for char_col in CHAR_COL:
            query_condition.extend(
                (
                    f"count({tbname}.{char_col})",
                    f"sum(cast({tbname}.{char_col}) as bigint)",
                    f"max(cast({tbname}.{char_col}) as bigint)",
                    f"min(cast({tbname}.{char_col}) as bigint)",
                    f"avg(cast({tbname}.{char_col}) as bigint)",
                )
            )
        # query_condition.extend(
        #     (
        #         1010,
        #     )
        # )

        return query_condition

    def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
        table_reference = tb_list[0]
        join_condition = table_reference
        join = "inner join" if INNER else "join"
        for i in range(len(tb_list[1:])):
            join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"

        return join_condition

    def __where_condition(self, col=None, tbname=None, query_conditon=None):
        if query_conditon and isinstance(query_conditon, str):
            if query_conditon.startswith("count"):
                query_conditon = query_conditon[6:-1]
            elif query_conditon.startswith("max"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("sum"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("min"):
                query_conditon = query_conditon[4:-1]

        if query_conditon:
            return f" where {query_conditon} is not null"
        if col in NUM_COL:
            return f" where abs( {tbname}.{col} ) >= 0"
        if col in CHAR_COL:
            return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
        if col in BOOLEAN_COL:
            return f" where {tbname}.{col} in (false, true) "
        if col in TS_TYPE_COL or col in PRIMARY_COL:
            return f" where cast( {tbname}.{col} as binary(16) ) is not null "

        return ""

    def __group_condition(self, col, having = None):
        if isinstance(col, str):
            if col.startswith("count"):
                col = col[6:-1]
            elif col.startswith("max"):
                col = col[4:-1]
            elif col.startswith("sum"):
                col = col[4:-1]
            elif col.startswith("min"):
                col = col[4:-1]
        return f" group by {col} having {having}" if having else f" group by {col} "

    def __single_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
        if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
            return
        return f"select hyperloglog({select_clause}) from {from_clause} {where_condition} {group_condition}"
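# Note: the slicing in __where_condition and __group_condition above is a fixed-width
# prefix strip: [6:-1] removes a leading "count(" plus the trailing ")", and [4:-1]
# does the same for "max(", "min(" and "sum(". A standalone illustration (not part of
# the test case itself):
def strip_aggregate(expr):
    if expr.startswith("count"):
        return expr[6:-1]                      # "count(c1)" -> "c1"
    if expr.startswith(("max", "min", "sum")):
        return expr[4:-1]                      # "max(c1)"   -> "c1"
    return expr

assert strip_aggregate("count(c1)") == "c1"
assert strip_aggregate("sum(c6)") == "c6"
assert strip_aggregate("abs( t1.c1 )") == "abs( t1.c1 )"   # non-aggregates pass through unchanged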
    @property
    def __tb_list(self):
        return [
            "ct1",
            "ct4",
            "t1",
            "ct2",
            "stb1",
        ]

    def sql_list(self):
        sqls = []
        __no_join_tblist = self.__tb_list
        for tb in __no_join_tblist:
            select_claus_list = self.__query_condition(tb)
            for select_claus in select_claus_list:
                group_claus = self.__group_condition(col=select_claus)
                where_claus = self.__where_condition(query_conditon=select_claus)
                having_claus = self.__group_condition(col=select_claus, having=f"{select_claus} is not null")
                sqls.extend(
                    (
                        self.__single_sql(select_claus, tb, where_claus, having_claus),
                        self.__single_sql(select_claus, tb,),
                        self.__single_sql(select_claus, tb, where_condition=where_claus),
                        self.__single_sql(select_claus, tb, group_condition=group_claus),
                    )
                )

        # return filter(None, sqls)
        return list(filter(None, sqls))
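# Each entry that survives filter(None, ...) above is a complete HYPERLOGLOG statement.
# For example, taking select_claus = "ct1.c1" and tb = "ct1", the four variants would
# come out roughly as follows (whitespace approximated):
#   select hyperloglog(ct1.c1) from ct1 where ct1.c1 is not null group by ct1.c1 having ct1.c1 is not null
#   select hyperloglog(ct1.c1) from ct1
#   select hyperloglog(ct1.c1) from ct1 where ct1.c1 is not null
#   select hyperloglog(ct1.c1) from ct1 group by ct1.c1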
    def __get_type(self, col):
        if tdSql.cursor.istype(col, "BOOL"):
            return "BOOL"
        if tdSql.cursor.istype(col, "INT"):
            return "INT"
        if tdSql.cursor.istype(col, "BIGINT"):
            return "BIGINT"
        if tdSql.cursor.istype(col, "TINYINT"):
            return "TINYINT"
        if tdSql.cursor.istype(col, "SMALLINT"):
            return "SMALLINT"
        if tdSql.cursor.istype(col, "FLOAT"):
            return "FLOAT"
        if tdSql.cursor.istype(col, "DOUBLE"):
            return "DOUBLE"
        if tdSql.cursor.istype(col, "BINARY"):
            return "BINARY"
        if tdSql.cursor.istype(col, "NCHAR"):
            return "NCHAR"
        if tdSql.cursor.istype(col, "TIMESTAMP"):
            return "TIMESTAMP"
        if tdSql.cursor.istype(col, "JSON"):
            return "JSON"
        if tdSql.cursor.istype(col, "TINYINT UNSIGNED"):
            return "TINYINT UNSIGNED"
        if tdSql.cursor.istype(col, "SMALLINT UNSIGNED"):
            return "SMALLINT UNSIGNED"
        if tdSql.cursor.istype(col, "INT UNSIGNED"):
            return "INT UNSIGNED"
        if tdSql.cursor.istype(col, "BIGINT UNSIGNED"):
            return "BIGINT UNSIGNED"

    def spread_check(self):
        sqls = self.sql_list()
        tdLog.printNoPrefix("===step 1: current case, must return query OK")
        for i in range(len(sqls)):
            tdLog.info(f"sql: {sqls[i]}")
            tdSql.query(sqls[i])

    def __test_current(self):
        tdSql.query("select hyperloglog(ts) from ct1")
        tdSql.checkRows(1)
        tdSql.query("select hyperloglog(c1) from ct2")
        tdSql.checkRows(1)
        tdSql.query("select hyperloglog(c1) from ct4 group by c1")
        tdSql.checkRows(self.rows + 3)
        tdSql.query("select hyperloglog(c1) from ct4 group by c7")
        tdSql.checkRows(3)
        tdSql.query("select hyperloglog(ct2.c1) from ct4 join ct2 on ct4.ts=ct2.ts")
        tdSql.checkRows(1)
        tdSql.checkData(0, 0, self.rows + 2)
        tdSql.query("select hyperloglog(c1), c1 from stb1 group by c1")
        for i in range(tdSql.queryRows):
            tdSql.checkData(i, 0, 1) if tdSql.queryResult[i][1] is not None else tdSql.checkData(i, 0, 0)

        self.spread_check()
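# The expected figures in __test_current follow from the data loaded by __insert_data
# further down: each child table receives `rows` distinct loop values for c1, three
# all-NULL rows and two boundary rows, so grouping ct4 by c1 yields rows + 3 groups and
# the distinct non-NULL count seen through the join is rows + 2. A quick back-of-the-
# envelope check, assuming rows = 10 as set in run() (mirrors the data layout only,
# not the database):
rows = 10
c1_values = list(range(rows)) + [None, None, None] + [2**31 - 2**15, 2**31 - 2**16]
groups = set(c1_values)                                   # NULLs collapse into a single group
distinct_non_null = {v for v in c1_values if v is not None}
assert len(groups) == rows + 3                            # matches tdSql.checkRows(self.rows + 3)
assert len(distinct_non_null) == rows + 2                 # matches tdSql.checkData(0, 0, self.rows + 2)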
    def __test_error(self):

        tdLog.printNoPrefix("===step 0: err case, must return err")
        tdSql.error( "select hyperloglog() from ct1" )
        tdSql.error( "select hyperloglog(c1, c2) from ct2" )
        tdSql.error( "select hyperloglog(1) from ct2" )
        tdSql.error( f"select hyperloglog({NUM_COL[0]}, {NUM_COL[1]}) from ct4" )
        tdSql.error( ''' select hyperloglog(['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10'])
                    from ct1
                    where ['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10'] is not null
                    group by ['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10']
                    having ['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10'] is not null ''' )

    def all_test(self):
        self.__test_error()
        self.__test_current()

    def __create_tb(self):

        tdLog.printNoPrefix("==========step1:create table")
        create_stb_sql = f'''create table stb1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            ) tags (t1 int)
            '''
        create_ntb_sql = f'''create table t1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            )
            '''
        tdSql.execute(create_stb_sql)
        tdSql.execute(create_ntb_sql)

        for i in range(4):
            tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
            { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}

    def __insert_data(self, rows):
        now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
        for i in range(rows):
            tdSql.execute(
                f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
        tdSql.execute(
            f'''insert into ct1 values
            ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
            ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
            '''
        )

        tdSql.execute(
            f'''insert into ct4 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
            )
            (
                { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
            )
            '''
        )

        tdSql.execute(
            f'''insert into ct2 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
                { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
            )
            (
                { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
                { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
            )
            '''
        )

        for i in range(rows):
            insert_data = f'''insert into t1 values
                ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
                "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
                '''
            tdSql.execute(insert_data)
        tdSql.execute(
            f'''insert into t1 values
            ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
                "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
            )
            (
                { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
                "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
            )
            '''
        )

    def run(self):
        tdSql.prepare()

        tdLog.printNoPrefix("==========step1:create table")
        self.__create_tb()

        tdLog.printNoPrefix("==========step2:insert data")
        self.rows = 10
        self.__insert_data(self.rows)

        tdLog.printNoPrefix("==========step3:all check")
        self.all_test()

        tdDnodes.stop(1)
        tdDnodes.start(1)

        tdSql.execute("use db")

        tdLog.printNoPrefix("==========step4:after wal, all check again ")
        self.all_test()

    def stop(self):
        tdSql.close()
        tdLog.success(f"{__file__} successfully executed")

tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
@@ -36,17 +36,14 @@ class TDTestCase:
            query_condition.extend(
                (
                    f"{tbname}.{char_col}",
                    f"upper( {tbname}.{char_col} )",
                    # f"upper( {tbname}.{char_col} )",
                )
            )
            query_condition.extend( f"cast( {tbname}.{un_char_col} as binary(16) ) " for un_char_col in NUM_COL)
            query_condition.extend( f"cast( {tbname}.{char_col} + {tbname}.{char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
            query_condition.extend( f"cast( {tbname}.{char_col} + {tbname}.{un_char_col} as binary(32) ) " for un_char_col in NUM_COL )
        for num_col in NUM_COL:
            query_condition.extend(
                (
                    f"{tbname}.{num_col}",
                    f"sin( {tbname}.{num_col} )"
                    f"sin( {tbname}.{num_col} )",
                )
            )
            query_condition.extend( f"{tbname}.{num_col} + {tbname}.{num_col_1} " for num_col_1 in NUM_COL )
@@ -55,41 +52,115 @@ class TDTestCase:

        return query_condition

    def __join_condition(self, tb_list, filter=PRIMARY_COL):
        # sourcery skip: flip-comparison
        if 1 == len(tb_list):
            join_filter = f"{tb_list[0]}.{filter} = {tb_list[0]}.{filter} "
        elif 2 == len(tb_list):
            join_filter = f"{tb_list[0]}.{filter} = {tb_list[1]}.{filter} "
        else:
            join_filter = f"{tb_list[0]}.{filter} = {tb_list[1]}.{filter} "
            for i in range(1, len(tb_list)-1 ):
                join_filter += f"and {tb_list[i]}.{filter} = {tb_list[i+1]}.{filter}"
    def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
        table_reference = tb_list[0]
        join_condition = table_reference
        join = "inner join" if INNER else "join"
        for i in range(len(tb_list[1:])):
            join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"

        return join_filter
        return join_condition

    def __where_condition(self, col, tbname):
    def __where_condition(self, col=None, tbname=None, query_conditon=None):
        if query_conditon and isinstance(query_conditon, str):
            if query_conditon.startswith("count"):
                query_conditon = query_conditon[6:-1]
            elif query_conditon.startswith("max"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("sum"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("min"):
                query_conditon = query_conditon[4:-1]

        if query_conditon:
            return f" where {query_conditon} is not null"
        if col in NUM_COL:
            return f" abs( {tbname}.{col} ) >= 0"
        elif col in CHAR_COL:
            return f" lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
        elif col in BOOLEAN_COL:
            return f" {tbname}.{col} in (false, true) "
        elif col in TS_TYPE_COL or col in PRIMARY_COL:
            return f" cast( {tbname}.{col} as binary(16) ) is not null "
        else:
            return ""
            return f" where abs( {tbname}.{col} ) >= 0"
        if col in CHAR_COL:
            return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
        if col in BOOLEAN_COL:
            return f" where {tbname}.{col} in (false, true) "
        if col in TS_TYPE_COL or col in PRIMARY_COL:
            return f" where cast( {tbname}.{col} as binary(16) ) is not null "

    def __group_condition(self, tbname, col, having = ""):
        return ""

    def __group_condition(self, col, having = None):
        if isinstance(col, str):
            if col.startswith("count"):
                col = col[6:-1]
            elif col.startswith("max"):
                col = col[4:-1]
            elif col.startswith("sum"):
                col = col[4:-1]
            elif col.startswith("min"):
                col = col[4:-1]
        return f" group by {col} having {having}" if having else f" group by {col} "

    def __join_check(self, tblist, checkrows, join_flag=True):
    def __gen_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
        if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
            return
        return f"select {select_clause} from {from_clause} {where_condition} {group_condition}"

    @property
    def __join_tblist(self):
        return [
            # ["ct1", "ct2"],
            ["ct1", "ct4"],
            ["ct1", "t1"],
            # ["ct2", "ct4"],
            # ["ct2", "t1"],
            # ["ct4", "t1"],
            # ["ct1", "ct2", "ct4"],
            # ["ct1", "ct2", "t1"],
            # ["ct1", "ct4", "t1"],
            # ["ct2", "ct4", "t1"],
            # ["ct1", "ct2", "ct4", "t1"],
        ]

    @property
    def __sqls_list(self):
        sqls = []
        __join_tblist = self.__join_tblist
        for join_tblist in __join_tblist:
            for join_tb in join_tblist:
                select_claus_list = self.__query_condition(join_tb)
                for select_claus in select_claus_list:
                    group_claus = self.__group_condition( col=select_claus)
                    where_claus = self.__where_condition( query_conditon=select_claus )
                    having_claus = self.__group_condition( col=select_claus, having=f"{select_claus} is not null" )
                    sqls.extend(
                        (
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist), where_claus, group_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist), where_claus, having_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist), where_claus),
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist), group_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist), having_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist)),
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, group_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, having_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, ),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), having_claus ),
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), group_claus ),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True) ),
                        )
                    )
        return list(filter(None, sqls))
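# As with the single-table generator in the hyperloglog case, every surviving entry of
# __sqls_list is a full statement. For the pair ["ct1", "ct4"] with select_claus set to
# "ct1.c1", the inner-join variant produced above comes out roughly as (whitespace
# approximated):
#   select ct1.c1 from ct1 inner join ct4 on ct1.ts=ct4.ts where ct1.c1 is not null group by ct1.c1 having ct1.c1 is not null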
    def __join_check(self,):
        tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
        for i in range(len(self.__sqls_list)):
            tdSql.query(self.__sqls_list[i])
            # if i % 10 == 0 :
            #     tdLog.success(f"{i} sql is already executed success !")

    def __join_check_old(self, tblist, checkrows, join_flag=True):
        query_conditions = self.__query_condition(tblist[0])
        join_condition = self.__join_condition(tb_list=tblist) if join_flag else " "
        for condition in query_conditions:
            where_condition = self.__where_condition(col=condition, tbname=tblist[0])
            group_having = self.__group_condition(tbname=tblist[0], col=condition, having=f"{condition} is not null " )
            group_no_having= self.__group_condition(tbname=tblist[0], col=condition )
            group_having = self.__group_condition(col=condition, having=f"{condition} is not null " )
            group_no_having= self.__group_condition(col=condition )
            groups = ["", group_having, group_no_having]
            for group_condition in groups:
                if where_condition:
@@ -116,23 +187,6 @@ class TDTestCase:
                    tdSql.query(sql=sql)
                    # tdSql.checkRows(checkrows)


    def __test_current(self):
        # sourcery skip: extract-duplicate-method, inline-immediately-returned-variable
        tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
        tblist_1 = ["ct1", "ct2"]
        self.__join_check(tblist_1, 1)
        tdLog.printNoPrefix(f"==========current sql condition check in {tblist_1} over==========")
        tblist_2 = ["ct2", "ct4"]
        self.__join_check(tblist_2, self.rows)
        tdLog.printNoPrefix(f"==========current sql condition check in {tblist_2} over==========")
        tblist_3 = ["t1", "ct4"]
        self.__join_check(tblist_3, 1)
        tdLog.printNoPrefix(f"==========current sql condition check in {tblist_3} over==========")
        tblist_4 = ["t1", "ct1"]
        self.__join_check(tblist_4, 1)
        tdLog.printNoPrefix(f"==========current sql condition check in {tblist_4} over==========")

    def __test_error(self):
        # sourcery skip: extract-duplicate-method, move-assign-in-block
        tdLog.printNoPrefix("==========err sql condition check , must return error==========")
@@ -141,17 +195,17 @@ class TDTestCase:
err_list_3 = ["ct1","ct4", "t1"]
|
||||
err_list_4 = ["ct2","ct4", "t1"]
|
||||
err_list_5 = ["ct1", "ct2","ct4", "t1"]
|
||||
self.__join_check(err_list_1, -1)
|
||||
self.__join_check_old(err_list_1, -1)
|
||||
tdLog.printNoPrefix(f"==========err sql condition check in {err_list_1} over==========")
|
||||
self.__join_check(err_list_2, -1)
|
||||
self.__join_check_old(err_list_2, -1)
|
||||
tdLog.printNoPrefix(f"==========err sql condition check in {err_list_2} over==========")
|
||||
self.__join_check(err_list_3, -1)
|
||||
self.__join_check_old(err_list_3, -1)
|
||||
tdLog.printNoPrefix(f"==========err sql condition check in {err_list_3} over==========")
|
||||
self.__join_check(err_list_4, -1)
|
||||
self.__join_check_old(err_list_4, -1)
|
||||
tdLog.printNoPrefix(f"==========err sql condition check in {err_list_4} over==========")
|
||||
self.__join_check(err_list_5, -1)
|
||||
self.__join_check_old(err_list_5, -1)
|
||||
tdLog.printNoPrefix(f"==========err sql condition check in {err_list_5} over==========")
|
||||
self.__join_check(["ct2", "ct4"], -1, join_flag=False)
|
||||
self.__join_check_old(["ct2", "ct4"], -1, join_flag=False)
|
||||
tdLog.printNoPrefix("==========err sql condition check in has no join condition over==========")
|
||||
|
||||
tdSql.error( f"select c1, c2 from ct2, ct4 where ct2.{PRIMARY_COL}=ct4.{PRIMARY_COL}" )
|
||||
|
@@ -172,7 +226,7 @@ class TDTestCase:

    def all_test(self):
        self.__test_current()
        self.__join_check()
        self.__test_error()

@@ -0,0 +1,357 @@
import datetime

from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *

PRIMARY_COL = "ts"

INT_COL     = "c1"
BINT_COL    = "c2"
SINT_COL    = "c3"
TINT_COL    = "c4"
FLOAT_COL   = "c5"
DOUBLE_COL  = "c6"
BOOL_COL    = "c7"

BINARY_COL  = "c8"
NCHAR_COL   = "c9"
TS_COL      = "c10"

NUM_COL     = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL    = [ BINARY_COL, NCHAR_COL, ]
BOOLEAN_COL = [ BOOL_COL, ]
TS_TYPE_COL = [ TS_COL, ]

class TDTestCase:

    def init(self, conn, logSql):
        tdLog.debug(f"start to execute {__file__}")
        tdSql.init(conn.cursor(), True)

    def __query_condition(self,tbname):
        query_condition = []
        for char_col in CHAR_COL:
            query_condition.extend(
                (
                    f"{tbname}.{char_col}",
                    # f"upper( {tbname}.{char_col} )",
                )
            )
        query_condition.extend( f"cast( {tbname}.{un_char_col} as binary(16) ) " for un_char_col in NUM_COL)
        for num_col in NUM_COL:
            query_condition.extend(
                (
                    f"sin( {tbname}.{num_col} )",
                )
            )
            query_condition.extend( f"{tbname}.{num_col} + {tbname}.{num_col_1} " for num_col_1 in NUM_COL )

        query_condition.append(''' "test1234!@#$%^&*():'><?/.,][}{" ''')

        return query_condition

    def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
        table_reference = tb_list[0]
        join_condition = table_reference
        join = "inner join" if INNER else "join"
        for i in range(len(tb_list[1:])):
            join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"

        return join_condition
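    # Example (added for clarity, not in the original file): __join_condition(["ct1", "ct4"])
    # builds "ct1 join ct4 on ct1.ts=ct4.ts"; with INNER=True the keyword becomes "inner join".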

    def __where_condition(self, col=None, tbname=None, query_conditon=None):
        if query_conditon and isinstance(query_conditon, str):
            if query_conditon.startswith("count"):
                query_conditon = query_conditon[6:-1]
            elif query_conditon.startswith("max"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("sum"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("min"):
                query_conditon = query_conditon[4:-1]

        if query_conditon:
            return f" where {query_conditon} is not null"
        if col in NUM_COL:
            return f" where abs( {tbname}.{col} ) >= 0"
        if col in CHAR_COL:
            return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
        if col in BOOLEAN_COL:
            return f" where {tbname}.{col} in (false, true) "
        if col in TS_TYPE_COL or col in PRIMARY_COL:
            return f" where cast( {tbname}.{col} as binary(16) ) is not null "

        return ""

    def __group_condition(self, col, having = None):
        if isinstance(col, str):
            if col.startswith("count"):
                col = col[6:-1]
            elif col.startswith("max"):
                col = col[4:-1]
            elif col.startswith("sum"):
                col = col[4:-1]
            elif col.startswith("min"):
                col = col[4:-1]
        return f" group by {col} having {having}" if having else f" group by {col} "

    def __gen_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
        if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
            return
        return f"select {select_clause} from {from_clause} {where_condition} {group_condition}"

    @property
    def __join_tblist(self):
        return [
            # ["ct1", "ct2"],
            # ["ct1", "ct4"],
            # ["ct1", "t1"],
            ["ct2", "ct4"],
            # ["ct2", "t1"],
            ["ct4", "t1"],
            # ["ct1", "ct2", "ct4"],
            # ["ct1", "ct2", "t1"],
            # ["ct1", "ct4", "t1"],
            # ["ct2", "ct4", "t1"],
            # ["ct1", "ct2", "ct4", "t1"],
        ]

    @property
    def __sqls_list(self):
        sqls = []
        __join_tblist = self.__join_tblist
        for join_tblist in __join_tblist:
            for join_tb in join_tblist:
                select_claus_list = self.__query_condition(join_tb)
                for select_claus in select_claus_list:
                    group_claus = self.__group_condition( col=select_claus)
                    where_claus = self.__where_condition( query_conditon=select_claus )
                    having_claus = self.__group_condition( col=select_claus, having=f"{select_claus} is not null" )
                    sqls.extend(
                        (
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist), where_claus, group_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist), where_claus, having_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist), where_claus),
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist), group_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist), having_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist)),
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, group_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, having_claus),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, ),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), having_claus ),
                            # self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True), group_claus ),
                            self.__gen_sql(select_claus, self.__join_condition(join_tblist, INNER=True) ),
                        )
                    )
        return list(filter(None, sqls))

    def __join_check(self,):
        tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
        for i in range(len(self.__sqls_list)):
            tdSql.query(self.__sqls_list[i])
            # if i % 10 == 0 :
            #     tdLog.success(f"{i} sql is already executed success !")

    def __join_check_old(self, tblist, checkrows, join_flag=True):
        query_conditions = self.__query_condition(tblist[0])
        join_condition = self.__join_condition(tb_list=tblist) if join_flag else " "
        for condition in query_conditions:
            where_condition = self.__where_condition(col=condition, tbname=tblist[0])
            group_having = self.__group_condition(col=condition, having=f"{condition} is not null " )
            group_no_having= self.__group_condition(col=condition )
            groups = ["", group_having, group_no_having]
            for group_condition in groups:
                if where_condition:
                    sql = f" select {condition} from {tblist[0]},{tblist[1]} where {join_condition} and {where_condition} {group_condition} "
                else:
                    sql = f" select {condition} from {tblist[0]},{tblist[1]} where {join_condition} {group_condition} "

                if not join_flag :
                    tdSql.error(sql=sql)
                    break
                if len(tblist) == 2:
                    if "ct1" in tblist or "t1" in tblist:
                        self.__join_current(sql, checkrows)
                    elif where_condition or "not null" in group_condition:
                        self.__join_current(sql, checkrows + 2 )
                    elif group_condition:
                        self.__join_current(sql, checkrows + 3 )
                    else:
                        self.__join_current(sql, checkrows + 5 )
                if len(tblist) > 2 or len(tblist) < 1:
                    tdSql.error(sql=sql)

    def __join_current(self, sql, checkrows):
        tdSql.query(sql=sql)
        # tdSql.checkRows(checkrows)

    def __test_error(self):
        # sourcery skip: extract-duplicate-method, move-assign-in-block
        tdLog.printNoPrefix("==========err sql condition check , must return error==========")
        err_list_1 = ["ct1","ct2", "ct4"]
        err_list_2 = ["ct1","ct2", "t1"]
        err_list_3 = ["ct1","ct4", "t1"]
        err_list_4 = ["ct2","ct4", "t1"]
        err_list_5 = ["ct1", "ct2","ct4", "t1"]
        self.__join_check_old(err_list_1, -1)
        tdLog.printNoPrefix(f"==========err sql condition check in {err_list_1} over==========")
        self.__join_check_old(err_list_2, -1)
        tdLog.printNoPrefix(f"==========err sql condition check in {err_list_2} over==========")
        self.__join_check_old(err_list_3, -1)
        tdLog.printNoPrefix(f"==========err sql condition check in {err_list_3} over==========")
        self.__join_check_old(err_list_4, -1)
        tdLog.printNoPrefix(f"==========err sql condition check in {err_list_4} over==========")
        self.__join_check_old(err_list_5, -1)
        tdLog.printNoPrefix(f"==========err sql condition check in {err_list_5} over==========")
        self.__join_check_old(["ct2", "ct4"], -1, join_flag=False)
        tdLog.printNoPrefix("==========err sql condition check in has no join condition over==========")

        tdSql.error( f"select c1, c2 from ct2, ct4 where ct2.{PRIMARY_COL}=ct4.{PRIMARY_COL}" )
        tdSql.error( f"select ct2.c1, ct2.c2 from ct2, ct4 where ct2.{INT_COL}=ct4.{INT_COL}" )
        tdSql.error( f"select ct2.c1, ct2.c2 from ct2, ct4 where ct2.{TS_COL}=ct4.{TS_COL}" )
        tdSql.error( f"select ct2.c1, ct2.c2 from ct2, ct4 where ct2.{PRIMARY_COL}=ct4.{TS_COL}" )
        tdSql.error( f"select ct2.c1, ct1.c2 from ct2, ct4 where ct2.{PRIMARY_COL}=ct4.{PRIMARY_COL}" )
        tdSql.error( f"select ct2.c1, ct4.c2 from ct2, ct4 where ct2.{PRIMARY_COL}=ct4.{PRIMARY_COL} and c1 is not null " )
        tdSql.error( f"select ct2.c1, ct4.c2 from ct2, ct4 where ct2.{PRIMARY_COL}=ct4.{PRIMARY_COL} and ct1.c1 is not null " )

        tbname = ["ct1", "ct2", "ct4", "t1"]

        # for tb in tbname:
        #     for errsql in self.__join_err_check(tb):
        #         tdSql.error(sql=errsql)
        #     tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")

    def all_test(self):
        self.__join_check()
        self.__test_error()

    def __create_tb(self):
        tdSql.prepare()

        tdLog.printNoPrefix("==========step1:create table")
        create_stb_sql  =  f'''create table stb1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            ) tags (t1 int)
            '''
        create_ntb_sql = f'''create table t1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            )
            '''
        tdSql.execute(create_stb_sql)
        tdSql.execute(create_ntb_sql)

        for i in range(4):
            tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
            { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}

    def __insert_data(self, rows):
        now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
        for i in range(rows):
            tdSql.execute(
                f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
        tdSql.execute(
            f'''insert into ct1 values
            ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
            ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
            '''
        )

        tdSql.execute(
            f'''insert into ct4 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
                )
            (
                { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
                )
            '''
        )

        tdSql.execute(
            f'''insert into ct2 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
                { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
                )
            (
                { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
                { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
                )
            '''
        )

        for i in range(rows):
            insert_data = f'''insert into t1 values
                ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
                "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
                '''
            tdSql.execute(insert_data)
        tdSql.execute(
            f'''insert into t1 values
            ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
                "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
                )
            (
                { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
                "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
                )
            '''
        )

    def run(self):
        tdSql.prepare()

        tdLog.printNoPrefix("==========step1:create table")
        self.__create_tb()

        tdLog.printNoPrefix("==========step2:insert data")
        self.rows = 10
        self.__insert_data(self.rows)

        tdLog.printNoPrefix("==========step3:all check")
        self.all_test()

        tdDnodes.stop(1)
        tdDnodes.start(1)

        tdSql.execute("use db")

        tdLog.printNoPrefix("==========step4:after wal, all check again ")
        self.all_test()

    def stop(self):
        tdSql.close()
        tdLog.success(f"{__file__} successfully executed")

tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
@@ -0,0 +1,358 @@
import datetime

from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *

PRIMARY_COL = "ts"

INT_COL     = "c1"
BINT_COL    = "c2"
SINT_COL    = "c3"
TINT_COL    = "c4"
FLOAT_COL   = "c5"
DOUBLE_COL  = "c6"
BOOL_COL    = "c7"

BINARY_COL  = "c8"
NCHAR_COL   = "c9"
TS_COL      = "c10"

NUM_COL     = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL    = [ BINARY_COL, NCHAR_COL, ]
BOOLEAN_COL = [ BOOL_COL, ]
TS_TYPE_COL = [ TS_COL, ]

ALL_COL = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, BINARY_COL, NCHAR_COL, TS_COL ]

class TDTestCase:

    def init(self, conn, logSql):
        tdLog.debug(f"start to execute {__file__}")
        tdSql.init(conn.cursor())

    def __query_condition(self,tbname):
        query_condition = [f"cast({col} as bigint)" for col in ALL_COL]
        for num_col in NUM_COL:
            query_condition.extend(
                (
                    f"{tbname}.{num_col}",
                    f"abs( {tbname}.{num_col} )",
                    f"acos( {tbname}.{num_col} )",
                    f"asin( {tbname}.{num_col} )",
                    f"atan( {tbname}.{num_col} )",
                    f"avg( {tbname}.{num_col} )",
                    f"ceil( {tbname}.{num_col} )",
                    f"cos( {tbname}.{num_col} )",
                    f"count( {tbname}.{num_col} )",
                    f"floor( {tbname}.{num_col} )",
                    f"log( {tbname}.{num_col}, {tbname}.{num_col})",
                    f"max( {tbname}.{num_col} )",
                    f"min( {tbname}.{num_col} )",
                    f"pow( {tbname}.{num_col}, 2)",
                    f"round( {tbname}.{num_col} )",
                    f"sum( {tbname}.{num_col} )",
                    f"sin( {tbname}.{num_col} )",
                    f"sqrt( {tbname}.{num_col} )",
                    f"tan( {tbname}.{num_col} )",
                    f"cast( {tbname}.{num_col} as timestamp)",
                )
            )
            [ query_condition.append(f"{num_col} + {any_col}") for any_col in ALL_COL ]
        for char_col in CHAR_COL:
            query_condition.extend(
                (
                    f"count({tbname}.{char_col})",
                    f"sum(cast({tbname}.{char_col} as bigint))",
                    f"max(cast({tbname}.{char_col} as bigint))",
                    f"min(cast({tbname}.{char_col} as bigint))",
                    f"avg(cast({tbname}.{char_col} as bigint))",
                )
            )
        query_condition.extend(
            (
                1010,
            )
        )

        return query_condition

    def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
        table_reference = tb_list[0]
        join_condition = table_reference
        join = "inner join" if INNER else "join"
        for i in range(len(tb_list[1:])):
            join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"

        return join_condition

    def __where_condition(self, col=None, tbname=None, query_conditon=None):
        if query_conditon and isinstance(query_conditon, str):
            if query_conditon.startswith("count"):
                query_conditon = query_conditon[6:-1]
            elif query_conditon.startswith("max"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("sum"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("min"):
                query_conditon = query_conditon[4:-1]

        if query_conditon:
            return f" where {query_conditon} is not null"
        if col in NUM_COL:
            return f" where abs( {tbname}.{col} ) >= 0"
        if col in CHAR_COL:
            return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
        if col in BOOLEAN_COL:
            return f" where {tbname}.{col} in (false, true) "
        if col in TS_TYPE_COL or col in PRIMARY_COL:
            return f" where cast( {tbname}.{col} as binary(16) ) is not null "

        return ""

    def __group_condition(self, col, having = None):
        if isinstance(col, str):
            if col.startswith("count"):
                col = col[6:-1]
            elif col.startswith("max"):
                col = col[4:-1]
            elif col.startswith("sum"):
                col = col[4:-1]
            elif col.startswith("min"):
                col = col[4:-1]
        return f" group by {col} having {having}" if having else f" group by {col} "

    def __single_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
        if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
            return
        return f"select spread({select_clause}) from {from_clause} {where_condition} {group_condition}"

    @property
    def __tb_list(self):
        return [
            "ct1",
            "ct4",
            "t1",
            "ct2",
            "stb1",
        ]

    def sql_list(self):
        sqls = []
        __no_join_tblist = self.__tb_list
        for tb in __no_join_tblist:
            select_claus_list = self.__query_condition(tb)
            for select_claus in select_claus_list:
                group_claus = self.__group_condition(col=select_claus)
                where_claus = self.__where_condition(query_conditon=select_claus)
                having_claus = self.__group_condition(col=select_claus, having=f"{select_claus} is not null")
                sqls.extend(
                    (
                        self.__single_sql(select_claus, tb, where_claus, having_claus),
                        self.__single_sql(select_claus, tb,),
                        self.__single_sql(select_claus, tb, where_condition=where_claus),
                        self.__single_sql(select_claus, tb, group_condition=group_claus),
                    )
                )

        # return filter(None, sqls)
        return list(filter(None, sqls))

    def __get_type(self, col):
        if tdSql.cursor.istype(col, "BOOL"):
            return "BOOL"
        if tdSql.cursor.istype(col, "INT"):
            return "INT"
        if tdSql.cursor.istype(col, "BIGINT"):
            return "BIGINT"
        if tdSql.cursor.istype(col, "TINYINT"):
            return "TINYINT"
        if tdSql.cursor.istype(col, "SMALLINT"):
            return "SMALLINT"
        if tdSql.cursor.istype(col, "FLOAT"):
            return "FLOAT"
        if tdSql.cursor.istype(col, "DOUBLE"):
            return "DOUBLE"
        if tdSql.cursor.istype(col, "BINARY"):
            return "BINARY"
        if tdSql.cursor.istype(col, "NCHAR"):
            return "NCHAR"
        if tdSql.cursor.istype(col, "TIMESTAMP"):
            return "TIMESTAMP"
        if tdSql.cursor.istype(col, "JSON"):
            return "JSON"
        if tdSql.cursor.istype(col, "TINYINT UNSIGNED"):
            return "TINYINT UNSIGNED"
        if tdSql.cursor.istype(col, "SMALLINT UNSIGNED"):
            return "SMALLINT UNSIGNED"
        if tdSql.cursor.istype(col, "INT UNSIGNED"):
            return "INT UNSIGNED"
        if tdSql.cursor.istype(col, "BIGINT UNSIGNED"):
            return "BIGINT UNSIGNED"

    def spread_check(self):
        sqls = self.sql_list()
        tdLog.printNoPrefix("===step 1: current case, must return query OK")
        for i in range(len(sqls)):
            tdLog.info(f"sql: {sqls[i]}")
            tdSql.query(sqls[i])

    def __test_current(self):
        tdSql.query("select spread(ts) from ct1")
        tdSql.checkRows(1)
        tdSql.query("select spread(c1) from ct2")
        tdSql.checkRows(1)
        tdSql.query("select spread(c1) from ct4 group by c1")
        tdSql.checkRows(self.rows + 3)
        tdSql.query("select spread(c1) from ct4 group by c7")
        tdSql.checkRows(3)
        tdSql.query("select spread(ct2.c1) from ct4 join ct2 on ct4.ts=ct2.ts")
        tdSql.checkRows(1)

        self.spread_check()

    def __test_error(self):

        tdLog.printNoPrefix("===step 0: err case, must return err")
        tdSql.error( "select spread() from ct1" )
        tdSql.error( "select spread(1, 2) from ct2" )
        tdSql.error( f"select spread({NUM_COL[0]}, {NUM_COL[1]}) from ct4" )
        tdSql.error( f"select spread({BOOLEAN_COL[0]}) from t1" )
        tdSql.error( f"select spread({CHAR_COL[0]}) from stb1" )

        # tdSql.error( ''' select spread(['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10'])
        #     from ct1
        #     where ['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10'] is not null
        #     group by ['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10']
        #     having ['c1 + c1', 'c1 + c2', 'c1 + c3', 'c1 + c4', 'c1 + c5', 'c1 + c6', 'c1 + c7', 'c1 + c8', 'c1 + c9', 'c1 + c10'] is not null ''' )
        # tdSql.error( "select c1 from ct1 union select c1 from ct2 union select c1 from ct4 ")

    def all_test(self):
        self.__test_error()
        self.__test_current()

    def __create_tb(self):

        tdLog.printNoPrefix("==========step1:create table")
        create_stb_sql  =  f'''create table stb1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            ) tags (t1 int)
            '''
        create_ntb_sql = f'''create table t1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            )
            '''
        tdSql.execute(create_stb_sql)
        tdSql.execute(create_ntb_sql)

        for i in range(4):
            tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
            { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}

    def __insert_data(self, rows):
        now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
        for i in range(rows):
            tdSql.execute(
                f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
        tdSql.execute(
            f'''insert into ct1 values
            ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
            ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
            '''
        )

        tdSql.execute(
            f'''insert into ct4 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
                )
            (
                { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
                )
            '''
        )

        tdSql.execute(
            f'''insert into ct2 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
                { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
                )
            (
                { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
                { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
                )
            '''
        )

        for i in range(rows):
            insert_data = f'''insert into t1 values
                ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
                "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
                '''
            tdSql.execute(insert_data)
        tdSql.execute(
            f'''insert into t1 values
            ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
                "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
                )
            (
                { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
                "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
                )
            '''
        )

    def run(self):
        tdSql.prepare()

        tdLog.printNoPrefix("==========step1:create table")
        self.__create_tb()

        tdLog.printNoPrefix("==========step2:insert data")
        self.rows = 10
        self.__insert_data(self.rows)

        tdLog.printNoPrefix("==========step3:all check")
        self.all_test()

        tdDnodes.stop(1)
        tdDnodes.start(1)

        tdSql.execute("use db")

        tdLog.printNoPrefix("==========step4:after wal, all check again ")
        self.all_test()

    def stop(self):
        tdSql.close()
        tdLog.success(f"{__file__} successfully executed")

tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
@@ -218,13 +218,13 @@ class TDTestCase:
tdLog.printNoPrefix("==========step3:all check")
|
||||
self.all_test()
|
||||
|
||||
# tdDnodes.stop(1)
|
||||
# tdDnodes.start(1)
|
||||
tdDnodes.stop(1)
|
||||
tdDnodes.start(1)
|
||||
|
||||
# tdSql.execute("use db")
|
||||
tdSql.execute("use db")
|
||||
|
||||
# tdLog.printNoPrefix("==========step4:after wal, all check again ")
|
||||
# self.all_test()
|
||||
tdLog.printNoPrefix("==========step4:after wal, all check again ")
|
||||
self.all_test()
|
||||
|
||||
def stop(self):
|
||||
tdSql.close()
|
||||
|
|
|
@@ -35,8 +35,6 @@ class TDTestCase:
        for char_col in CHAR_COL:
            query_condition.extend(
                (
                    f"rtrim( {tbname}.{char_col} )",
                    f"substr( {tbname}.{char_col}, 1 )",
                    f"count( {tbname}.{char_col} )",
                    f"cast( {tbname}.{char_col} as nchar(3) )",
                )
@@ -45,11 +43,7 @@ class TDTestCase:
        for num_col in NUM_COL:
            query_condition.extend(
                (
                    f"{tbname}.{num_col}",
                    f"floor( {tbname}.{num_col} )",
                    f"log( {tbname}.{num_col}, {tbname}.{num_col})",
                    f"sin( {tbname}.{num_col} )",
                    f"sqrt( {tbname}.{num_col} )",
                )
            )
@@ -96,7 +90,6 @@ class TDTestCase:

        return ""

    def __group_condition(self, col, having = None):
        if isinstance(col, str):
            if col.startswith("count"):
@@ -114,15 +107,10 @@ class TDTestCase:
            return
        return f"select {select_clause} from {from_clause} {where_condition} {group_condition}"

    @property
    def __join_tblist(self):
        return [
            ["ct1", "ct2"],
            ["ct1", "ct4"],
            ["ct1", "t1"],
            ["ct2", "ct4"],
            ["ct2", "t1"],
            ["ct4", "t1"],
            # ["ct1", "ct2", "ct4"],
            # ["ct1", "ct2", "t1"],
@@ -135,9 +123,7 @@ class TDTestCase:
    def __tb_liast(self):
        return [
            "ct1",
            "ct2",
            "ct4",
            "t1",
        ]

    def sql_list(self):
@@ -152,15 +138,7 @@ class TDTestCase:
                    having_claus = self.__group_condition( col=select_claus, having=f"{select_claus} is not null")
                    sqls.extend(
                        (
                            self.__single_sql(select_claus, join_tb, where_claus, group_claus),
                            self.__single_sql(select_claus, join_tb, where_claus, having_claus),
                            self.__single_sql(select_claus, self.__join_condition(join_tblist), where_claus, having_claus),
                            self.__single_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, having_claus),
                            self.__single_sql(select_claus, join_tb, where_claus),
                            self.__single_sql(select_claus, join_tb, having_claus),
                            self.__single_sql(select_claus, join_tb, group_claus),
                            self.__single_sql(select_claus, join_tb),

                        )
                    )
        __no_join_tblist = self.__tb_liast
@@ -172,12 +150,7 @@ class TDTestCase:
                having_claus = self.__group_condition(col=select_claus, having=f"{select_claus} is not null")
                sqls.extend(
                    (
                        self.__single_sql(select_claus, join_tb, where_claus, group_claus),
                        self.__single_sql(select_claus, join_tb, where_claus, having_claus),
                        self.__single_sql(select_claus, join_tb, where_claus),
                        self.__single_sql(select_claus, join_tb, group_claus),
                        self.__single_sql(select_claus, join_tb, having_claus),
                        self.__single_sql(select_claus, join_tb),
                        self.__single_sql(select_claus, tb, where_claus, having_claus),
                    )
                )

@@ -221,6 +194,8 @@ class TDTestCase:
        for i in range(len(sqls)):
            tdSql.query(sqls[i])
            res1_type = self.__get_type(0)
            # if i % 5 == 0:
            #     tdLog.success(f"{i} : sql is already executing!")
            for j in range(len(sqls[i:])):
                tdSql.query(sqls[j+i])
                order_union_type = False
@@ -246,22 +221,12 @@ class TDTestCase:
                    rev_order_type = True

                if all_union_type:
                    tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
                    tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
                    tdSql.checkCols(1)
                    tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
                    tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
                    tdSql.checkCols(1)
                    tdSql.execute(f"{sqls[i]} union {sqls[j+i]}")
                    tdSql.execute(f"{sqls[j+i]} union all {sqls[i]}")
                elif order_union_type:
                    tdSql.query(f"{sqls[i]} union {sqls[j+i]}")
                    tdSql.checkCols(1)
                    tdSql.query(f"{sqls[i]} union all {sqls[j+i]}")
                    tdSql.checkCols(1)
                    tdSql.execute(f"{sqls[i]} union all {sqls[j+i]}")
                elif rev_order_type:
                    tdSql.query(f"{sqls[j+i]} union {sqls[i]}")
                    tdSql.checkCols(1)
                    tdSql.query(f"{sqls[j+i]} union all {sqls[i]}")
                    tdSql.checkCols(1)
                    tdSql.execute(f"{sqls[j+i]} union {sqls[i]}")
                else:
                    tdSql.error(f"{sqls[i]} union {sqls[j+i]}")

@@ -273,7 +238,7 @@ class TDTestCase:
tdSql.error( "select c1 from ct1 union all drop table ct3" )
|
||||
tdSql.error( "select c1 from ct1 union all '' " )
|
||||
tdSql.error( " '' union all select c1 from ct1 " )
|
||||
tdSql.error( "select c1 from ct1 union select c1 from ct2 union select c1 from ct4 ")
|
||||
# tdSql.error( "select c1 from ct1 union select c1 from ct2 union select c1 from ct4 ")
|
||||
|
||||
def all_test(self):
|
||||
self.__test_error()
|
||||
|
|
|
@@ -0,0 +1,370 @@
import datetime

from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *

PRIMARY_COL = "ts"

INT_COL     = "c1"
BINT_COL    = "c2"
SINT_COL    = "c3"
TINT_COL    = "c4"
FLOAT_COL   = "c5"
DOUBLE_COL  = "c6"
BOOL_COL    = "c7"

BINARY_COL  = "c8"
NCHAR_COL   = "c9"
TS_COL      = "c10"

NUM_COL     = [ INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, ]
CHAR_COL    = [ BINARY_COL, NCHAR_COL, ]
BOOLEAN_COL = [ BOOL_COL, ]
TS_TYPE_COL = [ TS_COL, ]

class TDTestCase:

    def init(self, conn, logSql):
        tdLog.debug(f"start to execute {__file__}")
        tdSql.init(conn.cursor())

    def __query_condition(self,tbname):
        query_condition = []
        for char_col in CHAR_COL:
            query_condition.extend(
                (
                    f"count( {tbname}.{char_col} )",
                    f"cast( {tbname}.{char_col} as nchar(3) )",
                )
            )

        for num_col in NUM_COL:
            query_condition.extend(
                (
                    f"log( {tbname}.{num_col}, {tbname}.{num_col})",
                )
            )

        query_condition.extend(
            (
                ''' "test12" ''',
                # 1010,
            )
        )

        return query_condition

    def __join_condition(self, tb_list, filter=PRIMARY_COL, INNER=False):
        table_reference = tb_list[0]
        join_condition = table_reference
        join = "inner join" if INNER else "join"
        for i in range(len(tb_list[1:])):
            join_condition += f" {join} {tb_list[i+1]} on {table_reference}.{filter}={tb_list[i+1]}.{filter}"

        return join_condition

    def __where_condition(self, col=None, tbname=None, query_conditon=None):
        if query_conditon and isinstance(query_conditon, str):
            if query_conditon.startswith("count"):
                query_conditon = query_conditon[6:-1]
            elif query_conditon.startswith("max"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("sum"):
                query_conditon = query_conditon[4:-1]
            elif query_conditon.startswith("min"):
                query_conditon = query_conditon[4:-1]

        if query_conditon:
            return f" where {query_conditon} is not null"
        if col in NUM_COL:
            return f" where abs( {tbname}.{col} ) >= 0"
        if col in CHAR_COL:
            return f" where lower( {tbname}.{col} ) like 'bina%' or lower( {tbname}.{col} ) like '_cha%' "
        if col in BOOLEAN_COL:
            return f" where {tbname}.{col} in (false, true) "
        if col in TS_TYPE_COL or col in PRIMARY_COL:
            return f" where cast( {tbname}.{col} as binary(16) ) is not null "

        return ""

    def __group_condition(self, col, having = None):
        if isinstance(col, str):
            if col.startswith("count"):
                col = col[6:-1]
            elif col.startswith("max"):
                col = col[4:-1]
            elif col.startswith("sum"):
                col = col[4:-1]
            elif col.startswith("min"):
                col = col[4:-1]
        return f" group by {col} having {having}" if having else f" group by {col} "

    def __single_sql(self, select_clause, from_clause, where_condition="", group_condition=""):
        if isinstance(select_clause, str) and "on" not in from_clause and select_clause.split(".")[0] != from_clause.split(".")[0]:
            return
        return f"select {select_clause} from {from_clause} {where_condition} {group_condition}"

    @property
    def __join_tblist(self):
        return [
            ["ct1", "ct2"],
            # ["ct1", "ct2", "ct4"],
            # ["ct1", "ct2", "t1"],
            # ["ct1", "ct4", "t1"],
            # ["ct2", "ct4", "t1"],
            # ["ct1", "ct2", "ct4", "t1"],
        ]

    @property
    def __tb_liast(self):
        return [
            "t1",
            "stb1",
        ]

    def sql_list(self):
        sqls = []
        __join_tblist = self.__join_tblist
        for join_tblist in __join_tblist:
            for join_tb in join_tblist:
                select_claus_list = self.__query_condition(join_tb)
                for select_claus in select_claus_list:
                    group_claus = self.__group_condition( col=select_claus)
                    where_claus = self.__where_condition(query_conditon=select_claus)
                    having_claus = self.__group_condition( col=select_claus, having=f"{select_claus} is not null")
                    sqls.extend(
                        (
                            self.__single_sql(select_claus, self.__join_condition(join_tblist, INNER=True), where_claus, having_claus),
                        )
                    )
        __no_join_tblist = self.__tb_liast
        for tb in __no_join_tblist:
            select_claus_list = self.__query_condition(tb)
            for select_claus in select_claus_list:
                group_claus = self.__group_condition(col=select_claus)
                where_claus = self.__where_condition(query_conditon=select_claus)
                having_claus = self.__group_condition(col=select_claus, having=f"{select_claus} is not null")
                sqls.extend(
                    (
                        self.__single_sql(select_claus, tb, where_claus, having_claus),
                    )
                )

        # return filter(None, sqls)
        return list(filter(None, sqls))
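    # Illustrative sketch (not part of the original file): with select_claus "count( ct1.c8 )"
    # and join_tblist ["ct1", "ct2"], sql_list yields a union operand shaped like
    #   select count( ct1.c8 ) from ct1 inner join ct2 on ct1.ts=ct2.ts
    #       where ct1.c8 is not null group by count( ct1.c8 ) having count( ct1.c8 ) is not null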

    def __get_type(self, col):
        if tdSql.cursor.istype(col, "BOOL"):
            return "BOOL"
        if tdSql.cursor.istype(col, "INT"):
            return "INT"
        if tdSql.cursor.istype(col, "BIGINT"):
            return "BIGINT"
        if tdSql.cursor.istype(col, "TINYINT"):
            return "TINYINT"
        if tdSql.cursor.istype(col, "SMALLINT"):
            return "SMALLINT"
        if tdSql.cursor.istype(col, "FLOAT"):
            return "FLOAT"
        if tdSql.cursor.istype(col, "DOUBLE"):
            return "DOUBLE"
        if tdSql.cursor.istype(col, "BINARY"):
            return "BINARY"
        if tdSql.cursor.istype(col, "NCHAR"):
            return "NCHAR"
        if tdSql.cursor.istype(col, "TIMESTAMP"):
            return "TIMESTAMP"
        if tdSql.cursor.istype(col, "JSON"):
            return "JSON"
        if tdSql.cursor.istype(col, "TINYINT UNSIGNED"):
            return "TINYINT UNSIGNED"
        if tdSql.cursor.istype(col, "SMALLINT UNSIGNED"):
            return "SMALLINT UNSIGNED"
        if tdSql.cursor.istype(col, "INT UNSIGNED"):
            return "INT UNSIGNED"
        if tdSql.cursor.istype(col, "BIGINT UNSIGNED"):
            return "BIGINT UNSIGNED"

    def union_check(self):
        sqls = self.sql_list()
        for i in range(len(sqls)):
            tdSql.query(sqls[i])
            res1_type = self.__get_type(0)
            # if i % 5 == 0:
            #     tdLog.success(f"{i} : sql is already executing!")
            for j in range(len(sqls[i:])):
                tdSql.query(sqls[j+i])
                order_union_type = False
                rev_order_type = False
                all_union_type = False
                res2_type = self.__get_type(0)

                if res2_type == res1_type:
                    all_union_type = True
                elif res1_type in ( "BIGINT" , "NCHAR" ) and res2_type in ("BIGINT" , "NCHAR"):
                    all_union_type = True
                elif res1_type in ("BIGINT", "NCHAR"):
                    order_union_type = True
                elif res2_type in ("BIGINT", "NCHAR"):
                    rev_order_type = True
                elif res1_type == "TIMESTAMP" and res2_type not in ("BINARY", "NCHAR"):
                    order_union_type = True
                elif res2_type == "TIMESTAMP" and res1_type not in ("BINARY", "NCHAR"):
                    rev_order_type = True
                elif res1_type == "BINARY" and res2_type != "NCHAR":
                    order_union_type = True
                elif res2_type == "BINARY" and res1_type != "NCHAR":
                    rev_order_type = True

                if all_union_type:
                    tdSql.execute(f"{sqls[i]} union {sqls[j+i]}")
                    tdSql.execute(f"{sqls[j+i]} union all {sqls[i]}")
                elif order_union_type:
                    tdSql.execute(f"{sqls[i]} union all {sqls[j+i]}")
                elif rev_order_type:
                    tdSql.execute(f"{sqls[j+i]} union {sqls[i]}")
                else:
                    tdSql.error(f"{sqls[i]} union {sqls[j+i]}")

    def __test_error(self):

        tdSql.error( "show tables union show tables" )
        tdSql.error( "create table errtb1 union all create table errtb2" )
        tdSql.error( "drop table ct1 union all drop table ct3" )
        tdSql.error( "select c1 from ct1 union all drop table ct3" )
        tdSql.error( "select c1 from ct1 union all '' " )
        tdSql.error( " '' union all select c1 from ct1 " )
        # tdSql.error( "select c1 from ct1 union select c1 from ct2 union select c1 from ct4 ")

    def all_test(self):
        self.__test_error()
        self.union_check()

    def __create_tb(self):

        tdLog.printNoPrefix("==========step1:create table")
        create_stb_sql  =  f'''create table stb1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            ) tags (t1 int)
            '''
        create_ntb_sql = f'''create table t1(
                ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
                 {FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
                 {BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
            )
            '''
        tdSql.execute(create_stb_sql)
        tdSql.execute(create_ntb_sql)

        for i in range(4):
            tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
            { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2}

    def __insert_data(self, rows):
        now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
        for i in range(rows):
            tdSql.execute(
                f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
            tdSql.execute(
                f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar_测试_{i}', { now_time + 1 * i } )"
            )
        tdSql.execute(
            f'''insert into ct1 values
            ( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar_测试_0', { now_time + 8 } )
            ( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar_测试_9', { now_time + 9 } )
            '''
        )

        tdSql.execute(
            f'''insert into ct4 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000}
                )
            (
                { now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000}
                )
            '''
        )

        tdSql.execute(
            f'''insert into ct2 values
            ( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3888000000 + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            (
                { now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
                { -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
                )
            (
                { now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
                { - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
                )
            '''
        )

        for i in range(rows):
            insert_data = f'''insert into t1 values
                ( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
                "binary_{i}", "nchar_测试_{i}", { now_time - 1000 * i } )
                '''
            tdSql.execute(insert_data)
        tdSql.execute(
            f'''insert into t1 values
            ( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
            ( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
                { 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
                "binary_limit-1", "nchar_测试_limit-1", { now_time - 86400000 }
                )
            (
                { now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
                { 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
                "binary_limit-2", "nchar_测试_limit-2", { now_time - 172800000 }
                )
            '''
        )

    def run(self):
        tdSql.prepare()

        tdLog.printNoPrefix("==========step1:create table")
        self.__create_tb()

        tdLog.printNoPrefix("==========step2:insert data")
        self.rows = 10
        self.__insert_data(self.rows)

        tdLog.printNoPrefix("==========step3:all check")
        self.all_test()

        tdDnodes.stop(1)
        tdDnodes.start(1)

        tdSql.execute("use db")

        tdLog.printNoPrefix("==========step4:after wal, all check again ")
        self.all_test()

    def stop(self):
        tdSql.close()
        tdLog.success(f"{__file__} successfully executed")

tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
@@ -14,7 +14,7 @@ python3 ./test.py -f 0-others/udf_restart_taosd.py
python3 ./test.py -f 0-others/user_control.py
python3 ./test.py -f 0-others/fsync.py

#python3 ./test.py -f 2-query/between.py
python3 ./test.py -f 2-query/between.py
python3 ./test.py -f 2-query/distinct.py
python3 ./test.py -f 2-query/varchar.py
python3 ./test.py -f 2-query/ltrim.py
@@ -23,15 +23,19 @@ python3 ./test.py -f 2-query/length.py
python3 ./test.py -f 2-query/char_length.py
python3 ./test.py -f 2-query/upper.py
python3 ./test.py -f 2-query/lower.py
#python3 ./test.py -f 2-query/join.py
python3 ./test.py -f 2-query/join.py
python3 ./test.py -f 2-query/join2.py
python3 ./test.py -f 2-query/cast.py
#python3 ./test.py -f 2-query/concat.py
#python3 ./test.py -f 2-query/concat_ws.py
python3 ./test.py -f 2-query/union.py
python3 ./test.py -f 2-query/union1.py
python3 ./test.py -f 2-query/concat.py
python3 ./test.py -f 2-query/concat2.py
python3 ./test.py -f 2-query/concat_ws.py
python3 ./test.py -f 2-query/concat_ws2.py
python3 ./test.py -f 2-query/check_tsdb.py
# python3 ./test.py -f 2-query/union.py
# python3 ./test.py -f 2-query/union2.py
# python3 ./test.py -f 2-query/union3.py
# python3 ./test.py -f 2-query/union4.py
python3 ./test.py -f 2-query/spread.py
python3 ./test.py -f 2-query/hyperloglog.py


python3 ./test.py -f 2-query/timezone.py
python3 ./test.py -f 2-query/Now.py
@@ -47,7 +51,6 @@ python3 ./test.py -f 2-query/timetruncate.py
python3 ./test.py -f 2-query/diff.py
python3 ./test.py -f 2-query/Timediff.py

#python3 ./test.py -f 2-query/cast.py
python3 ./test.py -f 2-query/top.py
python3 ./test.py -f 2-query/bottom.py

@@ -66,7 +69,7 @@ python3 ./test.py -f 2-query/arcsin.py
python3 ./test.py -f 2-query/arccos.py
python3 ./test.py -f 2-query/arctan.py
python3 ./test.py -f 2-query/query_cols_tags_and_or.py
#python3 ./test.py -f 2-query/nestedQuery.py
# python3 ./test.py -f 2-query/nestedQuery.py

python3 ./test.py -f 7-tmq/basic5.py
python3 ./test.py -f 7-tmq/subscribeDb.py
@@ -75,4 +78,3 @@ python3 ./test.py -f 7-tmq/subscribeStb.py
python3 ./test.py -f 7-tmq/subscribeStb0.py
python3 ./test.py -f 7-tmq/subscribeStb1.py
python3 ./test.py -f 7-tmq/subscribeStb2.py