Merge pull request #14071 from taosdata/3.0test/jcy
test:update test case and add util file for test framework
commit f880d00656
@@ -0,0 +1,46 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################

# -*- coding: utf-8 -*-

from util.sql import tdSql


class TDSetSql:
    def init(self, conn, logSql):
        tdSql.init(conn.cursor(), logSql)
        self.stbname = 'stb'

    def set_create_normaltable_sql(self, ntbname='ntb',
                                   column_dict={'ts':'timestamp','col1':'tinyint','col2':'smallint','col3':'int','col4':'bigint','col5': 'unsigned int','col6': 'unsigned tinyint','col7': 'unsigned smallint',
                                                'col8': 'unsigned int','col9': 'unsigned bigint','col10': 'float','col11': 'double','col12': 'bool','col13': 'binary(20)','col14': 'nchar(20)'}):
        column_sql = ''
        for k, v in column_dict.items():
            column_sql += f"{k} {v},"
        create_ntb_sql = f'create table {ntbname} ({column_sql[:-1]})'
        return create_ntb_sql

    def set_create_stable_sql(self, stbname='stb',
                              column_dict={'ts':'timestamp','col1':'tinyint','col2':'smallint','col3':'int','col4':'bigint','col5': 'unsigned int','col6': 'unsigned tinyint','col7': 'unsigned smallint',
                                           'col8': 'unsigned int','col9': 'unsigned bigint','col10': 'float','col11': 'double','col12': 'bool','col13': 'binary(20)','col14': 'nchar(20)'},
                              tag_dict={'ts_tag':'timestamp','t1':'tinyint','t2':'smallint','t3':'int','t4':'bigint','t5': 'unsigned int','t6': 'unsigned tinyint','t7': 'unsigned smallint',
                                        't8': 'unsigned int','t9': 'unsigned bigint','t10': 'float','t11': 'double','t12': 'bool','t13': 'binary(20)','t14': 'nchar(20)'}):
        column_sql = ''
        tag_sql = ''
        for k, v in column_dict.items():
            column_sql += f"{k} {v},"
        for k, v in tag_dict.items():
            tag_sql += f"{k} {v},"
        create_stb_sql = f'create table {stbname} ({column_sql[:-1]}) tags({tag_sql[:-1]})'
        return create_stb_sql
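For context, a minimal usage sketch of the new helper (not part of the diff): TDSetSql only assembles DDL strings, which a test case then hands to tdSql.execute(). The table names and column/tag dicts below are made-up examples.

from util.sqlset import TDSetSql

setsql = TDSetSql()
# builds "create table ntb (ts timestamp,c1 int)"
ntb_sql = setsql.set_create_normaltable_sql('ntb', {'ts': 'timestamp', 'c1': 'int'})
# builds "create table stb (ts timestamp,c1 int) tags(t0 int)"
stb_sql = setsql.set_create_stable_sql('stb', {'ts': 'timestamp', 'c1': 'int'}, {'t0': 'int'})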
@@ -11,103 +11,159 @@

# -*- coding: utf-8 -*-

import random
import string
from util.common import *
from util.log import *
from util.cases import *
from util.sql import *

from util.sqlset import *


class TDTestCase:
    def init(self, conn, logSql):
        tdLog.debug("start to execute %s" % __file__)
        tdSql.init(conn.cursor())

        self.setsql = TDSetSql()
        self.ntbname = 'ntb'
        self.rowNum = 10
        self.tbnum = 20
        self.ts = 1537146000000
        self.binary_str = 'taosdata'
        self.nchar_str = '涛思数据'
    def top_check_base(self):
        tdSql.prepare()
        tdSql.execute('''create table stb(ts timestamp, col1 tinyint, col2 smallint, col3 int, col4 bigint, col5 tinyint unsigned, col6 smallint unsigned,
                      col7 int unsigned, col8 bigint unsigned, col9 float, col10 double, col11 bool, col12 binary(20), col13 nchar(20)) tags(loc nchar(20))''')
        tdSql.execute("create table stb_1 using stb tags('beijing')")
        self.column_dict = {
            'ts': 'timestamp',
            'col1': 'tinyint',
            'col2': 'smallint',
            'col3': 'int',
            'col4': 'bigint',
            'col5': 'tinyint unsigned',
            'col6': 'smallint unsigned',
            'col7': 'int unsigned',
            'col8': 'bigint unsigned',
            'col9': 'float',
            'col10': 'double',
            'col11': 'bool',
            'col12': 'binary(20)',
            'col13': 'nchar(20)'
        }

        self.param_list = [1, 100]
    def insert_data(self, column_dict, tbname, row_num):
        sql = ''
        for k, v in column_dict.items():
            if v.lower() == 'timestamp' or v.lower() == 'tinyint' or v.lower() == 'smallint' or v.lower() == 'int' or v.lower() == 'bigint' or \
                    v.lower() == 'tinyint unsigned' or v.lower() == 'smallint unsigned' or v.lower() == 'int unsigned' or v.lower() == 'bigint unsigned' or v.lower() == 'bool':
                sql += '%d,'
            elif v.lower() == 'float' or v.lower() == 'double':
                sql += '%f,'
            elif 'binary' in v.lower():
                sql += f'"{self.binary_str}%d",'
            elif 'nchar' in v.lower():
                sql += f'"{self.nchar_str}%d",'
        insert_sql = f'insert into {tbname} values({sql[:-1]})'
        for i in range(row_num):
            insert_list = []
            for k, v in column_dict.items():
                if v.lower() in ['tinyint', 'smallint', 'int', 'bigint', 'tinyint unsigned', 'smallint unsigned', 'int unsigned', 'bigint unsigned'] or \
                        'binary' in v.lower() or 'nchar' in v.lower():
                    insert_list.append(1 + i)
                elif v.lower() == 'float' or v.lower() == 'double':
                    insert_list.append(0.1 + i)
                elif v.lower() == 'bool':
                    insert_list.append(i % 2)
                elif v.lower() == 'timestamp':
                    insert_list.append(self.ts + i)
            tdSql.execute(insert_sql % (tuple(insert_list)))
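For orientation (not part of the diff), insert_data builds one parameterized insert statement from the column dict and then fills it once per row. With the fourteen-column dict above and a placeholder table name ntb, the generated template would look roughly like this:

# insert into ntb values(%d,%d,%d,%d,%d,%d,%d,%d,%d,%f,%f,%d,"taosdata%d","涛思数据%d")
# Row i is filled with: self.ts + i for the timestamp, 1 + i for the integer
# columns, 0.1 + i for float/double, i % 2 for bool, and 1 + i as the numeric
# suffix of the binary/nchar strings.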
    def top_check_data(self, tbname, tb_type):
        new_column_dict = {}
        for param in self.param_list:
            for k, v in self.column_dict.items():
                if v.lower() in ['tinyint', 'smallint', 'int', 'bigint', 'tinyint unsigned', 'smallint unsigned', 'int unsigned', 'bigint unsigned']:
                    tdSql.query(f'select top({k},{param}) from {tbname}')
                    if param >= self.rowNum:
                        if tb_type in ['normal_table', 'child_table']:
                            tdSql.checkRows(self.rowNum)
                            values_list = []
                            for i in range(self.rowNum):
                                tdSql.execute(f"insert into stb_1 values(%d, %d, %d, %d, %d, %d, %d, %d, %d, %f, %f, %d, '{self.binary_str}%d', '{self.nchar_str}%d')"
                                              % (self.ts + i, i + 1, i + 1, i + 1, i + 1, i + 1, i + 1, i + 1, i + 1, i + 0.1, i + 0.1, i % 2, i + 1, i + 1))
                                column_list = ['col1', 'col2', 'col3', 'col4', 'col5', 'col6', 'col7', 'col8']
                                error_column_list = ['col11', 'col12', 'col13']
                                error_param_list = [0, 101]
                                for i in column_list:
                                    tdSql.query(f'select top({i},2) from stb_1')
                                tp = (self.rowNum - i,)
                                values_list.insert(0, tp)
                            tdSql.checkEqual(tdSql.queryResult, values_list)
                        elif tb_type == 'stable':
                            tdSql.checkRows(param)
                    elif param < self.rowNum:
                        if tb_type in ['normal_table', 'child_table']:
                            tdSql.checkRows(param)
                            values_list = []
                            for i in range(param):
                                tp = (self.rowNum - i,)
                                values_list.insert(0, tp)
                            tdSql.checkEqual(tdSql.queryResult, values_list)
                        elif tb_type == 'stable':
                            tdSql.checkRows(param)
                    for i in [self.param_list[0] - 1, self.param_list[-1] + 1]:
                        tdSql.error(f'select top({k},{i}) from {tbname}')
                    new_column_dict.update({k: v})
                elif v.lower() == 'bool' or 'binary' in v.lower() or 'nchar' in v.lower():
                    tdSql.error(f'select top({k},{param}) from {tbname}')
                tdSql.error(f'select * from {tbname} where top({k},{param})=1')
        for key in new_column_dict.keys():
            for k in self.column_dict.keys():
                if key == k:
                    continue
                else:
                    tdSql.query(f'select top({key},2),{k} from {tbname} group by tbname')
                    if tb_type == 'normal_table' or tb_type == 'child_table':
                        tdSql.checkRows(2)
                        tdSql.checkEqual(tdSql.queryResult, [(9,), (10,)])
                        for j in error_param_list:
                            tdSql.error(f'select top({i},{j}) from stb_1')
                        for i in error_column_list:
                            tdSql.error(f'select top({i},10) from stb_1')
                        tdSql.query("select ts,top(col1, 2),ts from stb_1 group by tbname")
                        tdSql.checkRows(2)
                        tdSql.query('select top(col2,1) from stb_1 interval(1y) order by col2')
                        tdSql.checkData(0, 0, 10)
                        tdSql.error("select * from stb_1 where top(col2,1)=1")
                        tdSql.execute('drop database db')
    def top_check_stb_distribute(self):
        # prepare data for vgroup 4
                    else:
                        tdSql.checkRows(2 * self.tbnum)
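For reference (not part of the diff), the values_list that top_check_data compares against is built back to front, so top(col, param) over rows holding 1..rowNum is expected to return the param largest values in ascending order. A small standalone sketch of that construction, assuming rowNum = 10 and param = 2:

rowNum, param = 10, 2
values_list = []
for i in range(param):
    # (rowNum - i,) mirrors the one-column tuples tdSql returns for an int column
    values_list.insert(0, (rowNum - i,))
# values_list == [(9,), (10,)], the two largest of 1..10 in ascending order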
    def top_check_stb(self):
        dbname = tdCom.getLongName(10, "letters")
        stbname = tdCom.getLongName(5, "letters")
        tag_dict = {
            't0': 'int'
        }
        tag_values = [
            f'1'
        ]
        tdSql.execute(f"create database if not exists {dbname} vgroups 2")
        tdSql.execute(f'use {dbname}')
        # build 20 child tables,every table insert 10 rows
        tdSql.execute(f'''create table {stbname}(ts timestamp, col1 tinyint, col2 smallint, col3 int, col4 bigint, col5 tinyint unsigned, col6 smallint unsigned,
                      col7 int unsigned, col8 bigint unsigned, col9 float, col10 double, col11 bool, col12 binary(20), col13 nchar(20)) tags(loc nchar(20))''')
        tdSql.execute(self.setsql.set_create_stable_sql(stbname, self.column_dict, tag_dict))

        for i in range(self.tbnum):
            tdSql.execute(f"create table {stbname}_{i} using {stbname} tags('beijing')")
            tdSql.execute(f"insert into {stbname}_{i}(ts) values(%d)" % (self.ts - 1 - i))
            column_list = ['col1', 'col2', 'col3', 'col4', 'col5', 'col6', 'col7', 'col8']
            for i in [f'{stbname}', f'{dbname}.{stbname}']:
                for j in column_list:
                    tdSql.query(f"select top({j},1) from {i}")
                    tdSql.checkRows(0)
            tdSql.execute(f"create table {stbname}_{i} using {stbname} tags({tag_values[0]})")
            tdSql.execute(self.insert_data(self.column_dict, f'{stbname}_{i}', self.rowNum))
        tdSql.query('show tables')
        vgroup_list = []
        for i in range(len(tdSql.queryResult)):
            vgroup_list.append(tdSql.queryResult[i][6])
        vgroup_list_set = set(vgroup_list)

        for i in vgroup_list_set:
            vgroups_num = vgroup_list.count(i)
            if vgroups_num >= 2:
                tdLog.info(f'This scene with {vgroups_num} vgroups is ok!')
                continue

            else:
                tdLog.exit(f'This scene does not meet the requirements with {vgroups_num} vgroup!\n')
        for i in range(self.rowNum):
            for j in range(self.tbnum):
                tdSql.execute(f"insert into {stbname}_{j} values(%d, %d, %d, %d, %d, %d, %d, %d, %d, %f, %f, %d, '{self.binary_str}%d', '{self.nchar_str}%d')"
                              % (self.ts + i, i + 1, i + 1, i + 1, i + 1, i + 1, i + 1, i + 1, i + 1, i + 0.1, i + 0.1, i % 2, i + 1, i + 1))
                tdLog.exit(
                    'This scene does not meet the requirements with {vgroups_num} vgroup!\n')
        for i in range(self.tbnum):
            self.top_check_data(f'{stbname}_{i}', 'child_table')
        self.top_check_data(stbname, 'stable')
        tdSql.execute(f'drop database {dbname}')

        error_column_list = ['col11', 'col12', 'col13']
        error_param_list = [0, 101]
        for i in column_list:
            tdSql.query(f'select top({i},2) from {stbname}')
            tdSql.checkRows(2)
            tdSql.checkEqual(tdSql.queryResult, [(10,), (10,)])
            for j in error_param_list:
                tdSql.error(f'select top({i},{j}) from {stbname}')
        for i in error_column_list:
            tdSql.error(f'select top({i},10) from {stbname}')
    def top_check_ntb(self):
        tdSql.prepare()
        tdSql.execute(self.setsql.set_create_normaltable_sql(self.ntbname, self.column_dict))
        self.insert_data(self.column_dict, self.ntbname, self.rowNum)
        self.top_check_data(self.ntbname, 'normal_table')
        tdSql.execute('drop database db')

        tdSql.query(f"select ts,top(col1, 2),ts from {stbname} group by tbname")
        tdSql.checkRows(2 * self.tbnum)
        tdSql.query(f'select top(col2,1) from {stbname} interval(1y) order by col2')
        tdSql.checkData(0, 0, 10)
        tdSql.error(f"select * from {stbname} where top(col2,1)=1")
    def run(self):
        self.top_check_base()
        self.top_check_stb_distribute()
        self.top_check_ntb()
        self.top_check_stb()

    def stop(self):
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)