Merge pull request #24325 from taosdata/case/TS-4411-3.0
Case/ts 4411 3.0
commit 078d0c04f8
@@ -0,0 +1,67 @@
import taos
import sys

from frame.log import *
from frame.cases import *
from frame.sql import *
from frame.caseBase import *
from frame import *


class TDTestCase(TBase):

    def init(self, conn, logSql, replicaVar=1):
        self.replicaVar = int(replicaVar)
        tdLog.debug(f"start to execute {__file__}")
        #tdSql.init(conn.cursor())
        tdSql.init(conn.cursor(), logSql)  # output sql.txt file

    def run(self):
        dbname = "db"
        stbname = "ocloud_point"
        tbname = "ocloud_point_170658_3837620225_1701134595725266945"

        tdSql.prepare()

        tdLog.printNoPrefix("==========step1:create table")

        tdSql.execute(
            f'''create stable if not exists {dbname}.{stbname}
            (wstart timestamp, point_value float) tags (location binary(64), groupId int)
            '''
        )

        tdSql.execute(
            f'''create table if not exists {dbname}.{tbname} using {dbname}.{stbname} tags("California.SanFrancisco", 2)'''
        )

        tdLog.printNoPrefix("==========step2:insert data")

        # rows at 10:38 and 10:40 are null so that fill(prev) has gaps to fill
        sqls = []
        for i in range(35, 41):
            if i == 38 or i == 40:
                sqls.append(f"insert into {dbname}.{tbname} values('2023-12-26 10:{i}:00.000', null)")
            else:
                sqls.append(f"insert into {dbname}.{tbname} values('2023-12-26 10:{i}:00.000', 5.0)")

        tdSql.executes(sqls)

        tdLog.printNoPrefix("==========step3:fill data")

        tdSql.query(f"select first(point_value) as pointValue from {dbname}.{tbname} where wstart between '2023-12-26 10:35:00' and '2023-12-26 10:40:00' interval(1M) fill(prev) order by wstart desc limit 100")
        data = []
        for i in range(6):
            row = [5]
            data.append(row)
        tdSql.checkDataMem(data)

    def stop(self):
        tdSql.close()
        tdLog.success(f"{__file__} successfully executed")


tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
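The case covers fill(prev) combined with order by wstart desc: the rows at 10:38 and 10:40 are null, so their one-minute windows have no first() value, and fill(prev) is expected to carry 5.0 forward so that all six windows come back as 5. As a rough standalone reproduction outside the army framework, here is a minimal sketch assuming a local TDengine server, the taospy connector, default root/taosdata credentials, and a hypothetical short table name p1:

import taos

# minimal sketch: same schema, data, and query as the fill_desc case above
conn = taos.connect(host="localhost", user="root", password="taosdata")
cursor = conn.cursor()
cursor.execute("create database if not exists db")
cursor.execute("use db")
cursor.execute("create stable if not exists ocloud_point"
               " (wstart timestamp, point_value float) tags (location binary(64), groupId int)")
cursor.execute("create table if not exists p1 using ocloud_point tags('California.SanFrancisco', 2)")
for i in range(35, 41):
    value = "null" if i in (38, 40) else "5.0"   # nulls at :38 and :40, as in the case
    cursor.execute(f"insert into p1 values('2023-12-26 10:{i}:00.000', {value})")
cursor.execute("select first(point_value) from p1"
               " where wstart between '2023-12-26 10:35:00' and '2023-12-26 10:40:00'"
               " interval(1M) fill(prev) order by wstart desc limit 100")
rows = cursor.fetchall()
assert len(rows) == 6 and all(r[0] == 5.0 for r in rows)   # mirrors checkDataMem([[5]] * 6)
cursor.close()
conn.close()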
@@ -447,6 +447,48 @@ class TDSql:
        if(show):
            tdLog.info("check successfully")

    def checkDataMem(self, mem):
        if not isinstance(mem, list):
            caller = inspect.getframeinfo(inspect.stack()[1][0])
            args = (caller.filename, caller.lineno, self.sql)
            tdLog.exit("%s(%d) failed: sql:%s, expected data is invalid, must be a 2-D array" % args)

        if len(mem) != self.queryRows:
            caller = inspect.getframeinfo(inspect.stack()[1][0])
            args = (caller.filename, caller.lineno, self.sql, len(mem), self.queryRows)
            tdLog.exit("%s(%d) failed: sql:%s, expected rows:%d does not equal queryRows:%d" % args)
        # compare the expected data cell by cell (row, col, data)
        for row, rowData in enumerate(mem):
            for col, colData in enumerate(rowData):
                self.checkData(row, col, colData)
        tdLog.info("check successfully")

    def checkDataCsv(self, csvfilePath):
        if not isinstance(csvfilePath, str) or len(csvfilePath) == 0:
            caller = inspect.getframeinfo(inspect.stack()[1][0])
            args = (caller.filename, caller.lineno, self.sql, csvfilePath)
            tdLog.exit("%s(%d) failed: sql:%s, invalid csvfile path:%s" % args)

        tdLog.info("read csvfile begin")
        data = []
        try:
            # requires "import csv" at module level (not shown in this hunk)
            with open(csvfilePath) as csvfile:
                csv_reader = csv.reader(csvfile)  # read the csv file row by row
                # header = next(csv_reader)  # read the header of each column in the first row
                for row in csv_reader:  # save csv rows into data
                    data.append(row)
        except FileNotFoundError:
            caller = inspect.getframeinfo(inspect.stack()[1][0])
            args = (caller.filename, caller.lineno, self.sql, csvfilePath)
            tdLog.exit("%s(%d) failed: sql:%s, csvfile not found:%s" % args)
        except Exception as e:
            caller = inspect.getframeinfo(inspect.stack()[1][0])
            args = (caller.filename, caller.lineno, self.sql, csvfilePath, str(e))
            tdLog.exit("%s(%d) failed: sql:%s, csvfile path:%s, read error:%s" % args)

        tdLog.info("read csvfile successfully")
        self.checkDataMem(data)

    # return True or False instead of exiting, with no printed output
    def checkRowColNoExit(self, row, col):
        caller = inspect.getframeinfo(inspect.stack()[2][0])
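checkDataMem compares an entire expected result set in one call (row count first, then cell by cell through the existing checkData), and checkDataCsv only loads a CSV file into a 2-D list before delegating to checkDataMem. A minimal usage sketch inside a case like the one above (tdSql comes from frame.sql; the CSV path is hypothetical):

# after running the query through tdSql, validate the whole result set at once
tdSql.query(f"select first(point_value) as pointValue from {dbname}.{tbname} "
            f"where wstart between '2023-12-26 10:35:00' and '2023-12-26 10:40:00' "
            f"interval(1M) fill(prev) order by wstart desc limit 100")
tdSql.checkDataMem([[5]] * 6)                  # 6 rows x 1 column, every cell expected to be 5

# or keep the expectation in a file: one result row per line, columns comma-separated
tdSql.checkDataCsv("expected_fill_desc.csv")   # hypothetical path; loads the CSV, then calls checkDataMem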
@@ -17,6 +17,7 @@ fi
,,y,army,./pytest.sh python3 ./test.py -f enterprise/multi-level/mlevel_basic.py -N 3 -L 3 -D 2
,,y,army,./pytest.sh python3 ./test.py -f enterprise/s3/s3_basic.py -L 3 -D 1
,,y,army,./pytest.sh python3 ./test.py -f community/cluster/snapshot.py -N 3 -L 3 -D 2
,,y,army,./pytest.sh python3 ./test.py -f community/query/fill/fill_desc.py -N 3 -L 3 -D 2
,,y,army,./pytest.sh python3 ./test.py -f community/cluster/incSnapshot.py -N 3 -L 3 -D 2
,,y,army,./pytest.sh python3 ./test.py -f community/query/query_basic.py -N 3
,,n,army,python3 ./test.py -f community/cmdline/fullopt.py
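To run only the new case locally, the same invocation registered above can be used on its own, presumably from the army directory that the fourth field names: python3 ./test.py -f community/query/fill/fill_desc.py -N 3 -L 3 -D 2 (or prefixed with ./pytest.sh as in the list).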