enh: add test case

Hongze Cheng 2024-12-26 20:10:13 +08:00
parent bb2185b8de
commit 34a4608c7d
3 changed files with 275 additions and 0 deletions

View File

@@ -0,0 +1,264 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import os
import subprocess
import sys
import time
import random
import taos
import frame
import frame.etool
from frame.log import *
from frame.cases import *
from frame.sql import *
from frame.caseBase import *
from frame import *
from frame.srvCtl import *
from frame.clusterCommonCheck import clusterComCheck
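
# This case is registered in the parallel case list as cluster/tsdbSnapshot.py
# (run with -N 3 -M 3). It writes and flushes a first batch of data, stops
# dnode 3, writes and flushes a second batch, then deletes the WAL of the two
# surviving replicas before restarting the cluster, so dnode 3 can only catch
# up through a tsdb snapshot rather than WAL replay.
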
class TDTestCase(TBase):
    def init(self, conn, logSql, replicaVar=3):
        # super(TDTestCase, self).init(conn, logSql, replicaVar=3, db="db")
        tdLog.debug(f"start to execute {__file__}")
        tdSql.init(conn.cursor(), logSql)
        self.vgroupNum = 3
        self.dbName = 'test'
        self.dnode1Path = tdCom.getTaosdPath()
        self.dnode1Cfg = f'{self.dnode1Path}/cfg'
        self.dnode1Log = f'{self.dnode1Path}/log'
    def _write_bulk_data(self):
        tdLog.info("============== write bulk data ===============")
        json_content = f"""
        {{
            "filetype": "insert",
            "cfgdir": "{self.dnode1Cfg}",
            "host": "localhost",
            "port": 6030,
            "user": "root",
            "password": "taosdata",
            "connection_pool_size": 8,
            "thread_count": 16,
            "create_table_thread_count": 10,
            "result_file": "./insert_res.txt",
            "confirm_parameter_prompt": "no",
            "insert_interval": 0,
            "interlace_rows": 5,
            "num_of_records_per_req": 1540,
            "prepared_rand": 10000,
            "chinese": "no",
            "databases": [
                {{
                    "dbinfo": {{
                        "name": "{self.dbName}",
                        "drop": "yes",
                        "vgroups": 1,
                        "duration": "10d",
                        "wal_retention_period": 0,
                        "replica": 3,
                        "stt_trigger": 2
                    }},
                    "super_tables": [
                        {{
                            "name": "stb",
                            "child_table_exists": "no",
                            "childtable_count": 100,
                            "childtable_prefix": "ctb",
                            "escape_character": "yes",
                            "auto_create_table": "no",
                            "batch_create_tbl_num": 500,
                            "data_source": "rand",
                            "insert_mode": "taosc",
                            "non_stop_mode": "no",
                            "line_protocol": "line",
                            "insert_rows": 10000,
                            "interlace_rows": 1,
                            "insert_interval": 0,
                            "partial_col_num": 0,
                            "disorder_ratio": 0,
                            "disorder_range": 0,
                            "timestamp_step": 1000,
                            "start_timestamp": "{(datetime.now() - timedelta(days=1)).replace(hour=10, minute=0, second=0, microsecond=0).strftime('%Y-%m-%d %H:%M:%S')}",
                            "use_sample_ts": "no",
                            "tags_file": "",
                            "columns": [
                                {{
                                    "type": "bigint",
                                    "count": 10
                                }}
                            ],
                            "tags": [
                                {{
                                    "type": "TINYINT",
                                    "name": "groupid",
                                    "max": 10,
                                    "min": 1
                                }},
                                {{
                                    "name": "location",
                                    "type": "BINARY",
                                    "len": 16,
                                    "values": [
                                        "beijing",
                                        "shanghai"
                                    ]
                                }}
                            ]
                        }}
                    ]
                }}
            ]
        }}
        """
        json_file = '/tmp/test.json'
        with open(json_file, 'w') as f:
            f.write(json_content)
        # Use subprocess.run() to wait for the command to finish
        subprocess.run(f'taosBenchmark -f {json_file}', shell=True, check=True)
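
    # _write_bulk_data2 reuses the benchmark config above, but appends to the
    # existing database (drop: "no", child_table_exists: "yes") with a later
    # start_timestamp, so a second, non-overlapping batch lands in the same
    # child tables.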
    def _write_bulk_data2(self):
        tdLog.info("============== write bulk data ===============")
        json_content = f"""
        {{
            "filetype": "insert",
            "cfgdir": "{self.dnode1Cfg}",
            "host": "localhost",
            "port": 6030,
            "user": "root",
            "password": "taosdata",
            "connection_pool_size": 8,
            "thread_count": 16,
            "create_table_thread_count": 10,
            "result_file": "./insert_res.txt",
            "confirm_parameter_prompt": "no",
            "insert_interval": 0,
            "interlace_rows": 5,
            "num_of_records_per_req": 1540,
            "prepared_rand": 10000,
            "chinese": "no",
            "databases": [
                {{
                    "dbinfo": {{
                        "name": "{self.dbName}",
                        "drop": "no",
                        "vgroups": 1,
                        "duration": "10d",
                        "wal_retention_period": 0,
                        "replica": 3,
                        "stt_trigger": 2
                    }},
                    "super_tables": [
                        {{
                            "name": "stb",
                            "child_table_exists": "yes",
                            "childtable_count": 100,
                            "childtable_prefix": "ctb",
                            "escape_character": "yes",
                            "auto_create_table": "no",
                            "batch_create_tbl_num": 500,
                            "data_source": "rand",
                            "insert_mode": "taosc",
                            "non_stop_mode": "no",
                            "line_protocol": "line",
                            "insert_rows": 10000,
                            "interlace_rows": 1,
                            "insert_interval": 0,
                            "partial_col_num": 0,
                            "disorder_ratio": 0,
                            "disorder_range": 0,
                            "timestamp_step": 1000,
                            "start_timestamp": "{(datetime.now() - timedelta(days=1)).replace(hour=14, minute=0, second=0, microsecond=0).strftime('%Y-%m-%d %H:%M:%S')}",
                            "use_sample_ts": "no",
                            "tags_file": "",
                            "columns": [
                                {{
                                    "type": "bigint",
                                    "count": 10
                                }}
                            ],
                            "tags": [
                                {{
                                    "type": "TINYINT",
                                    "name": "groupid",
                                    "max": 10,
                                    "min": 1
                                }},
                                {{
                                    "name": "location",
                                    "type": "BINARY",
                                    "len": 16,
                                    "values": [
                                        "beijing",
                                        "shanghai"
                                    ]
                                }}
                            ]
                        }}
                    ]
                }}
            ]
        }}
        """
        json_file = '/tmp/test.json'
        with open(json_file, 'w') as f:
            f.write(json_content)
        # Use subprocess.run() to wait for the command to finish
        subprocess.run(f'taosBenchmark -f {json_file}', shell=True, check=True)
    def run(self):
        tdLog.info("============== write bulk data ===============")
        self._write_bulk_data()
        tdSql.execute(f'flush database {self.dbName}')
        tdLog.sleep(10)

        # Stop one replica so it misses the second batch of writes.
        tdLog.info("============== stop dnode 3 ===============")
        cluster.dnodes[2].stoptaosd()
        tdLog.sleep(10)

        tdLog.info("============== write more data ===============")
        self._write_bulk_data2()
        tdSql.execute(f'flush database {self.dbName}')
        tdLog.sleep(10)

        # Remove the WAL of the two surviving replicas, so the restarted
        # follower cannot catch up by WAL replay and must use a tsdb snapshot.
        cluster.dnodes[0].stoptaosd()
        cluster.dnodes[1].stoptaosd()
        dnode1_wal = f'{self.dnode1Path}/data/vnode/vnode2/wal'
        dnode2_wal = f'{self.dnode1Path}/../dnode2/data/vnode/vnode2/wal'
        tdLog.info("============== remove wal files ===============")
        tdLog.info(f"{dnode1_wal}")
        tdLog.info(f"{dnode2_wal}")
        os.system(f'rm -rf {dnode1_wal}/*')
        os.system(f'rm -rf {dnode2_wal}/*')

        tdLog.info("============== restart cluster ===============")
        cluster.dnodes[0].starttaosd()
        cluster.dnodes[1].starttaosd()
        cluster.dnodes[2].starttaosd()
        tdLog.sleep(10)
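
        # A hedged verification sketch (not part of the original commit):
        # after the restart, dnode 3 should recover the missed batch via a
        # tsdb snapshot. Assuming both benchmark runs complete and their
        # timestamp ranges do not overlap, stb should hold
        # 100 child tables x 10000 rows x 2 batches = 2,000,000 rows.
        # checkDnodes/query/checkData are the helpers other cluster cases in
        # this framework use for such assertions.
        clusterComCheck.checkDnodes(3)
        tdSql.query(f'select count(*) from {self.dbName}.stb')
        tdSql.checkData(0, 0, 2000000)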
    def stop(self):
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)


tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())

View File

@@ -214,6 +214,16 @@ class TDCom:
        telnet_url = "http://127.0.0.1:6041/opentsdb/v1/put/telnet"
        return header, sql_url, sqlt_url, sqlutc_url, influx_url, telnet_url

    def getTaosdPath(self, dnodeID="dnode1"):
        buildPath = self.getBuildPath()
        if (buildPath == ""):
            tdLog.exit("taosd not found!")
        else:
            tdLog.info("taosd found in %s" % buildPath)
        taosdPath = buildPath + "/../sim/" + dnodeID
        tdLog.info("taosdPath: %s" % taosdPath)
        return taosdPath

    def genTcpParam(self):
        MaxBytes = 1024*1024
        host = '127.0.0.1'
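
The new getTaosdPath helper resolves a simulator dnode's working directory from the build path. A minimal usage sketch (assumptions: tdCom is the shared TDCom instance exposed by the frame package, and "dnode2" is just an illustrative ID):

    # Config directory of a simulated dnode; layout is <build>/../sim/<dnodeID>.
    cfg_dir = tdCom.getTaosdPath("dnode2") + "/cfg"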

View File

@@ -34,6 +34,7 @@
,,y,army,./pytest.sh python3 ./test.py -f cluster/test_drop_table_by_uid.py -N 3
,,y,army,./pytest.sh python3 ./test.py -f cluster/incSnapshot.py -N 3
,,y,army,./pytest.sh python3 ./test.py -f cluster/clusterBasic.py -N 5
,,y,army,./pytest.sh python3 ./test.py -f cluster/tsdbSnapshot.py -N 3 -M 3
,,y,army,./pytest.sh python3 ./test.py -f query/query_basic.py -N 3
,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_query_accuracy.py
,,y,army,./pytest.sh python3 ./test.py -f query/accuracy/test_ts5400.py