Merge pull request #29326 from taosdata/enh/TD-33263-3.0-2

enh: add retention test
This commit is contained in:
Hongze Cheng 2024-12-25 19:10:22 +08:00 committed by GitHub
commit fd6633f6ea
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 184 additions and 0 deletions

View File

@ -384,6 +384,7 @@
,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/user_privilege_all.py
,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/fsync.py
,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/multilevel.py
,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/retention_test.py
,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/multilevel_createdb.py
,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ttl.py
,,y,system-test,./pytest.sh python3 ./test.py -f 0-others/ttlChangeOnWrite.py

View File

@ -0,0 +1,182 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import os
import time
from util.log import *
from util.cases import *
from util.sql import *
from util.common import *
from util.sqlset import *
import subprocess
from datetime import datetime, timedelta
class TDTestCase:
    """Multi-level storage retention test.

    Scenario: write ~5 days of data into a database whose keep is "3d,6d"
    while only a level-0 dataDir exists, then add a level-1 dataDir,
    trim the database, and verify that every vgroup now has data under
    the level-1 tier (i.e. retention moved aged data to the new tier).
    """

    def _prepare_env1(self):
        """Redeploy dnode 1 with a single level-0 data directory."""
        tdLog.info("============== prepare environment 1 ===============")
        level_0_path = f'{self.dnode_path}/data00'
        # cfg maps "value -> key" per the tdDnodes.deploy convention used here.
        cfg = {
            level_0_path: 'dataDir',
        }
        tdSql.createDir(level_0_path)
        tdDnodes.stop(1)
        tdDnodes.deploy(1, cfg)
        tdDnodes.start(1)

    def _prepare_env2(self):
        """Redeploy dnode 1 with two tiers: level 0 plus a level-1 dataDir.

        The '1 0' suffix on the second entry encodes the dataDir options
        (presumably "level 1, non-primary" — matches the TDengine dataDir
        config syntax; confirm against server docs).
        """
        tdLog.info("============== prepare environment 2 ===============")
        level_0_path = f'{self.dnode_path}/data00'
        level_1_path = f'{self.dnode_path}/data01'
        cfg = {
            level_0_path: 'dataDir',
            f'{level_1_path} 1 0': 'dataDir',
        }
        tdSql.createDir(level_1_path)
        tdDnodes.stop(1)
        tdDnodes.deploy(1, cfg)
        tdDnodes.start(1)

    def _write_bulk_data(self):
        """Generate a taosBenchmark job file and run it synchronously.

        The database uses duration=1d and keep="3d,6d"; rows start 5 days
        in the past so part of the data falls outside the level-0 keep
        window and becomes eligible for migration to level 1.
        """
        tdLog.info("============== write bulk data ===============")
        json_content = f"""
        {{
            "filetype": "insert",
            "cfgdir": "{self.cfg_path}",
            "host": "localhost",
            "port": 6030,
            "user": "root",
            "password": "taosdata",
            "connection_pool_size": 8,
            "thread_count": 16,
            "create_table_thread_count": 10,
            "result_file": "./insert_res.txt",
            "confirm_parameter_prompt": "no",
            "insert_interval": 0,
            "interlace_rows": 5,
            "num_of_records_per_req": 1540,
            "prepared_rand": 10000,
            "chinese": "no",
            "databases": [
                {{
                    "dbinfo": {{
                        "name": "{self.db_name}",
                        "drop": "yes",
                        "vgroups": {self.vgroups},
                        "duration": "1d",
                        "keep": "3d,6d",
                        "wal_retention_period": 0,
                        "stt_trigger": 1
                    }},
                    "super_tables": [
                        {{
                            "name": "stb",
                            "child_table_exists": "no",
                            "childtable_count": 1000,
                            "childtable_prefix": "ctb",
                            "escape_character": "yes",
                            "auto_create_table": "no",
                            "batch_create_tbl_num": 500,
                            "data_source": "rand",
                            "insert_mode": "taosc",
                            "non_stop_mode": "no",
                            "line_protocol": "line",
                            "insert_rows": 10000,
                            "childtable_limit": 10,
                            "childtable_offset": 100,
                            "interlace_rows": 0,
                            "insert_interval": 0,
                            "partial_col_num": 0,
                            "disorder_ratio": 0,
                            "disorder_range": 1000,
                            "timestamp_step": 40000,
                            "start_timestamp": "{(datetime.now() - timedelta(days=5)).strftime('%Y-%m-%d %H:%M:%S')}",
                            "use_sample_ts": "no",
                            "tags_file": "",
                            "columns": [
                                {{
                                    "type": "bigint",
                                    "count": 10
                                }}
                            ],
                            "tags": [
                                {{
                                    "type": "TINYINT",
                                    "name": "groupid",
                                    "max": 10,
                                    "min": 1
                                }},
                                {{
                                    "name": "location",
                                    "type": "BINARY",
                                    "len": 16,
                                    "values": [
                                        "beijing",
                                        "shanghai"
                                    ]
                                }}
                            ]
                        }}
                    ]
                }}
            ]
        }}
        """
        json_file = '/tmp/test.json'
        with open(json_file, 'w') as f:
            f.write(json_content)
        # Pass an argv list (shell=False) instead of an interpolated shell
        # string; check=True still makes a benchmark failure abort the test.
        subprocess.run(['taosBenchmark', '-f', json_file], check=True)

    def _check_retention(self):
        """Verify each vgroup's tsdb directory exists on the level-1 tier.

        Raises RuntimeError (instead of a bare assert, which would be
        stripped under ``python -O``) when any tsdb directory is empty.
        Vnode directory ids start at 2 here — presumably ids below 2 are
        reserved by the server; confirm against the deployment layout.
        """
        for vgid in range(2, 2 + self.vgroups):
            tsdb_path = os.path.join(
                self.dnode_path, 'data01', 'vnode', f'vnode{vgid}', 'tsdb')
            # The directory must contain at least one file after trim.
            if not os.listdir(tsdb_path):
                tdLog.error(f'{tsdb_path} is empty')
                raise RuntimeError(f'{tsdb_path} is empty')

    def run(self):
        """Entry point: write data on one tier, add a tier, trim, verify."""
        self._prepare_env1()
        self._write_bulk_data()
        tdSql.execute(f'flush database {self.db_name}')
        tdDnodes.stop(1)
        self._prepare_env2()
        tdSql.execute(f'trim database {self.db_name}')
        # Trim runs in the background; give it time to migrate files.
        # NOTE(review): a fixed sleep is race-prone — polling would be
        # more robust, but matches the framework's current style.
        time.sleep(10)
        self._check_retention()

    def init(self, conn, logSql, replicaVar=1):
        """Framework hook: set up the SQL connection and test parameters."""
        tdLog.debug("start to execute %s" % __file__)
        tdSql.init(conn.cursor())
        self.dnode_path = tdCom.getTaosdPath()   # dnode working directory
        self.cfg_path = f'{self.dnode_path}/cfg'
        self.log_path = f'{self.dnode_path}/log'
        self.db_name = 'test'
        self.vgroups = 10                        # vgroup count for the test db

    def stop(self):
        """Framework hook: release the SQL connection."""
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)
# Register this test case with the framework runner for both platforms.
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())

View File

@ -246,6 +246,7 @@ python3 ./test.py -f 0-others/user_privilege_show.py
python3 ./test.py -f 0-others/user_privilege_all.py
python3 ./test.py -f 0-others/fsync.py
python3 ./test.py -f 0-others/multilevel.py
python3 ./test.py -f 0-others/retention_test.py
python3 ./test.py -f 0-others/ttl.py
python3 ./test.py -f 0-others/ttlChangeOnWrite.py
python3 ./test.py -f 0-others/compress_tsz1.py