fix: run s3_basic.py ok

This commit is contained in:
Alex Duan 2023-12-30 14:39:39 +08:00
parent 1e8244f3c6
commit 64164f5684
6 changed files with 57 additions and 13 deletions

View File

@ -29,7 +29,7 @@
"childtable_prefix": "d",
"insert_mode": "taosc",
"timestamp_step": 1000,
"start_timestamp":"now-12d",
"start_timestamp":"now-15d",
"columns": [
{ "type": "bool", "name": "bc"},
{ "type": "float", "name": "fc" },

View File

@ -17,6 +17,7 @@ import time
import taos
import frame
import frame.etool
import frame.eos
from frame.log import *
from frame.cases import *
@ -24,6 +25,7 @@ from frame.sql import *
from frame.caseBase import *
from frame.srvCtl import *
from frame import *
from frame.eos import *
#
# 192.168.1.52 MINIO S3 API KEY: MQCEIoaPGUs1mhXgpUAu:XTgpN2dEMInnYgqN4gj3G5zgb39ROtsisKKy0GFa
@ -42,13 +44,16 @@ class TDTestCase(TBase):
's3EndPoint': 'http://192.168.1.52:9000',
's3AccessKey': 'MQCEIoaPGUs1mhXgpUAu:XTgpN2dEMInnYgqN4gj3G5zgb39ROtsisKKy0GFa',
's3BucketName': 'ci-bucket',
's3BlockSize': '10240',
's3BlockCacheSize': '320',
's3PageCacheSize': '10240',
's3UploadDelaySec':'60'
}
def insertData(self):
tdLog.info(f"insert data.")
# taosBenchmark run
json = etool.curFile(__file__, "mlevel_basic.json")
json = etool.curFile(__file__, "s3_basic.json")
etool.runBenchmark(json=json)
tdSql.execute(f"use {self.db}")
@ -63,12 +68,15 @@ class TDTestCase(TBase):
self.compactDb()
# sleep 70s
tdLog.info(f"wait 70s ...")
time.sleep(70)
self.trimDb()
rootPath = sc.clusterRootPath()
cmd = f"ls {rootPath}/dnode1/data02/vnode/vnode*/tsdb/*.data"
loop = 0
while len(sc.dnodeDataFiles()) > 0 and loop < 10:
time.sleep(10)
while len(eos.runRetList(cmd)) > 0 and loop < 40:
time.sleep(5)
self.trimDb()
loop += 1

View File

@ -63,17 +63,17 @@ class TBase:
# db action
#
def trimDb(self):
tdSql.execute(f"trim database {self.db}")
def trimDb(self, show = False):
tdSql.execute(f"trim database {self.db}", show = show)
def compactDb(self):
tdSql.execute(f"compact database {self.db}")
def compactDb(self, show = False):
tdSql.execute(f"compact database {self.db}", show = show)
def flushDb(self):
tdSql.execute(f"flush database {self.db}")
def flushDb(self, show = False):
tdSql.execute(f"flush database {self.db}", show = show)
def dropDb(self):
tdSql.execute(f"drop database {self.db}")
def dropDb(self, show = False):
tdSql.execute(f"drop database {self.db}", show = show)
#

View File

@ -20,11 +20,16 @@ import os
import time
import datetime
import platform
import subprocess
# return True if the current platform is Windows
def isWin():
    """Return True when the test is running on a Windows host."""
    system_name = platform.system().lower()
    return system_name == 'windows'
#
# execute program
#
# wait until the executed file finishes
def exe(file):
    """Execute *file* through the shell and block until it finishes.

    Returns the exit status exactly as reported by os.system().
    """
    status = os.system(file)
    return status
@ -34,3 +39,19 @@ def exeNoWait(file):
print("exe no wait")
# run command and return (output, error) text
def run(command):
    """Run *command* in a shell and return its (stdout, stderr) as text.

    Waits up to 3 seconds; if the command has not finished by then it is
    killed and whatever output was produced so far is returned.  Using
    communicate() instead of wait()+read() avoids the classic PIPE
    deadlock when the child fills the pipe buffer, and avoids the
    unhandled TimeoutExpired (and leaked process) the old wait(3) caused.
    """
    process = subprocess.Popen(command, shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    try:
        out, err = process.communicate(timeout=3)
    except subprocess.TimeoutExpired:
        # don't leak a still-running child; collect partial output
        process.kill()
        out, err = process.communicate()
    # keep the original gbk decoding (presumably for Chinese Windows
    # console output) -- TODO confirm utf-8 is not needed on Linux CI
    output = out.decode(encoding="gbk")
    error = err.decode(encoding="gbk")
    return output, error
# run command and return its stdout as a list of lines
def runRetList(command):
    """Run *command* and return its stdout split into a list of lines.

    stderr is discarded; an empty stdout yields an empty list.
    (Removed the dead ``lines = []`` local the original never used.)
    """
    output, _ = run(command)
    return output.splitlines()

View File

@ -51,3 +51,6 @@ def binPath():
def binFile(filename):
    """Return the full path of *filename* inside the binary directory."""
    base = binPath()
    return base + filename

View File

@ -16,6 +16,8 @@ import os
import time
import datetime
from frame.server.dnodes import *
class srvCtl:
def __init__(self):
# record server information
@ -24,9 +26,19 @@ class srvCtl:
self.mLevel = 0
self.mLevelDisk = 0
#
# about path
#
# get cluster root path like /root/TDinternal/sim/
def clusterRootPath(self):
    """Return the cluster root path (e.g. /root/TDinternal/sim/).

    Delegates to the project-global ``tdDnodes.path`` -- the value is
    whatever the dnodes manager was configured with.
    """
    return tdDnodes.path
# return dnode data files list
def dnodeDataFiles(self, idx):
    """Return the list of data files for dnode *idx*.

    NOTE(review): currently a stub that always returns an empty list
    and ignores *idx*; the s3_basic test was switched to shelling out
    via ``eos.runRetList`` instead -- confirm whether this method is
    still needed or should be implemented/removed.
    """
    files = []
    return files
sc = srvCtl()