Merge pull request #23140 from taosdata/daily_perf_test
add script for daily perf test
This commit is contained in:
commit
c26d74355c
|
@ -0,0 +1,20 @@
|
|||
#!/bin/bash
# build.sh <repo_path> <branch>
# Sync the TDengine checkout at <repo_path> to <branch>, enable all optional
# cmake features, and do a clean build + install.
set -x

REPO_DIR="$1"    # quoted everywhere: paths with spaces must not word-split
BRANCH="$2"

# Abort if the repo path is wrong — everything below (including rm -rf)
# depends on being inside the right directory.
cd "$REPO_DIR" || exit 1

# Discard local changes, then update to the requested branch.
git reset --hard HEAD
git checkout -- .
git checkout "$BRANCH"
git pull

# Flip the last "OFF" in cmake.options to "ON".  The :a;N;$!ba loop slurps the
# whole file into one pattern space so the greedy \(.*\) matches the final OFF.
sed -i ':a;N;$!ba;s/\(.*\)OFF/\1ON/' "$REPO_DIR/cmake/cmake.options"

# Fresh out-of-source build directory (single cd instead of repeating it
# before every build step).
mkdir -p "$REPO_DIR/debug"
rm -rf "$REPO_DIR/debug"/*
cd "$REPO_DIR/debug" || exit 1
cmake .. -DBUILD_TOOLS=true
make -j 4
make install
|
|
@ -0,0 +1,36 @@
|
|||
import os
|
||||
import subprocess
|
||||
|
||||
class BuildTDengine:
    """Build TDengine from a local git checkout via the companion build.sh.

    Attributes:
        host: label of the machine the build is intended for (informational
              only from this class's point of view — the build runs locally).
        path: absolute path of the TDengine git checkout.
        branch: branch to check out and build.
    """

    def __init__(self, host: str = 'vm96', path: str = '/root/pxiao/TDengine',
                 branch: str = 'main') -> None:
        self.host = host
        self.path = path
        self.branch = branch

    def build(self) -> None:
        """Run ``./build.sh <path> <branch>``.

        Errors are reported to stdout rather than raised, keeping the
        daily-perf driver alive even when a build fails.
        """
        parameters = [self.path, self.branch]
        build_file = "./build.sh"  # fixed typo: was `build_fild`
        try:
            # check=True turns a non-zero exit status into CalledProcessError.
            subprocess.run(['bash', build_file] + parameters, check=True)
            print("TDengine build successfully.")
        except subprocess.CalledProcessError as e:
            print(f"Error running Bash script: {e}")
        except FileNotFoundError as e:
            print(f"File not found: {e}")

    def get_commit_id(self):
        """Return the short commit id of HEAD in ``self.path``.

        Returns None (after printing the error) if the git command fails.
        """
        cmd = f"cd {self.path} && git rev-parse --short @ "
        try:
            # check=True added: without it the except clause below was dead
            # code and a failing git command silently returned "".
            result = subprocess.run(cmd, stdout=subprocess.PIPE, shell=True,
                                    text=True, check=True)
            return result.stdout.strip()
        except subprocess.CalledProcessError as e:
            print(f"Error running Bash command: {e}")
|
||||
|
||||
# Quick smoke check: report the current short commit id of the default checkout.
print(BuildTDengine().get_commit_id())
|
|
@ -0,0 +1,100 @@
|
|||
import datetime
|
||||
import json
|
||||
|
||||
class InsertJson:
    """Generates a taosBenchmark insert-configuration JSON file.

    The four constructor parameters are the knobs varied by the daily
    performance test; everything else in the emitted config is fixed.
    """

    def __init__(self, tables=10000, records_per_table=10000,
                 interlace_rows=0, stt_trigger=1) -> None:
        self.tables = tables
        self.records_per_table = records_per_table
        self.interlace_rows = interlace_rows
        self.stt_trigger = stt_trigger

    def get_db_cfg(self) -> dict:
        """Database section: the `test` database, recreated on every run."""
        cfg = {
            "name": "test",
            "drop": "true",
            "replica": 1,
            "precision": "ms",
            "cachemodel": "'both'",
            "keep": 3650,
            "minRows": 100,
            "maxRows": 4096,
            "comp": 2,
            "vgroups": 10,
            "stt_trigger": self.stt_trigger,
        }
        return cfg

    def get_stb_cfg(self) -> list:
        """Super-table section: one `meters` stable with the scenario's sizes."""
        meters = {
            "name": "meters",
            "child_table_exists": "no",
            "childtable_count": self.tables,
            "childtable_prefix": "d",
            "escape_character": "yes",
            "auto_create_table": "no",
            "batch_create_tbl_num": 5,
            "data_source": "rand",
            "insert_mode": "taosc",
            "non_stop_mode": "no",
            "line_protocol": "line",
            "insert_rows": self.records_per_table,
            "childtable_limit": 10000,
            "childtable_offset": 100,
            "interlace_rows": self.interlace_rows,
            "insert_interval": 0,
            "partial_col_num": 0,
            "disorder_ratio": 0,
            "disorder_range": 1000,
            "timestamp_step": 10,
            "start_timestamp": "2022-10-01 00:00:00.000",
            "sample_format": "csv",
            "sample_file": "./sample.csv",
            "use_sample_ts": "no",
            "tags_file": "",
            "columns": self.get_column_list(),
            "tags": self.get_tag_list(),
        }
        return [meters]

    def get_column_list(self) -> list:
        """Column schema of the `meters` super table."""
        return [
            {"type": "FLOAT", "name": "current", "count": 1, "max": 12, "min": 8},
            {"type": "INT", "name": "voltage", "max": 225, "min": 215},
            {"type": "FLOAT", "name": "phase", "max": 1, "min": 0},
        ]

    def get_tag_list(self) -> list:
        """Tag schema of the `meters` super table."""
        return [
            {"type": "TINYINT", "name": "groupid", "max": 10, "min": 1},
            {"name": "location", "type": "BINARY", "len": 16, "values": ["San Francisco", "Los Angles", "San Diego", "San Jose", "Palo Alto", "Campbell", "Mountain View", "Sunnyvale", "Santa Clara", "Cupertino"]}
        ]

    def get_insert_cfg(self) -> dict:
        """Full top-level config handed to taosBenchmark."""
        database = {
            "dbinfo": self.get_db_cfg(),
            "super_tables": self.get_stb_cfg(),
        }
        return {
            "filetype": "insert",
            "cfgdir": "/etc/taos",
            "host": "127.0.0.1",
            "port": 6030,
            "user": "root",
            "password": "taosdata",
            "thread_count": 10,
            "create_table_thread_count": 7,
            "result_file": "/tmp/insert_res.txt",
            "confirm_parameter_prompt": "no",
            "insert_interval": 0,
            "num_of_records_per_req": 1000,
            "max_sql_len": 1024000,
            "databases": [database],
        }

    def create_insert_file(self) -> str:
        """Write the config to a timestamped file in /tmp and return its path."""
        now = datetime.datetime.now()
        out_path = f"/tmp/insert_{now:%F-%H%M}.json"
        with open(out_path, 'w') as fp:
            json.dump(self.get_insert_cfg(), fp)
        return out_path
|
|
@ -0,0 +1,60 @@
|
|||
import mysql.connector
|
||||
|
||||
class MySQLDatabase:
    """Thin wrapper around a mysql.connector connection to the perf-data DB.

    All errors are reported to stdout; failed queries leave the return value
    as None rather than raising, so callers must check their results.
    """

    def __init__(self, host='192.168.1.116', port=3306, user='root',
                 password='taosdata', database='perf_data'):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.database = database
        self.connection = None  # set by connect()

    def connect(self):
        """Open the connection; on failure, print and leave connection None."""
        try:
            self.connection = mysql.connector.connect(
                host=self.host, port=self.port, user=self.user,
                password=self.password, database=self.database)
        except mysql.connector.Error as error:
            print("Failed to connect to database: {}".format(error))

    def execute(self, query, params=None):
        """Run a statement and commit; errors are printed, not raised."""
        cur = self.connection.cursor()
        try:
            cur.execute(query, params)
            self.connection.commit()
        except mysql.connector.Error as error:
            print("Failed to execute query: {}".format(error))
        finally:
            cur.close()

    def query(self, query, params=None):
        """Run a SELECT and return all rows (None if the query failed)."""
        cur = self.connection.cursor()
        try:
            cur.execute(query, params)
            return cur.fetchall()
        except mysql.connector.Error as error:
            print("Failed to execute query: {}".format(error))
        finally:
            cur.close()

    def get_id(self, query, params=None):
        """Run an INSERT and return the auto-increment id it produced.

        Returns None (after printing) if the statement failed.
        """
        cur = self.connection.cursor()
        try:
            cur.execute(query, params)
            cur.execute("select last_insert_id()")
            new_id = cur.fetchone()[0]
            self.connection.commit()
            return new_id
        except mysql.connector.Error as error:
            print("Failed to execute query: {}".format(error))
        finally:
            cur.close()

    def disconnect(self):
        """Close the underlying connection."""
        self.connection.close()
|
|
@ -0,0 +1,41 @@
|
|||
import datetime
|
||||
import json
|
||||
|
||||
class QueryJson:
    """Generates a taosBenchmark query-configuration JSON file.

    Wraps a single SQL statement, executed `query_times` times against the
    `test` database.
    """

    def __init__(self, sql, query_times=1) -> None:
        self.sql = sql
        self.query_times = query_times

    def gen_query_json(self) -> dict:
        """Build the full query config as a plain dict."""
        sql_entry = {
            "sql": "%s" % self.sql,
            "result": "./query_res.txt",
        }
        specified = {
            "query_interval": 1,
            "concurrent": 1,
            "sqls": [sql_entry],
        }
        return {
            "filetype": "query",
            "cfgdir": "/etc/taos",
            "host": "127.0.0.1",
            "port": 6030,
            "user": "root",
            "password": "taosdata",
            "confirm_parameter_prompt": "no",
            "databases": "test",
            "query_times": self.query_times,
            "query_mode": "taosc",
            "specified_table_query": specified,
        }

    def create_query_file(self) -> str:
        """Write the config to a timestamped file in /tmp and return its path."""
        now = datetime.datetime.now()
        out_path = f"/tmp/query_{now:%F-%H%M}.json"
        with open(out_path, 'w') as fp:
            json.dump(self.gen_query_json(), fp)
        return out_path
|
|
@ -0,0 +1,29 @@
|
|||
import os
|
||||
import mysqldb
|
||||
import insert_json
|
||||
import query_json
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Scenario parameters varied by the daily perf test.
    num_of_tables = 10000
    records_per_table = 10000
    interlace_rows = 0
    stt_trigger = 1

    # Look up this scenario in the perf-data DB, inserting it if new.
    db = mysqldb.MySQLDatabase()
    db.connect()
    sql = f"select id from scenarios where num_of_tables = {num_of_tables} and records_per_table = {records_per_table} and interlace_rows = {interlace_rows} and stt_trigger = {stt_trigger}"
    row = db.query(sql)
    # Bug fix: query() returns [] (fetchall of an empty result) when no row
    # matches, and None only on error — `row is None` never detected a new
    # scenario, so the insert branch was unreachable.  Test for emptiness.
    if not row:
        scenario_id = db.get_id(f"insert into scenarios(num_of_tables, records_per_table, interlace_rows, stt_trigger) values({num_of_tables},{records_per_table}, {interlace_rows}, {stt_trigger})")
    else:
        scenario_id = row[0][0]  # renamed from `id` — avoid shadowing builtin

    print(scenario_id)

    db.disconnect()

    # Generate the insert config for this scenario and run taosBenchmark on it.
    insert = insert_json.InsertJson(num_of_tables, records_per_table,
                                    interlace_rows, stt_trigger)
    os.system(f"taosBenchmark -f {insert.create_insert_file()}")
|
||||
|
||||
|
Loading…
Reference in New Issue