docs: query forbid super table and special-table together (#30284)
* docs: query forbid super table and special-table together
* fix: taosBenchmark official case add to CI
* fix: tmq check consumer result
* docs: adjust json format and add note tips for both query
* docs: json format indent for query.json
* fix: forbid both query from v3.3.5.6
This commit is contained in:
parent 0baea6ad8e
commit d27848fe3b
@@ -355,6 +355,7 @@ Specify the configuration parameters for tag and data columns in `super_tables`

`query_times` specifies the total number of times to run the query; numeric type.

**Note: from version 3.3.5.6 onward, configuring both `specified_table_query` and `super_table_query` in the same JSON file is no longer supported.**

For other common parameters, see [General Configuration Parameters](#general-configuration-parameters).
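Since the two query sections must now live in separate files, a configuration that previously combined them would be split in two. Below is a minimal sketch of the specified-table half; the keys and values mirror the `query.json` changes shown further down in this diff and are illustrative rather than prescriptive.

```json
{
    "filetype": "query",
    "host": "127.0.0.1",
    "port": 6030,
    "user": "root",
    "password": "taosdata",
    "databases": "test",
    "query_times": 10,
    "query_mode": "taosc",
    "specified_table_query": {
        "query_interval": 1,
        "concurrent": 3,
        "threads": 3,
        "sqls": [
            {
                "sql": "select last_row(*) from meters",
                "result": "./query_res1.txt"
            }
        ]
    }
}
```

A `super_table_query` section would go into its own file, as in the `queryStb.json` example included below.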
@@ -484,6 +485,15 @@ Note: Data types in the taosBenchmark configuration file must be in lowercase to

</details>

<details>
<summary>queryStb.json</summary>

```json
{{#include /TDengine/tools/taos-tools/example/queryStb.json}}
```

</details>

#### Subscription Example

<details>
@@ -263,6 +263,8 @@ taosBenchmark -f <json file>

For other common parameters, see [General Configuration Parameters](#通用配置参数).

**Note: from v3.3.5.6 onward, configuring both `specified_table_query` and `super_table_query` in the same JSON file is no longer supported.**

#### Executing Specified Query Statements

The configuration parameters for querying specified tables (supertables, child tables, or regular tables may all be specified) are set in `specified_table_query`.
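A minimal sketch of the matching supertable half follows; it mirrors the new `queryStb.json` example added later in this commit, with illustrative values only (`xxxx` is the placeholder table name used in that example).

```json
{
    "filetype": "query",
    "databases": "test",
    "query_times": 10,
    "query_mode": "taosc",
    "super_table_query": {
        "stblname": "meters",
        "query_interval": 1,
        "threads": 3,
        "sqls": [
            {
                "sql": "select last_row(ts) from xxxx",
                "result": "./query_res2.txt"
            }
        ]
    }
}
```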
@@ -389,6 +391,15 @@ taosBenchmark -f <json file>

</details>

<details>
<summary>queryStb.json</summary>

```json
{{#include /TDengine/tools/taos-tools/example/queryStb.json}}
```

</details>

### Subscription JSON Example

<details>
@@ -10,7 +10,7 @@
    "query_times": 100,
    "query_mode": "taosc",
    "specified_table_query": {
        "concurrent": 3,
        "threads": 3,
        "sqls": [
            {
                "sql": "select last_row(*) from meters"

@@ -10,7 +10,7 @@
    "query_times": 100,
    "query_mode": "taosc",
    "specified_table_query": {
        "concurrent": 4,
        "threads": 4,
        "mixed_query": "yes",
        "sqls": [
            {

@@ -10,7 +10,7 @@
    "query_times": 5,
    "query_mode": "taosc",
    "specified_table_query": {
        "concurrent": 5,
        "threads": 5,
        "query_interval": 1000,
        "mixed_query": "yes",
        "batch_query": "yes",

@@ -10,7 +10,7 @@
    "query_times": 5,
    "query_mode": "taosc",
    "specified_table_query": {
        "concurrent": 5,
        "threads": 5,
        "query_interval": 100,
        "mixed_query": "yes",
        "batch_query": "no",

@@ -10,7 +10,7 @@
    "query_times": 100,
    "query_mode": "rest",
    "specified_table_query": {
        "concurrent": 3,
        "threads": 3,
        "mixed_query": "yes",
        "sqls": [
            {

@@ -10,7 +10,7 @@
    "query_times": 100,
    "query_mode": "rest",
    "specified_table_query": {
        "concurrent": 3,
        "threads": 3,
        "sqls": [
            {
                "sql": "select last_row(*) from meters"

@@ -12,7 +12,7 @@
    "query_mode": "taosc",
    "super_table_query": {
        "stblname": "meters",
        "concurrent": 3,
        "threads": 3,
        "query_interval": 0,
        "sqls": [
            {

@@ -12,7 +12,7 @@
    "query_mode": "rest",
    "super_table_query": {
        "stblname": "meters",
        "concurrent": 3,
        "threads": 3,
        "query_interval": 0,
        "sqls": [
            {
@@ -0,0 +1,257 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################

# -*- coding: utf-8 -*-
import os
import json
import sys
import time
import datetime
import platform
import subprocess

import frame
import frame.eos
import frame.etool
from frame.log import *
from frame.cases import *
from frame.sql import *
from frame.caseBase import *
from frame import *


# remove single and double quotation marks
def removeQuotation(origin):
    value = ""
    for c in origin:
        if c != '\'' and c != '"':
            value += c

    return value


class TDTestCase(TBase):
    def caseDescription(self):
        """
        taosBenchmark query->Basic test cases
        """

    def runSeconds(self, command, timeout=180):
        # run a shell command and capture its stdout/stderr
        tdLog.info(f"runSeconds {command} ...")
        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        process.wait(timeout)

        # get output
        output = process.stdout.read().decode(encoding="gbk")
        error = process.stderr.read().decode(encoding="gbk")
        return output, error

    def getKeyValue(self, content, key, end):
        # find key
        s = content.find(key)
        if s == -1:
            return False, ""

        # skip the key itself
        s += len(key)
        # skip blanks
        while s < len(content):
            if content[s] != " ":
                break
            s += 1

        # end check
        if s + 1 == len(content):
            return False, ""

        # find end
        if len(end) == 0:
            e = -1
        else:
            e = content.find(end, s)

        # get value
        if e == -1:
            value = content[s:]
        else:
            value = content[s:e]

        return True, value

    def getDbRows(self, times):
        sql = "select count(*) from test.meters"
        tdSql.waitedQuery(sql, 1, times)
        dbRows = tdSql.getData(0, 0)
        return dbRows

    def checkItem(self, output, key, end, expect, equal):
        # extract the value that follows `key` in the benchmark output and compare it
        # against `expect`: exact match if `equal`, otherwise it must be greater than `expect`
        ret, value = self.getKeyValue(output, key, end)
        if ret == False:
            tdLog.exit(f"not found key:{key}. end:{end} output:\n{output}")

        fval = float(value)
        # compare
        if equal and fval != expect:
            tdLog.exit(f"check not expect. expect:{expect} real:{fval}, key:'{key}' end:'{end}' output:\n{output}")
        elif equal == False and fval <= expect:
            tdLog.exit(f"failed because {fval} <= {expect}, key:'{key}' end:'{end}' output:\n{output}")
        else:
            # success
            if equal:
                tdLog.info(f"check successfully. key:'{key}' expect:{expect} real:{fval}")
            else:
                tdLog.info(f"check successfully. key:'{key}' {fval} > {expect}")

    def checkAfterRun(self, benchmark, jsonFile, specMode, tbCnt):
        # run
        cmd = f"{benchmark} -f {jsonFile}"
        output, error = self.runSeconds(cmd)

        if specMode:
            label = "specified_table_query"
        else:
            label = "super_table_query"

        #
        # check query result against the configuration in the JSON file
        #
        with open(jsonFile, "r") as file:
            data = json.load(file)

        queryTimes = data["query_times"]
        # continue_if_fail is optional, default "no"
        try:
            continueIfFail = data["continue_if_fail"]
        except:
            continueIfFail = "no"

        threads = data[label]["threads"]
        sqls = data[label]["sqls"]

        # batch_query is optional, default "no"
        try:
            batchQuery = data[label]["batch_query"]
        except:
            batchQuery = "no"

        # mixed_query is optional, default "no"
        try:
            mixedQuery = data[label]["mixed_query"]
        except:
            mixedQuery = "no"

        tdLog.info(f"queryTimes={queryTimes} threads={threads} mixedQuery={mixedQuery} "
                   f"batchQuery={batchQuery} len(sqls)={len(sqls)} label={label}\n")

        totalQueries = 0
        threadQueries = 0
        QPS = 10

        if continueIfFail.lower() == "yes":
            allEnd = " "
        else:
            allEnd = "\n"

        if specMode and mixedQuery.lower() != "yes":
            # specified-table query, non-mixed: every thread runs every sql query_times times
            threadQueries = queryTimes * threads
            totalQueries = queryTimes * threads * len(sqls)
            threadKey = f"complete query with {threads} threads and "
            qpsKey = "QPS: "
            avgKey = "query delay avg: "
            minKey = "min:"
        else:
            # specified-table mixed query or supertable query
            if specMode:
                totalQueries = queryTimes * len(sqls)
                # specified mixed
                if batchQuery.lower() == "yes":
                    # batch
                    threadQueries = len(sqls)
                    QPS = 2
                else:
                    threadQueries = totalQueries
            else:
                # supertable: each sql is run against every child table
                totalQueries = queryTimes * len(sqls) * tbCnt
                threadQueries = totalQueries

            nSql = len(sqls)
            if specMode and nSql < threads:
                tdLog.info(f"set threads = {nSql} because len(sqls) < threads")
                threads = nSql
            threadKey = f"using {threads} threads complete query "
            qpsKey = ""
            avgKey = "avg delay:"
            minKey = "min delay:"

        items = [
            [threadKey, " ", threadQueries, True],
            [qpsKey, " ", 5, False],  # QPS must be greater than 5
            [avgKey, "s", 0, False],
            [minKey, "s", 0, False],
            ["max: ", "s", 0, False],
            ["p90: ", "s", 0, False],
            ["p95: ", "s", 0, False],
            ["p99: ", "s", 0, False],
            ["INFO: Spend ", " ", 0, False],
            ["completed total queries: ", ",", totalQueries, True],
            ["the QPS of all threads:", allEnd, QPS, False]  # overall QPS must exceed the threshold
        ]

        # check
        for item in items:
            if len(item[0]) > 0:
                self.checkItem(output, item[0], item[1], item[2], item[3])

    # tmq check
    def checkTmqJson(self, benchmark, json):
        OK_RESULT = "Consumed total msgs: 30, total rows: 300000"
        cmd = benchmark + " -f " + json
        output, error = frame.eos.run(cmd, 600)
        if output.find(OK_RESULT) != -1:
            tdLog.info(f"succ: {cmd} found '{OK_RESULT}'")
        else:
            tdLog.exit(f"failed: {cmd} not found {OK_RESULT} in:\n{output} \nerror:{error}")

    def run(self):
        tbCnt = 10
        benchmark = etool.benchMarkFile()

        # insert
        json = "../../tools/taos-tools/example/insert.json"
        self.insertBenchJson(json, checkStep=True)

        # query
        json = "../../tools/taos-tools/example/query.json"
        self.checkAfterRun(benchmark, json, True, tbCnt)
        json = "../../tools/taos-tools/example/queryStb.json"
        self.checkAfterRun(benchmark, json, False, tbCnt)

        # tmq
        json = "../../tools/taos-tools/example/tmq.json"
        self.checkTmqJson(benchmark, json)

    def stop(self):
        tdSql.close()
        tdLog.success("%s successfully executed" % __file__)


tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
@@ -77,6 +77,7 @@
#

# benchmark 64 cases
,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/websiteCase.py
,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/rest_insert_alltypes_json.py -R
,,n,army,python3 ./test.py -f tools/benchmark/basic/taosdemoTestQueryWithJson-mixed-query.py -R
,,y,army,./pytest.sh python3 ./test.py -f tools/benchmark/basic/stmt_sample_csv_json.py
@@ -27,7 +27,7 @@
{
    "name": "meters",
    "child_table_exists": "no",
    "childtable_count": 1000,
    "childtable_count": 10,
    "childtable_prefix": "d",
    "auto_create_table": "no",
    "batch_create_tbl_num": 5,
@@ -6,13 +6,13 @@
    "user": "root",
    "password": "taosdata",
    "confirm_parameter_prompt": "no",
    "continue_if_fail": "yes",
    "continue_if_fail": "yes",
    "databases": "test",
    "query_times": 10,
    "query_mode": "taosc",
    "specified_table_query": {
        "query_interval": 1,
        "concurrent": 3,
        "threads": 3,
        "sqls": [
            {
                "sql": "select last_row(*) from meters",

@@ -23,16 +23,5 @@
                "result": "./query_res1.txt"
            }
        ]
    },
    "super_table_query": {
        "stblname": "meters",
        "query_interval": 1,
        "threads": 3,
        "sqls": [
            {
                "sql": "select last_row(ts) from xxxx",
                "result": "./query_res2.txt"
            }
        ]
    }
}
@@ -0,0 +1,24 @@
{
    "filetype": "query",
    "cfgdir": "/etc/taos",
    "host": "127.0.0.1",
    "port": 6030,
    "user": "root",
    "password": "taosdata",
    "confirm_parameter_prompt": "no",
    "continue_if_fail": "yes",
    "databases": "test",
    "query_times": 10,
    "query_mode": "taosc",
    "super_table_query": {
        "stblname": "meters",
        "query_interval": 1,
        "threads": 3,
        "sqls": [
            {
                "sql": "select last_row(ts) from xxxx",
                "result": "./query_res2.txt"
            }
        ]
    }
}