Merge pull request #24461 from taosdata/coverage/TD-28251-3.0

coverage: sum add all types test
Alex Duan 2024-01-15 17:16:41 +08:00 committed by GitHub
commit 1ae9d0b43c
12 changed files with 292 additions and 26 deletions

View File

@ -832,6 +832,7 @@ int32_t minmaxFunctionFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock) {
return code;
}
#ifdef BUILD_NO_CALL
int32_t setNullSelectivityValue(SqlFunctionCtx* pCtx, SSDataBlock* pBlock, int32_t rowIndex) {
if (pCtx->subsidiaries.num <= 0) {
return TSDB_CODE_SUCCESS;
@ -847,6 +848,7 @@ int32_t setNullSelectivityValue(SqlFunctionCtx* pCtx, SSDataBlock* pBlock, int32
return TSDB_CODE_SUCCESS;
}
#endif
int32_t setSelectivityValue(SqlFunctionCtx* pCtx, SSDataBlock* pBlock, const STuplePos* pTuplePos, int32_t rowIndex) {
if (pCtx->subsidiaries.num <= 0) {

View File

@ -6,7 +6,8 @@
"user": "root",
"password": "taosdata",
"connection_pool_size": 8,
"num_of_records_per_req": 2000,
"num_of_records_per_req": 3000,
"prepared_rand": 3000,
"thread_count": 2,
"create_table_thread_count": 1,
"confirm_parameter_prompt": "no",

View File

@ -28,7 +28,9 @@ from frame import *
class TDTestCase(TBase):
updatecfgDict = {
"countAlwaysReturnValue" : "0"
}
def insertData(self):
tdLog.info(f"insert data.")
@ -42,6 +44,10 @@ class TDTestCase(TBase):
self.insert_rows = 100000
self.timestamp_step = 10000
# create count check table
sql = f"create table {self.db}.ta(ts timestamp, age int) tags(area int)"
tdSql.execute(sql)
def doAction(self):
tdLog.info(f"do action.")
self.flushDb()
@ -64,7 +70,10 @@ class TDTestCase(TBase):
selid = random.choice(vgids)
self.balanceVGroupLeaderOn(selid)
# check count always return value
sql = f"select count(*) from {self.db}.ta"
tdSql.query(sql)
tdSql.checkRows(0) # countAlwaysReturnValue is false
# run
def run(self):
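
A minimal standalone sketch of the behavior this hunk checks, assuming a server started with countAlwaysReturnValue set to 0; the demo database and table names below are illustrative, not from this PR:

# illustrative only: an empty super table, counted with countAlwaysReturnValue=0
tdSql.execute("create database demo")
tdSql.execute("create table demo.ta(ts timestamp, age int) tags(area int)")
tdSql.query("select count(*) from demo.ta")
tdSql.checkRows(0)   # empty scan returns no row when countAlwaysReturnValue is 0
# with the default countAlwaysReturnValue=1 the same query returns a single row containing 0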

View File

@ -75,7 +75,6 @@ class TDTestCase(TBase):
# others
etool.exeBinFile("taos", f'-N 200 -l 2048 -s "{sql}" ', wait=False)
etool.exeBinFile("taos", f'-n server', wait=False)
def doTaosd(self):
@ -86,11 +85,11 @@ class TDTestCase(TBase):
# -s
sdb = "./sdb.json"
eos.delFile(sdb)
etool.runBinFile("taosd", f"-s -c {cfg}")
self.checkFileExist(sdb)
etool.exeBinFile("taosd", f"-s -c {cfg}")
# -C
etool.runBinFile("taosd", "-C")
etool.exeBinFile("taosd", "-C")
# -k
rets = etool.runBinFile("taosd", "-C")
self.checkListNotEmpty(rets)

View File

@ -0,0 +1,61 @@
{
"filetype": "insert",
"cfgdir": "/etc/taos",
"host": "127.0.0.1",
"port": 6030,
"user": "root",
"password": "taosdata",
"connection_pool_size": 8,
"num_of_records_per_req": 4000,
"prepared_rand": 1000,
"thread_count": 3,
"create_table_thread_count": 1,
"confirm_parameter_prompt": "no",
"databases": [
{
"dbinfo": {
"name": "db",
"drop": "yes",
"vgroups": 3,
"replica": 3,
"duration":"3d",
"wal_retention_period": 1,
"wal_retention_size": 1,
"stt_trigger": 1
},
"super_tables": [
{
"name": "stb",
"child_table_exists": "no",
"childtable_count": 6,
"insert_rows": 100000,
"childtable_prefix": "d",
"insert_mode": "taosc",
"timestamp_step": 30000,
"start_timestamp":"2023-10-01 10:00:00",
"columns": [
{ "type": "bool", "name": "bc"},
{ "type": "float", "name": "fc" },
{ "type": "double", "name": "dc"},
{ "type": "tinyint", "name": "ti"},
{ "type": "smallint", "name": "si" },
{ "type": "int", "name": "ic" },
{ "type": "bigint", "name": "bi" },
{ "type": "utinyint", "name": "uti"},
{ "type": "usmallint", "name": "usi"},
{ "type": "uint", "name": "ui" },
{ "type": "ubigint", "name": "ubi"},
{ "type": "binary", "name": "bin", "len": 8},
{ "type": "nchar", "name": "nch", "len": 16}
],
"tags": [
{"type": "tinyint", "name": "groupid","max": 10,"min": 1},
{"name": "location","type": "binary", "len": 16, "values":
["San Francisco", "Los Angles", "San Diego", "San Jose", "Palo Alto", "Campbell", "Mountain View","Sunnyvale", "Santa Clara", "Cupertino"]
}
]
}
]
}
]
}
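
A minimal sketch (not part of this PR) of driving a config like this outside the Python harness, assuming the taosBenchmark binary is installed and the JSON above is saved as query_basic.json:

import subprocess
# taosBenchmark reads an insert scenario from a JSON file passed with -f
subprocess.run(["taosBenchmark", "-f", "query_basic.json"], check=True)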

View File

@ -0,0 +1,79 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
import time
import random
import taos
import frame
import frame.etool
from frame.log import *
from frame.cases import *
from frame.sql import *
from frame.caseBase import *
from frame import *
class TDTestCase(TBase):
updatecfgDict = {
"keepColumnName" : "1",
"ttlChangeOnWrite" : "1",
"querySmaOptimize": "1"
}
def insertData(self):
tdLog.info(f"insert data.")
# taosBenchmark run
jfile = etool.curFile(__file__, "query_basic.json")
etool.benchMark(json=jfile)
tdSql.execute(f"use {self.db}")
# set insert data information
self.childtable_count = 6
self.insert_rows = 100000
self.timestamp_step = 30000
def doQuery(self):
tdLog.info(f"do query.")
# top bottom
sql = f"select top(uti, 5) from {self.stb} "
tdSql.execute(sql)
# run
def run(self):
tdLog.debug(f"start to excute {__file__}")
# insert data
self.insertData()
# check insert data correct
self.checkInsertCorrect()
# check
self.checkConsistency("usi")
# do action
self.doQuery()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())

View File

@ -7,6 +7,7 @@
"password": "taosdata",
"connection_pool_size": 8,
"num_of_records_per_req": 2000,
"prepared_rand": 1000,
"thread_count": 2,
"create_table_thread_count": 1,
"confirm_parameter_prompt": "no",

View File

@ -14,11 +14,12 @@ import time
# Auto Gen class
#
class AutoGen:
def __init__(self):
def __init__(self, fillOne=False):
self.ts = 1600000000000
self.batch_size = 100
seed = time.time() % 10000
random.seed(seed)
self.fillOne = fillOne
# set start ts
def set_start_ts(self, ts):
@ -87,6 +88,23 @@ class AutoGen:
return datas
# fill one data
def fillone_data(self, i, marr):
datas = ""
for c in marr:
if datas != "":
datas += ","
if c == 0:
datas += "%d" % (self.ts + i)
elif c == 12 or c == 13: # binary
datas += '"1"'
else:
datas += '1'
return datas
# generate special wide random string
def random_string(self, count):
letters = string.ascii_letters
@ -96,7 +114,6 @@ class AutoGen:
def create_db(self, dbname, vgroups = 2, replica = 1):
self.dbname = dbname
tdSql.execute(f'create database {dbname} vgroups {vgroups} replica {replica}')
tdSql.execute(f'use {dbname}')
# create table or stable
def create_stable(self, stbname, tag_cnt, column_cnt, binary_len, nchar_len):
@ -106,7 +123,7 @@ class AutoGen:
self.mtags, tags = self.gen_columns_sql("t", tag_cnt, binary_len, nchar_len)
self.mcols, cols = self.gen_columns_sql("c", column_cnt - 1, binary_len, nchar_len)
sql = f"create table {stbname} (ts timestamp, {cols}) tags({tags})"
sql = f"create table {self.dbname}.{stbname} (ts timestamp, {cols}) tags({tags})"
tdSql.execute(sql)
# create child table
@ -115,7 +132,7 @@ class AutoGen:
self.child_name = prename
for i in range(cnt):
tags_data = self.gen_data(i, self.mtags)
sql = f"create table {prename}{i} using {stbname} tags({tags_data})"
sql = f"create table {self.dbname}.{prename}{i} using {self.dbname}.{stbname} tags({tags_data})"
tdSql.execute(sql)
tdLog.info(f"create child tables {cnt} ok")
@ -127,17 +144,20 @@ class AutoGen:
# loop do
for i in range(cnt):
value = self.gen_data(i, self.mcols)
if self.fillOne :
value = self.fillone_data(i, self.mcols)
else:
value = self.gen_data(i, self.mcols)
ts += step
values += f"({ts},{value}) "
if batch_size == 1 or (i > 0 and i % batch_size == 0) :
sql = f"insert into {child_name} values {values}"
sql = f"insert into {self.dbname}.{child_name} values {values}"
tdSql.execute(sql)
values = ""
# end batch
if values != "":
sql = f"insert into {child_name} values {values}"
sql = f"insert into {self.dbname}.{child_name} values {values}"
tdSql.execute(sql)
tdLog.info(f" insert data i={i}")
values = ""
@ -159,5 +179,3 @@ class AutoGen:
self.insert_data_child(name, cnt, self.batch_size, 0)
tdLog.info(f" insert same timestamp ok, child table={self.child_cnt} insert rows={cnt}")

View File

@ -135,8 +135,9 @@ class TBase:
tdSql.checkAgg(sql, self.childtable_count)
# check step
sql = f"select count(*) from (select diff(ts) as dif from {self.stb} partition by tbname) where dif != {self.timestamp_step}"
tdSql.checkAgg(sql, 0)
sql = f"select * from (select diff(ts) as dif from {self.stb} partition by tbname) where dif != {self.timestamp_step}"
tdSql.query(sql)
tdSql.checkRows(0)
# save agg result
def snapshotAgg(self):
@ -156,6 +157,31 @@ class TBase:
tdSql.checkAgg(self.sqlFirst, self.first)
tdSql.checkAgg(self.sqlLast, self.last)
# self check
def checkConsistency(self, col):
# top with max
sql = f"select max({col}) from {self.stb}"
expect = tdSql.getFirstValue(sql)
sql = f"select top({col}, 5) from {self.stb}"
tdSql.checkFirstValue(sql, expect)
#bottom with min
sql = f"select min({col}) from {self.stb}"
expect = tdSql.getFirstValue(sql)
sql = f"select bottom({col}, 5) from {self.stb}"
tdSql.checkFirstValue(sql, expect)
# order by desc limit 1 with last
sql = f"select last({col}) from {self.stb}"
expect = tdSql.getFirstValue(sql)
sql = f"select {col} from {self.stb} order by _c0 desc limit 1"
tdSql.checkFirstValue(sql, expect)
# order by asc limit 1 with first
sql = f"select first({col}) from {self.stb}"
expect = tdSql.getFirstValue(sql)
sql = f"select {col} from {self.stb} order by _c0 asc limit 1"
tdSql.checkFirstValue(sql, expect)
#
# get db information
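
For context, checkConsistency is meant to be called from a TBase subclass after data is loaded, as the new query_basic.py above does; a minimal sketch:

# inside a test class deriving from TBase, after insertData() has populated self.stb
self.checkConsistency("usi")  # cross-checks max/top, min/bottom, and first/last against order-by queries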

View File

@ -549,6 +549,12 @@ class TDSql:
def getFirstValue(self, sql) :
self.query(sql)
return self.getData(0, 0)
# expect first value
def checkFirstValue(self, sql, expect):
self.query(sql)
self.checkData(0, 0, expect)
def get_times(self, time_str, precision="ms"):
caller = inspect.getframeinfo(inspect.stack()[1][0])

View File

@ -14,11 +14,12 @@ import time
# Auto Gen class
#
class AutoGen:
def __init__(self):
def __init__(self, fillOne=False):
self.ts = 1600000000000
self.batch_size = 100
seed = time.time() % 10000
random.seed(seed)
self.fillOne = fillOne
# set start ts
def set_start_ts(self, ts):
@ -87,6 +88,23 @@ class AutoGen:
return datas
# fill one data
def fillone_data(self, i, marr):
datas = ""
for c in marr:
if datas != "":
datas += ","
if c == 0:
datas += "%d" % (self.ts + i)
elif c == 12 or c == 13: # binary
datas += '"1"'
else:
datas += '1'
return datas
# generate special wide random string
def random_string(self, count):
letters = string.ascii_letters
@ -96,7 +114,7 @@ class AutoGen:
def create_db(self, dbname, vgroups = 2, replica = 1):
self.dbname = dbname
tdSql.execute(f'create database {dbname} vgroups {vgroups} replica {replica}')
tdSql.execute(f'use {dbname}')
tdSql.execute(f"use {dbname}")
# create table or stable
def create_stable(self, stbname, tag_cnt, column_cnt, binary_len, nchar_len):
@ -106,7 +124,7 @@ class AutoGen:
self.mtags, tags = self.gen_columns_sql("t", tag_cnt, binary_len, nchar_len)
self.mcols, cols = self.gen_columns_sql("c", column_cnt - 1, binary_len, nchar_len)
sql = f"create table {stbname} (ts timestamp, {cols}) tags({tags})"
sql = f"create table {self.dbname}.{stbname} (ts timestamp, {cols}) tags({tags})"
tdSql.execute(sql)
# create child table
@ -115,7 +133,7 @@ class AutoGen:
self.child_name = prename
for i in range(cnt):
tags_data = self.gen_data(i, self.mtags)
sql = f"create table {prename}{i} using {stbname} tags({tags_data})"
sql = f"create table {self.dbname}.{prename}{i} using {self.dbname}.{stbname} tags({tags_data})"
tdSql.execute(sql)
tdLog.info(f"create child tables {cnt} ok")
@ -127,17 +145,20 @@ class AutoGen:
# loop do
for i in range(cnt):
value = self.gen_data(i, self.mcols)
if self.fillOne :
value = self.fillone_data(i, self.mcols)
else:
value = self.gen_data(i, self.mcols)
ts += step
values += f"({ts},{value}) "
if batch_size == 1 or (i > 0 and i % batch_size == 0) :
sql = f"insert into {child_name} values {values}"
sql = f"insert into {self.dbname}.{child_name} values {values}"
tdSql.execute(sql)
values = ""
# end batch
if values != "":
sql = f"insert into {child_name} values {values}"
sql = f"insert into {self.dbname}.{child_name} values {values}"
tdSql.execute(sql)
tdLog.info(f" insert data i={i}")
values = ""

View File

@ -2,6 +2,7 @@ from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
from util.autogen import *
INT_COL = "c1"
@ -23,11 +24,11 @@ TS_TYPE_COL = [TS_COL]
DBNAME = "db"
class TDTestCase:
def init(self, conn, logSql, replicaVar=1):
self.replicaVar = int(replicaVar)
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
self.autoGen = AutoGen(True)
def __sum_condition(self):
sum_condition = []
@ -207,9 +208,51 @@ class TDTestCase:
'''
)
def testAllTypes(self):
# create stable and insert
tdLog.info("test all types")
dbname = "sumdb"
stbname = "stb"
colnum = 16
self.autoGen.set_batch_size(1000)
self.autoGen.create_db(dbname)
self.autoGen.create_stable(stbname, 16, colnum, 8, 16)
self.autoGen.create_child(stbname, "d", 4)
self.autoGen.insert_data(10000)
# check correct
i = 0
for c in self.autoGen.mcols:
if c in [0, 11, 12, 13]:
i += 1
continue
# query
col = f"c{i}"
sql = f"select count({col}), sum({col}), avg({col}), max({col}), min({col}), stddev({col}), leastsquares({col},1,9) from {dbname}.{stbname}"
tdSql.query(sql)
# count
tdSql.checkData(0, 0, 4*10000, True)
# sum
tdSql.checkData(0, 1, 4*10000, True)
# avg
tdSql.checkData(0, 2, 1, True)
# max
tdSql.checkData(0, 3, 1, True)
# min
tdSql.checkData(0, 4, 1, True)
# stddev
tdSql.checkData(0, 5, 0, True)
i += 1
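
For context (not added by the PR itself): because fillOne mode writes the constant 1 into every non-timestamp column, the expected aggregates over 4 child tables of 10000 rows each follow directly:

rows = 4 * 10000   # 40000 values of 1 per column
# count(col) == sum(col) == 40000
# avg(col) == max(col) == min(col) == 1
# stddev(col) == 0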
def run(self):
tdSql.prepare()
self.testAllTypes()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()