test: udfpy_main.py case passed!
This commit is contained in:
parent 6345952f88
commit e2e76f9e28
@@ -0,0 +1,171 @@
import taos
import sys
import time
import os

from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
from util.dnodes import TDDnodes
from util.dnodes import TDDnode
import socket
import subprocess


class MyDnodes(TDDnodes):
    def __init__(self, dnodes_lists):
        super(MyDnodes, self).__init__()
        self.dnodes = dnodes_lists  # every entry must be a TDDnode instance
        self.simDeployed = False


class TagCluster:
    noConn = True

    def init(self, conn, logSql, replicaVar=1):
        tdLog.debug(f"start to execute {__file__}")
        self.TDDnodes = None
        self.deploy_cluster(5)
        self.master_dnode = self.TDDnodes.dnodes[0]
        self.host = self.master_dnode.cfgDict["fqdn"]
        conn1 = taos.connect(self.master_dnode.cfgDict["fqdn"], config=self.master_dnode.cfgDir)
        tdSql.init(conn1.cursor())

    def getBuildPath(self):
        selfPath = os.path.dirname(os.path.realpath(__file__))
        buildPath = ""

        if "community" in selfPath:
            projPath = selfPath[:selfPath.find("community")]
        else:
            projPath = selfPath[:selfPath.find("tests")]

        # walk the project tree for the directory holding the taosd binary
        for root, dirs, files in os.walk(projPath):
            if "taosd" in files or "taosd.exe" in files:
                rootRealPath = os.path.dirname(os.path.realpath(root))
                if "packaging" not in rootRealPath:
                    buildPath = root[:len(root) - len("/build/bin")]
                    break
        return buildPath

    def deploy_cluster(self, dnodes_nums):
        testCluster = False
        valgrind = 0
        hostname = socket.gethostname()
        dnodes = []
        start_port = 6030
        # each dnode gets its own serverPort, spaced 100 apart: 6030, 6130, ...
        for num in range(1, dnodes_nums + 1):
            dnode = TDDnode(num)
            dnode.addExtraCfg("firstEp", f"{hostname}:{start_port}")
            dnode.addExtraCfg("fqdn", f"{hostname}")
            dnode.addExtraCfg("serverPort", f"{start_port + (num - 1) * 100}")
            dnode.addExtraCfg("monitorFqdn", hostname)
            dnode.addExtraCfg("monitorPort", 7043)
            dnodes.append(dnode)

        self.TDDnodes = MyDnodes(dnodes)
        self.TDDnodes.init("")
        self.TDDnodes.setTestCluster(testCluster)
        self.TDDnodes.setValgrind(valgrind)

        self.TDDnodes.setAsan(tdDnodes.getAsan())
        self.TDDnodes.stopAll()
        for dnode in self.TDDnodes.dnodes:
            self.TDDnodes.deploy(dnode.index, {})

        for dnode in self.TDDnodes.dnodes:
            self.TDDnodes.starttaosd(dnode.index)

        # create cluster: register every other dnode at the first endpoint
        for dnode in self.TDDnodes.dnodes[1:]:
            # print(dnode.cfgDict)
            dnode_id = dnode.cfgDict["fqdn"] + ":" + dnode.cfgDict["serverPort"]
            dnode_first_host = dnode.cfgDict["firstEp"].split(":")[0]
            dnode_first_port = dnode.cfgDict["firstEp"].split(":")[-1]
            cmd = f"{self.getBuildPath()}/build/bin/taos -h {dnode_first_host} -P {dnode_first_port} -s \"create dnode \\\"{dnode_id}\\\"\""
            print(cmd)
            os.system(cmd)

        time.sleep(2)
        tdLog.info(" create cluster done! ")

    def five_dnode_one_mnode(self):
        tdSql.query("select * from information_schema.ins_dnodes;")
        tdSql.checkData(0, 1, '%s:6030' % self.host)
        tdSql.checkData(4, 1, '%s:6430' % self.host)
        tdSql.checkData(0, 4, 'ready')
        tdSql.checkData(4, 4, 'ready')
        tdSql.query("select * from information_schema.ins_mnodes;")
        tdSql.checkData(0, 1, '%s:6030' % self.host)
        tdSql.checkData(0, 2, 'leader')
        tdSql.checkData(0, 3, 'ready')

        tdSql.error("create mnode on dnode 1;")
        tdSql.error("drop mnode on dnode 1;")

        tdSql.execute("drop database if exists db")
        tdSql.execute("create database if not exists db replica 1 duration 300")
        tdSql.execute("use db")
        tdSql.execute(
            '''create table stb1
            (ts timestamp, c1 int, c2 bigint, c3 smallint, c4 tinyint, c5 float, c6 double, c7 bool, c8 binary(16), c9 nchar(32), c10 timestamp)
            tags (t1 int)
            '''
        )
        tdSql.execute(
            '''
            create table t1
            (ts timestamp, c1 int, c2 bigint, c3 smallint, c4 tinyint, c5 float, c6 double, c7 bool, c8 binary(16), c9 nchar(32), c10 timestamp)
            '''
        )
        for i in range(4):
            tdSql.execute(f'create table ct{i + 1} using stb1 tags ( {i + 1} )')

        tdSql.query('select * from information_schema.ins_databases;')
        tdSql.checkData(2, 5, 'on')
        tdSql.error("alter database db strict 'off'")
        # tdSql.execute("alter database db strict 'on'")
        # tdSql.query('select * from information_schema.ins_databases;')
        # tdSql.checkData(2,5,'on')

    def getConnection(self, dnode):
        host = dnode.cfgDict["fqdn"]
        port = dnode.cfgDict["serverPort"]
        config_dir = dnode.cfgDir
        return taos.connect(host=host, port=int(port), config=config_dir)

    def check_alive(self):
        # check cluster alive
        tdLog.printNoPrefix("======== test cluster alive: ")
        tdSql.checkDataLoop(0, 0, 1, "show cluster alive;", 20, 0.5)

        tdSql.query("show db.alive;")
        tdSql.checkData(0, 0, 1)

        # stop dnode 3
        self.TDDnodes.stoptaosd(3)
        tdSql.checkDataLoop(0, 0, 2, "show cluster alive;", 20, 0.5)

        tdSql.query("show db.alive;")
        tdSql.checkData(0, 0, 2)

        # stop dnode 2
        self.TDDnodes.stoptaosd(2)
        tdSql.checkDataLoop(0, 0, 0, "show cluster alive;", 20, 0.5)

        tdSql.query("show db.alive;")
        tdSql.checkData(0, 0, 0)

    def run(self):
        # print(self.master_dnode.cfgDict)
        self.five_dnode_one_mnode()
        # check cluster and db alive
        self.check_alive()

    def stop(self):
        tdSql.close()
        tdLog.success(f"{__file__} successfully executed")
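Note: the serverPort rule above, start_port + (num - 1) * 100, assigns 6030, 6130, 6230, 6330 and 6430 to the five dnodes, which is exactly what five_dnode_one_mnode() asserts when it checks '%s:6030' for the first dnode and '%s:6430' for the fifth. A standalone sketch of that rule:

start_port = 6030
ports = [start_port + (num - 1) * 100 for num in range(1, 6)]
print(ports)  # [6030, 6130, 6230, 6330, 6430]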
@@ -13,7 +13,7 @@ def finish(buf):
     mins = pickle.loads(buf)
     min_val = None
     for min in mins:
-        if min < min_val:
+        if min_val is None or (min is not None and min < min_val):
             min_val = min
     return min_val
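The old comparison could never update min_val: it compares against the initial None, which raises a TypeError in Python 3. The new condition seeds min_val from the first non-None partial result and skips None entries afterwards. A minimal standalone sketch of the fixed reduction, assuming the same pickled-list buffer format the hunk itself shows:

import pickle

def finish(buf):
    # buf carries a pickled list of per-block partial minimums; entries may be None
    mins = pickle.loads(buf)
    min_val = None
    for m in mins:
        if min_val is None or (m is not None and m < min_val):
            min_val = m
    return min_val

print(finish(pickle.dumps([None, 7, 3, None, 5])))  # -> 3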
@@ -14,7 +14,10 @@ def process(block):
         rows = []
         for j in range(ncols):
             val = block.data(i, j)
-            rows.append(val)
+            if type(val) is bytes:
+                rows.append(val.decode('utf-8'))
+            else:
+                rows.append(repr(val))
         results.append(','.join(rows))
     return results
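','.join accepts only strings, so the old rows.append(val) broke as soon as a row held ints, floats or None; the new branch decodes bytes (binary/nchar column values) and stringifies everything else via repr. The conversion rule in isolation, with hypothetical sample values standing in for what block.data(i, j) returns:

vals = [b'abc', 1, 2.5, None]  # hypothetical row values
rows = [v.decode('utf-8') if type(v) is bytes else repr(v) for v in vals]
print(','.join(rows))  # -> abc,1,2.5,None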
@@ -109,6 +109,7 @@ class TDTestCase:
     def create_scalar_udfpy(self):
         # scalar function
         self.scalar_funs = {
+            'sf0': 'timestamp',
             'sf1': 'tinyint',
             'sf2': 'smallint',
             'sf3': 'int',
@@ -121,8 +122,7 @@ class TDTestCase:
             'sf10': 'double',
             'sf11': 'bool',
             'sf12': 'varchar(20)',
-            'sf13': 'nchar(20)',
-            'sf14': 'timestamp'
+            'sf13': 'nchar(20)'
         }
         # agg function
         self.agg_funs = {
@@ -152,8 +152,8 @@ class TDTestCase:
         tdSql.execute(sql)
         tdLog.info(sql)

-    def create_udf_af(self, fun_name, out_type, bufsize):
-        sql = f'create aggregate function {fun_name} as "{self.udf_path}/{fun_name}.py" outputtype {out_type} bufsize {bufsize} language "Python" '
+    def create_udf_af(self, fun_name, filename, out_type, bufsize):
+        sql = f'create aggregate function {fun_name} as "{self.udf_path}/{filename}" outputtype {out_type} bufsize {bufsize} language "Python" '
         tdSql.execute(sql)
         tdLog.info(sql)
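Splitting the script file name out of the SQL function name lets several functions share one UDF source file; the create_aggr_udfpy hunk further down reuses the same script for an int and a float variant:

self.create_udf_af("af_min_float", "af_min.py", "float", 10 * 1024)
self.create_udf_af("af_min_int", "af_min.py", "int", 10 * 1024)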
@@ -169,7 +169,7 @@ class TDTestCase:
                 tdSql.checkData(i, j, result1[i][j])

     # same value like select col1, udf_fun1(col1) from st
-    def verify_same_value(sql):
+    def verify_same_value(self, sql):
         tdSql.query(sql)
         nrows = tdSql.getRows()
         for i in range(nrows):
@@ -188,10 +188,16 @@ class TDTestCase:
     # query multi-args
     def query_multi_args(self):
-        cols = self.column_dict.keys() + self.tag_dict.keys()
+        cols = list(self.column_dict.keys()) + list(self.tag_dict.keys())
+        cols.remove("col13")
+        cols.remove("t13")
         ncols = len(cols)
+        print(cols)
+
         for i in range(2, ncols):
-            sample = random.sample(i)
+            print(i)
+            sample = random.sample(cols, i)
+            print(sample)
             cols_name = ','.join(sample)
             sql = f'select sf_multi_args({cols_name}),{cols_name} from {self.stbname}'
             self.verify_same_multi_values(sql)
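random.sample requires both a population and a sample size, so the old random.sample(i) raised a TypeError; the fix draws i distinct column names on each pass. Its behavior in isolation, with a hypothetical column list:

import random

cols = ['col1', 'col2', 'col3', 't1', 't2']
print(random.sample(cols, 2))  # e.g. ['t1', 'col3']: two distinct names in random order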
@@ -202,12 +208,13 @@ class TDTestCase:
         # col
         for col_name, col_type in self.column_dict.items():
             for fun_name, out_type in self.scalar_funs.items():
-                sql = f'select {col_name}, {fun_name}({col_name}) from {self.stbname}'
-                tdLog.info(sql)
-                self.verify_same_value(sql)
-                sql = f'select * from (select {col_name} as a, {fun_name}({col_name}) as b from {self.stbname} ) order by b,a desc'
-                tdLog.info(sql)
-                self.verify_same_value(sql)
+                if col_type == out_type :
+                    sql = f'select {col_name}, {fun_name}({col_name}) from {self.stbname}'
+                    tdLog.info(sql)
+                    self.verify_same_value(sql)
+                    sql = f'select * from (select {col_name} as a, {fun_name}({col_name}) as b from {self.stbname} ) order by b,a desc'
+                    tdLog.info(sql)
+                    self.verify_same_value(sql)


         # multi-args
@@ -216,42 +223,48 @@ class TDTestCase:
     # create aggregate
     def create_aggr_udfpy(self):
         # all type check null
-        for col_name, col_type in self.column_dict:
-            self.create_udf_af(f"af_null_{col_name}", f"{col_type}", 10*1024*1024)
+        for col_name, col_type in self.column_dict.items():
+            self.create_udf_af(f"af_null_{col_name}", "af_null.py", col_type, 10*1024)

         # min
-        self.create_udf_af(f"af_min_float", f"float", 10*1024*1024)
-        self.create_udf_af(f"af_min_int", f"int", 10*1024*1024)
+        file_name = "af_min.py"
+        fun_name = "af_min_float"
+        self.create_udf_af(fun_name, file_name, f"float", 10*1024)
+        fun_name = "af_min_int"
+        self.create_udf_af(fun_name, file_name, f"int", 10*1024)

         # sum
-        self.create_udf_af(f"af_sum_float", f"float", 100*1024*1024)
-        self.create_udf_af(f"af_sum_int", f"sum", 100*1024*1024)
+        file_name = "af_sum.py"
+        fun_name = "af_sum_float"
+        self.create_udf_af(fun_name, file_name, f"float", 10*1024)
+        fun_name = "af_sum_int"
+        self.create_udf_af(fun_name, file_name, f"int", 10*1024)


     # query aggregate
     def query_aggr_udfpy(self) :
         # all type check null
-        for col_name, col_type in self.column_dict:
+        for col_name, col_type in self.column_dict.items():
             fun_name = f"af_null_{col_name}"
-            sql = f'select {fun_name}(col_name) from {self.stbname}'
+            sql = f'select {fun_name}({col_name}) from {self.stbname}'
             tdSql.query(sql)
-            tdSql.checkData(0, 0, "NULL")
+            tdSql.checkData(0, 0, "None")

         # min
         sql = f'select min(col3), af_min_int(col3) from {self.stbname}'
-        self.verfiy_same_value(sql)
+        self.verify_same_value(sql)
         sql = f'select min(col7), af_min_int(col7) from {self.stbname}'
-        self.verfiy_same_value(sql)
+        self.verify_same_value(sql)
         sql = f'select min(col9), af_min_float(col9) from {self.stbname}'
-        self.verfiy_same_value(sql)
+        self.verify_same_value(sql)

         # sum
         sql = f'select sum(col3), af_sum_int(col3) from {self.stbname}'
-        self.verfiy_same_value(sql)
+        self.verify_same_value(sql)
         sql = f'select sum(col7), af_sum_int(col7) from {self.stbname}'
-        self.verfiy_same_value(sql)
+        self.verify_same_value(sql)
         sql = f'select sum(col9), af_sum_float(col9) from {self.stbname}'
-        self.verfiy_same_value(sql)
+        self.verify_same_value(sql)


     # insert to child table d1 data
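For context, the aggregate scripts registered here follow the start/reduce/finish protocol of TDengine's Python UDF runtime, the same one visible in the finish(buf) hunk above. A minimal af_null-style script that ignores its input and returns NULL might look like the sketch below; this is an illustration under that assumption, not the commit's actual af_null.py:

import pickle

def init():
    pass

def destroy():
    pass

def start():
    # initial serialized aggregation state
    return pickle.dumps(None)

def reduce(datablock, buf):
    # ignore all input rows; the state stays None
    return buf

def finish(buf):
    # returning Python None surfaces as SQL NULL, printed as "None" by the checker
    return pickle.loads(buf)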
@@ -284,7 +297,7 @@ class TDTestCase:
         count = 10
         # do
         self.create_table(stable, tbname, count)
-        self.insert_data(tbname, 100)
+        self.insert_data(tbname, 10)

         # scalar
         self.create_scalar_udfpy()