Merge pull request #26920 from taosdata/test/3.0/TD-31005

test: scan returned values in ci
wade zhang 2024-08-05 08:03:05 +08:00 committed by GitHub
commit 8e01044408
8 changed files with 475 additions and 11 deletions

View File

@ -4,7 +4,6 @@ import jenkins.model.CauseOfInterruption
docs_only=0
node {
}
def abortPreviousBuilds() {
def currentJobName = env.JOB_NAME
def currentBuildNumber = env.BUILD_NUMBER.toInteger()
@ -71,6 +70,7 @@ def check_docs() {
} else {
echo file_changed
}
env.FILE_CHANGED = file_changed
}
}
def pre_test(){
@ -137,7 +137,7 @@ def pre_test(){
'''
} else {
sh '''
echo "unmatched reposiotry ${CHANGE_URL}"
echo "unmatched repository ${CHANGE_URL}"
'''
}
sh '''
@ -247,7 +247,7 @@ def pre_test_win(){
'''
} else {
bat '''
echo "unmatched reposiotry %CHANGE_URL%"
echo "unmatched repository %CHANGE_URL%"
'''
}
}
@ -350,7 +350,6 @@ pipeline {
when {
allOf {
not { expression { env.CHANGE_BRANCH =~ /docs\// }}
not { expression { env.CHANGE_URL =~ /\/TDinternal\// }}
}
}
parallel {
@ -419,6 +418,10 @@ pipeline {
timeout(time: 200, unit: 'MINUTES'){
pre_test()
script {
sh '''
mkdir -p ${WKDIR}/tmp/${BRANCH_NAME}_${BUILD_ID}
echo "''' + env.FILE_CHANGED + '''" > ${WKDIR}/tmp/${BRANCH_NAME}_${BUILD_ID}/docs_changed.txt
'''
sh '''
date
rm -rf ${WKC}/debug
@ -450,6 +453,10 @@ pipeline {
}
}
}
sh '''
cd ${WKC}/tests/parallel_test
./run_scan_container.sh -d ${WKDIR} -b ${BRANCH_NAME}_${BUILD_ID} -f ${WKDIR}/tmp/${BRANCH_NAME}_${BUILD_ID}/docs_changed.txt ''' + extra_param + '''
'''
sh '''
cd ${WKC}/tests/parallel_test
export DEFAULT_RETRY_TIME=2

View File

@ -0,0 +1,24 @@
match callExpr(
hasParent(anyOf(
compoundStmt(),
doStmt(hasCondition(expr().bind("cond"))))
),
unless(hasType(voidType())),
unless(callee(functionDecl(hasName("memcpy")))),
unless(callee(functionDecl(hasName("strcpy")))),
unless(callee(functionDecl(hasName("strcat")))),
unless(callee(functionDecl(hasName("strncpy")))),
unless(callee(functionDecl(hasName("memset")))),
unless(callee(functionDecl(hasName("memmove")))),
unless(callee(functionDecl(hasName("sprintf")))),
unless(callee(functionDecl(hasName("snprintf")))),
unless(callee(functionDecl(hasName("scanf")))),
unless(callee(functionDecl(hasName("sncanf")))),
unless(callee(functionDecl(hasName("printf")))),
unless(callee(functionDecl(hasName("printRow")))),
unless(callee(functionDecl(hasName("puts")))),
unless(callee(functionDecl(hasName("sleep")))),
unless(callee(functionDecl(hasName("printResult")))),
unless(callee(functionDecl(hasName("getchar")))),
unless(callee(functionDecl(hasName("taos_print_row")))),
unless(callee(functionDecl(hasName("fprintf")))))

tests/ci/scan.py Normal file (106 lines)
View File

@ -0,0 +1,106 @@
import os
import subprocess
import csv
from datetime import datetime
from loguru import logger
# log file path
log_file_path = "/root/charles/scan.log"
logger.add(log_file_path, rotation="10MB", retention="7 days", level="DEBUG")
# scan result base path
scan_result_base_path = "/root/charles/clang_scan_result/"
# the base source code file path
source_path = "/root/charles/TDinternal/"
# the compile commands json file path
compile_commands_path = "/root/charles/TDinternal/debug/compile_commands.json"
# the ast parser rule for c file
clang_scan_rules_path = "/root/charles/clang_scan_rules"
# all the c files path will be checked
all_file_path = []
class CommandExecutor:
def __init__(self):
self._process = None
def execute(self, command, timeout=None):
try:
self._process = subprocess.Popen(command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = self._process.communicate(timeout=timeout)
return stdout.decode('utf-8'), stderr.decode('utf-8')
except subprocess.TimeoutExpired:
self._process.kill()
self._process.communicate()
raise Exception("Command execution timeout")
except Exception as e:
raise Exception("Command execution failed: %s" % e)
def scan_files_path(source_file_path):
# scan_dir_list = ["source", "include", "docs/examples", "tests/script/api", "src/plugins"]
scan_dir_list = ["source", "include", "docs/examples", "src/plugins"]
scan_skip_file_list = ["/root/charles/TDinternal/community/tools/taosws-rs/target/release/build/openssl-sys-7811e597b848e397/out/openssl-build/install/include/openssl",
"/test/", "contrib", "debug", "deps", "/root/charles/TDinternal/community/source/libs/parser/src/sql.c", "/root/charles/TDinternal/community/source/client/jni/windows/win32/bridge/AccessBridgeCalls.c"]
for root, dirs, files in os.walk(source_file_path):
for file in files:
if any(item in root for item in scan_dir_list):
file_path = os.path.join(root, file)
if (file_path.endswith(".c") or file_path.endswith(".h") or file_path.endswith(".cpp")) and all(item not in file_path for item in scan_skip_file_list):
all_file_path.append(file_path)
logger.info("Found %s files" % len(all_file_path))
def save_scan_res(res_base_path, file_path, out, err):
file_res_path = os.path.join(res_base_path, file_path.replace("/root/charles/", "").split(".")[0] + ".res")
if not os.path.exists(os.path.dirname(file_res_path)):
os.makedirs(os.path.dirname(file_res_path))
logger.info("Save scan result to: %s" % file_res_path)
# save scan result
with open(file_res_path, "w") as f:
f.write(out)
f.write(err)
def write_csv(file_path, data):
try:
with open(file_path, 'w') as f:
writer = csv.writer(f)
writer.writerows(data)
except Exception as ex:
raise Exception("Failed to write the csv file: {} with msg: {}".format(file_path, repr(ex)))
if __name__ == "__main__":
command_executor = CommandExecutor()
# get all the c files path
scan_files_path(source_path)
res = []
# create dir
current_time = datetime.now().strftime("%Y%m%d%H%M%S")
scan_result_path = os.path.join(scan_result_base_path, current_time)
if not os.path.exists(scan_result_path):
os.makedirs(scan_result_path)
for file in all_file_path:
cmd = "clang-query -p %s %s -f %s" % (compile_commands_path, file, clang_scan_rules_path)
try:
stdout, stderr = command_executor.execute(cmd)
lines = stdout.split("\n")
if lines[-2].endswith("matches.") or lines[-2].endswith("match."):
match_num = int(lines[-2].split(" ")[0])
logger.info("The match lines of file %s: %s" % (file, match_num))
if match_num > 0:
save_scan_res(scan_result_path, file, stdout, stderr)
res.append([file, match_num, 'Pass' if match_num == 0 else 'Fail'])
else:
logger.warning("The result of scan is invalid for: %s" % file)
except Exception as e:
logger.error("Execute command failed: %s" % e)
# data = ""
# for item in res:
# data += item[0] + "," + str(item[1]) + "\n"
# logger.info("Csv data: %s" % data)
write_csv(os.path.join(scan_result_path, "scan_res.csv"), res)
logger.info("The result of scan: \n")
logger.info("Total files: %s" % len(res))
logger.info("Total match lines: %s" % sum([item[1] for item in res]))
logger.info("Pass files: %s" % len([item for item in res if item[2] == 'Pass']))
logger.info("Fail files: %s" % len([item for item in res if item[2] == 'Fail']))

tests/ci/scan_file_path.py Normal file (229 lines)
View File

@ -0,0 +1,229 @@
import os
import sys
import subprocess
import csv
from datetime import datetime
from loguru import logger
import getopt
opts, args = getopt.gnu_getopt(sys.argv[1:], 'b:f:w:', [
'branch_name='])
for key, value in opts:
if key in ['-h', '--help']:
print(
'Usage: python3 scan_file_path.py -b <branch_name> -f <file_list> -w <web_server>')
print('-b branch name or PR ID to scan')
print('-f change files list')
print('-w web server')
sys.exit(0)
if key in ['-b', '--branchName']:
branch_name = value
if key in ['-f', '--filesName']:
change_file_list = value
if key in ['-w', '--webServer']:
web_server = value
# the base source code file path
self_path = os.path.dirname(os.path.realpath(__file__))
# if ("community" in self_path):
# TD_project_path = self_path[:self_path.find("community")]
# work_path = TD_project_path[:TD_project_path.find("TDinternal")]
# else:
# TD_project_path = self_path[:self_path.find("tests")]
# work_path = TD_project_path[:TD_project_path.find("TDengine")]
# Check if "community" or "tests" is in self_path
index_community = self_path.find("community")
if index_community != -1:
TD_project_path = self_path[:index_community]
index_TDinternal = TD_project_path.find("TDinternal")
# Check if index_TDinternal is valid and set work_path accordingly
if index_TDinternal != -1:
work_path = TD_project_path[:index_TDinternal]
else:
index_tests = self_path.find("tests")
if index_tests != -1:
TD_project_path = self_path[:index_tests]
# Check if index_TDengine is valid and set work_path accordingly
index_TDengine = TD_project_path.find("TDengine")
if index_TDengine != -1:
work_path = TD_project_path[:index_TDengine]
# log file path
current_time = datetime.now().strftime("%Y%m%d-%H%M%S")
log_file_path = f"{work_path}/scan_log/scan_{branch_name}_{current_time}/"
os.makedirs(log_file_path, exist_ok=True)
scan_log_file = f"{log_file_path}/scan_log.txt"
logger.add(scan_log_file, rotation="10MB", retention="7 days", level="DEBUG")
# if an error happens, uncomment this to debug
# print(self_path,work_path,TD_project_path,log_file_path,change_file_list)
# scan result base path
scan_result_base_path = f"{log_file_path}/clang_scan_result/"
# the compile commands json file path
# compile_commands_path = f"{work_path}/debugNoSan/compile_commands.json"
compile_commands_path = f"{TD_project_path}/debug/compile_commands.json"
# if an error happens, uncomment this to debug
# print(f"compile_commands_path:{compile_commands_path}")
# # replace the docker work path with the real work path in compile_commands.json
# docker_work_path = "home"
# replace_path= work_path[1:-1]
# replace_path = replace_path.replace("/", "\/")
# sed_command = f"sed -i 's/{docker_work_path}/{replace_path}/g' {compile_commands_path}"
# print(sed_command)
# result = subprocess.run(sed_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
# logger.debug(f"STDOUT: {result.stdout} STDERR: {result.stderr}")
# the ast parser rule for c file
clang_scan_rules_path = f"{self_path}/filter_for_return_values"
#
# all the c files path will be checked
all_file_path = []
class CommandExecutor:
def __init__(self):
self._process = None
def execute(self, command, timeout=None):
try:
self._process = subprocess.Popen(command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = self._process.communicate(timeout=timeout)
return stdout.decode('utf-8'), stderr.decode('utf-8')
except subprocess.TimeoutExpired:
self._process.kill()
self._process.communicate()
raise Exception("Command execution timeout")
except Exception as e:
raise Exception("Command execution failed: %s" % e)
def scan_files_path(source_file_path):
# scan_dir_list = ["source", "include", "docs/examples", "tests/script/api", "src/plugins"]
scan_dir_list = ["source", "include", "docs/examples", "src/plugins"]
scan_skip_file_list = ["/root/charles/TDinternal/community/tools/taosws-rs/target/release/build/openssl-sys-7811e597b848e397/out/openssl-build/install/include/openssl",
"/test/", "contrib", "debug", "deps", "/root/charles/TDinternal/community/source/libs/parser/src/sql.c", "/root/charles/TDinternal/community/source/client/jni/windows/win32/bridge/AccessBridgeCalls.c"]
for root, dirs, files in os.walk(source_file_path):
for file in files:
if any(item in root for item in scan_dir_list):
file_path = os.path.join(root, file)
if (file_path.endswith(".c") or file_path.endswith(".h") or file_path.endswith(".cpp")) and all(item not in file_path for item in scan_skip_file_list):
all_file_path.append(file_path)
logger.info("Found %s files" % len(all_file_path))
def input_files(change_files):
# scan_dir_list = ["source", "include", "docs/examples", "tests/script/api", "src/plugins"]
scan_dir_list = ["source", "include", "docs/examples", "src/plugins"]
scan_skip_file_list = [f"{TD_project_path}/TDinternal/community/tools/taosws-rs/target/release/build/openssl-sys-7811e597b848e397/out/openssl-build/install/include/openssl", "/test/", "contrib", "debug", "deps", f"{TD_project_path}/TDinternal/community/source/libs/parser/src/sql.c", f"{TD_project_path}/TDinternal/community/source/client/jni/windows/win32/bridge/AccessBridgeCalls.c"]
with open(change_files, 'r') as file:
for line in file:
file_name = line.strip()
if any(dir_name in file_name for dir_name in scan_dir_list):
if (file_name.endswith(".c") or file_name.endswith(".h") or file_name.endswith(".cpp")) and all(dir_name not in file_name for dir_name in scan_skip_file_list):
if "enterprise" in file_name:
file_name = os.path.join(TD_project_path, file_name)
else:
tdc_file_path = os.path.join(TD_project_path, "community/")
file_name = os.path.join(tdc_file_path, file_name)
all_file_path.append(file_name)
# print(f"all_file_path:{all_file_path}")
logger.info("Found %s files" % len(all_file_path))
file_res_path = ""
def save_scan_res(res_base_path, file_path, out, err):
global file_res_path
file_res_path = os.path.join(res_base_path, file_path.replace(f"{work_path}", "").split(".")[0] + ".txt")
# print(f"file_res_path:{file_res_path},res_base_path:{res_base_path},file_path:{file_path}")
if not os.path.exists(os.path.dirname(file_res_path)):
os.makedirs(os.path.dirname(file_res_path))
logger.info("Save scan result to: %s" % file_res_path)
# save scan result
with open(file_res_path, "w") as f:
f.write(err)
f.write(out)
logger.debug(f"file_res_file: {file_res_path}")
def write_csv(file_path, data):
try:
with open(file_path, 'w') as f:
writer = csv.writer(f)
writer.writerows(data)
except Exception as ex:
raise Exception("Failed to write the csv file: {} with msg: {}".format(file_path, repr(ex)))
if __name__ == "__main__":
command_executor = CommandExecutor()
# get all the c files path
# scan_files_path(TD_project_path)
input_files(change_file_list)
# print(f"all_file_path:{all_file_path}")
res = []
web_path = []
res.append(["scan_source_file", "scan_result_file", "match_num", "check_result"])
# create dir
# current_time = datetime.now().strftime("%Y%m%d%H%M%S")
# scan_result_path = os.path.join(scan_result_base_path, current_time)
# scan_result_path = scan_result_base_path
# if not os.path.exists(scan_result_path):
# os.makedirs(scan_result_path)
for file in all_file_path:
cmd = f"clang-query-10 -p {compile_commands_path} {file} -f {clang_scan_rules_path}"
logger.debug(f"cmd:{cmd}")
try:
stdout, stderr = command_executor.execute(cmd)
#if "error" in stderr:
# print(stderr)
lines = stdout.split("\n")
if lines[-2].endswith("matches.") or lines[-2].endswith("match."):
match_num = int(lines[-2].split(" ")[0])
logger.info("The match lines of file %s: %s" % (file, match_num))
if match_num > 0:
logger.info(f"log_file_path: {log_file_path} ,file:{file}")
save_scan_res(log_file_path, file, stdout, stderr)
index_tests = file_res_path.find("scan_log")
if index_tests != -1:
web_path_file = file_res_path[index_tests:]
web_path_file = os.path.join(web_server, web_path_file)
web_path.append(web_path_file)
res.append([file, file_res_path, match_num, 'Pass' if match_num == 0 else 'Fail'])
else:
logger.warning("The result of scan is invalid for: %s" % file)
except Exception as e:
logger.error("Execute command failed: %s" % e)
# data = ""
# for item in res:
# data += item[0] + "," + str(item[1]) + "\n"
# logger.info("Csv data: %s" % data)
write_csv(os.path.join(log_file_path, "scan_res.txt"), res)
scan_result_log = f"{log_file_path}/scan_res.txt"
# delete the first element of res
res= res[1:]
logger.info("The result of scan: \n")
logger.info("Total scan files: %s" % len(res))
logger.info("Total match lines: %s" % sum([item[2] for item in res]))
logger.info(f"scan log file : {scan_result_log}")
logger.info("Pass files: %s" % len([item for item in res if item[3] == 'Pass']))
logger.info("Fail files: %s" % len([item for item in res if item[3] == 'Fail']))
if len([item for item in res if item[3] == 'Fail']) > 0:
logger.error(f"Scan failed,please check the log file:{scan_result_log}")
for index, failed_result_file in enumerate(web_path):
logger.error(f"failed number: {index}, failed_result_file: {failed_result_file}")
exit(1)

View File

@ -83,7 +83,7 @@ docker run \
-v ${REP_REAL_PATH}/community/contrib/xml2/:${REP_DIR}/community/contrib/xml2 \
-v ${REP_REAL_PATH}/community/contrib/zlib/:${REP_DIR}/community/contrib/zlib \
-v ${REP_REAL_PATH}/community/contrib/zstd/:${REP_DIR}/community/contrib/zstd \
--rm --ulimit core=-1 taos_test:v1.0 sh -c "pip uninstall taospy -y;pip3 install taospy==2.7.2;cd $REP_DIR;rm -rf debug;mkdir -p debug;cd debug;cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0;make -j 10|| exit 1"
--rm --ulimit core=-1 taos_test:v1.0 sh -c "pip uninstall taospy -y;pip3 install taospy==2.7.2;cd $REP_DIR;rm -rf debug;mkdir -p debug;cd debug;cmake .. -DBUILD_HTTP=false -DBUILD_TOOLS=true -DBUILD_TEST=true -DWEBSOCKET=true -DBUILD_TAOSX=false -DJEMALLOC_ENABLED=0 -DCMAKE_EXPORT_COMPILE_COMMANDS=1 ;make -j 10|| exit 1"
# -v ${REP_REAL_PATH}/community/contrib/jemalloc/:${REP_DIR}/community/contrib/jemalloc \
if [[ -d ${WORKDIR}/debugNoSan ]] ;then

View File

@ -0,0 +1,98 @@
#!/bin/bash
function usage() {
echo "$0"
echo -e "\t -d work dir"
echo -e "\t -b pr and id"
echo -e "\t -w web server "
echo -e "\t -f scan file "
echo -e "\t -h help"
}
while getopts "d:b:w:f:h" opt; do
case $opt in
d)
WORKDIR=$OPTARG
;;
b)
branch_name_id=$OPTARG
;;
f)
scan_file_name=$OPTARG
;;
w)
web_server=$OPTARG
;;
h)
usage
exit 0
;;
\?)
echo "Invalid option: -$OPTARG"
usage
exit 0
;;
esac
done
if [ -z "$branch_name_id" ]; then
usage
exit 1
fi
if [ -z "$scan_file_name" ]; then
usage
exit 1
fi
if [ -z "$WORKDIR" ]; then
usage
exit 1
fi
if [ -z "$web_server" ]; then
usage
exit 1
fi
# enterprise edition
INTERNAL_REPDIR=$WORKDIR/TDinternal
REPDIR_DEBUG=$WORKDIR/debugNoSan/
REP_MOUNT_DEBUG="${REPDIR_DEBUG}:/home/TDinternal/debug/"
REP_MOUNT_PARAM="$INTERNAL_REPDIR:/home/TDinternal"
CONTAINER_TESTDIR=/home/TDinternal/community
#scan change file path
scan_changefile_temp_path="$WORKDIR/tmp/${branch_name_id}/"
docker_can_changefile_temp_path="/home/tmp/${branch_name_id}/"
mkdir -p $scan_changefile_temp_path
scan_file_name="$docker_can_changefile_temp_path/docs_changed.txt"
#scan log file path
scan_log_temp_path="$WORKDIR/log/scan_log/"
docker_scan_log_temp_path="/home/scan_log/"
mkdir -p $scan_log_temp_path
scan_scripts="$CONTAINER_TESTDIR/tests/ci/scan_file_path.py"
ulimit -c unlimited
cat << EOF
docker run \
-v $REP_MOUNT_PARAM \
-v $REP_MOUNT_DEBUG \
-v $scan_changefile_temp_path:$docker_can_changefile_temp_path \
-v $scan_log_temp_path:$docker_scan_log_temp_path \
--rm --ulimit core=-1 taos_test:v1.0 python3 $scan_scripts -b "${branch_name_id}" -f "${scan_file_name}" -w ${web_server}
EOF
docker run \
-v $REP_MOUNT_PARAM \
-v $REP_MOUNT_DEBUG \
-v $scan_changefile_temp_path:$docker_can_changefile_temp_path \
-v $scan_log_temp_path:$docker_scan_log_temp_path \
--rm --ulimit core=-1 taos_test:v1.0 python3 $scan_scripts -b "${branch_name_id}" -f "${scan_file_name}" -w ${web_server}
ret=$?
exit $ret

View File

@ -36,7 +36,7 @@ if $data(3)[4] != ready then
goto step1
endi
print =============== step2: create mnode 2
print =============== step2: create mnode 2 3
sql create mnode on dnode 2
sql create mnode on dnode 3
sql_error create mnode on dnode 4
@ -115,7 +115,7 @@ if $data(3)[4] != ready then
goto step41
endi
print =============== step5: stop dnode1
print =============== step5: stop dnode2
system sh/exec.sh -n dnode1 -s start
system sh/exec.sh -n dnode2 -s stop
@ -154,7 +154,7 @@ if $data(3)[4] != ready then
goto step51
endi
print =============== step6: stop dnode1
print =============== step6: stop dnode3
system sh/exec.sh -n dnode2 -s start
system sh/exec.sh -n dnode3 -s stop

View File

@ -1504,9 +1504,9 @@ class TDTestCase:
# max number of list is 4093: 4096 - 3 - 2 (number of tags in the original table) - 1 (tbname)
tdSql.execute('use db4096')
self.create_tsma('tsma_4050', 'db4096', 'stb0', self.generate_tsma_function_list_columns(4050), '5m',check_tsma_calculation=False)
self.create_tsma('tsma_4050', 'db4096', 'stb0', self.generate_tsma_function_list_columns(4050), '5m',check_tsma_calculation=True)
self.create_tsma('tsma_4090', 'db4096', 'stb0', self.generate_tsma_function_list_columns(4090), '6m',check_tsma_calculation=False)
self.create_tsma('tsma_4090', 'db4096', 'stb0', self.generate_tsma_function_list_columns(4090), '6m',check_tsma_calculation=True)
self.create_error_tsma('tsma_4091', 'db4096', 'stb0', self.generate_tsma_function_list_columns(4091), '5m', -2147473856) #Too many columns