From 3890def22bd7df670f594c7a1de0b84e20d1e315 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 9 Oct 2024 18:36:06 +0800 Subject: [PATCH 001/102] lib_azure: base on ms ablob sdk --- .gitignore | 1 + cmake/azure_CMakeLists.txt.in | 15 +++++++ cmake/cmake.define | 3 +- contrib/CMakeLists.txt | 9 ++++ contrib/azure-cmake/CMakeLists.txt | 70 ++++++++++++++++++++++++++++++ contrib/test/CMakeLists.txt | 1 + 6 files changed, 98 insertions(+), 1 deletion(-) create mode 100644 cmake/azure_CMakeLists.txt.in create mode 100644 contrib/azure-cmake/CMakeLists.txt diff --git a/.gitignore b/.gitignore index 1798a920eb..03b30a2224 100644 --- a/.gitignore +++ b/.gitignore @@ -121,6 +121,7 @@ TAGS contrib/* !contrib/CMakeLists.txt !contrib/test +!contrib/azure-cmake sql debug*/ .env diff --git a/cmake/azure_CMakeLists.txt.in b/cmake/azure_CMakeLists.txt.in new file mode 100644 index 0000000000..5aa32b70e5 --- /dev/null +++ b/cmake/azure_CMakeLists.txt.in @@ -0,0 +1,15 @@ +# azure +ExternalProject_Add(azure + URL https://github.com/Azure/azure-sdk-for-cpp/archive/refs/tags/azure-storage-blobs_12.13.0-beta.1.tar.gz + URL_HASH SHA256=3eca486fd60e3522d0a633025ecd652a71515b1e944799b2e8ee31fd590305a9 + DOWNLOAD_NO_PROGRESS 1 + DOWNLOAD_DIR "${TD_CONTRIB_DIR}/deps-download" + SOURCE_DIR "${TD_CONTRIB_DIR}/azure-sdk-for-cpp-azure-storage-blobs_12.13.0-beta.1" + #BUILD_IN_SOURCE TRUE + #BUILD_ALWAYS 1 + #UPDATE_COMMAND "" + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND "" + TEST_COMMAND "" +) diff --git a/cmake/cmake.define b/cmake/cmake.define index eb78b54cae..c7a58cd349 100644 --- a/cmake/cmake.define +++ b/cmake/cmake.define @@ -208,7 +208,8 @@ ELSE () IF (${BUILD_SANITIZER}) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Werror -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") + #SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") + SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") MESSAGE(STATUS "Compile with Address Sanitizer!") ELSEIF (${BUILD_RELEASE}) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS_REL}") diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt index 2a38bf74c7..d6ae5497f1 100644 --- a/contrib/CMakeLists.txt +++ b/contrib/CMakeLists.txt @@ -152,6 +152,7 @@ if(${BUILD_WITH_S3}) cat("${TD_SUPPORT_DIR}/xml2_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) cat("${TD_SUPPORT_DIR}/libs3_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) + cat("${TD_SUPPORT_DIR}/azure_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) add_definitions(-DUSE_S3) # cos @@ -614,9 +615,17 @@ if (${BUILD_PCRE2}) add_subdirectory(pcre2 EXCLUDE_FROM_ALL) endif(${BUILD_PCRE2}) + 
+if(${TD_LINUX}) + add_subdirectory(azure-cmake EXCLUDE_FROM_ALL) +endif(${TD_LINUX}) + # ================================================================================================ # Build test # ================================================================================================ + +MESSAGE("build with dependency tests: ${BUILD_DEPENDENCY_TESTS}") + if(${BUILD_DEPENDENCY_TESTS}) add_subdirectory(test EXCLUDE_FROM_ALL) endif(${BUILD_DEPENDENCY_TESTS}) diff --git a/contrib/azure-cmake/CMakeLists.txt b/contrib/azure-cmake/CMakeLists.txt new file mode 100644 index 0000000000..e4624361ed --- /dev/null +++ b/contrib/azure-cmake/CMakeLists.txt @@ -0,0 +1,70 @@ +# lib_azure_sdk +set(AZURE_DIR "${TD_CONTRIB_DIR}/azure-sdk-for-cpp-azure-storage-blobs_12.13.0-beta.1") +set(AZURE_SDK_LIBRARY_DIR "${AZURE_DIR}/sdk") + +file(GLOB AZURE_SDK_SRC + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/credentials/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/cryptography/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/http/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/http/curl/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/io/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/tracing/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/identity/azure-identity/src/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-blobs/src/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-blobs/src/private/*.cpp" + "${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-common/src/*.cpp" +) + +file(GLOB AZURE_SDK_UNIFIED_SRC + ${AZURE_SDK_SRC} +) + +set(AZURE_SDK_INCLUDES + "${AZURE_SDK_LIBRARY_DIR}/core/azure-core/inc/" + "${AZURE_SDK_LIBRARY_DIR}/identity/azure-identity/inc/" + "${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-common/inc/" + "${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-blobs/inc/" +) + +add_library(_azure_sdk STATIC ${AZURE_SDK_UNIFIED_SRC}) +target_compile_definitions(_azure_sdk PRIVATE BUILD_CURL_HTTP_TRANSPORT_ADAPTER) + + target_include_directories( + _azure_sdk + PUBLIC "$ENV{HOME}/.cos-local.2/include" + ) + +find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +find_library(XML2_LIBRARY xml2 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +#find_library(CURL_LIBRARY curl) +#find_library(XML2_LIBRARY xml2) +find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +#find_library(CoreFoundation_Library CoreFoundation) +#find_library(SystemConfiguration_Library SystemConfiguration) + +target_link_libraries( + _azure_sdk + PRIVATE ${CURL_LIBRARY} + PRIVATE ${SSL_LIBRARY} + PRIVATE ${CRYPTO_LIBRARY} + PRIVATE ${XML2_LIBRARY} + #PRIVATE xml2 + PRIVATE zlib +# PRIVATE ${CoreFoundation_Library} +# PRIVATE ${SystemConfiguration_Library} +) + +# Originally, on Windows azure-core is built with bcrypt and crypt32 by default +if (TARGET OpenSSL::SSL) + target_link_libraries(_azure_sdk PRIVATE OpenSSL::Crypto OpenSSL::SSL) +endif() + +# Originally, on Windows azure-core is built with winhttp by default +if (TARGET td_contrib::curl) + target_link_libraries(_azure_sdk PRIVATE td_contrib::curl) +endif() + +target_include_directories(_azure_sdk SYSTEM BEFORE PUBLIC ${AZURE_SDK_INCLUDES}) +add_library(td_contrib::azure_sdk ALIAS _azure_sdk) diff --git a/contrib/test/CMakeLists.txt b/contrib/test/CMakeLists.txt index 1deff5a67e..f544baafde 100644 --- 
a/contrib/test/CMakeLists.txt +++ b/contrib/test/CMakeLists.txt @@ -28,5 +28,6 @@ if(${BUILD_WITH_TRAFT}) # add_subdirectory(traft) endif(${BUILD_WITH_TRAFT}) +add_subdirectory(azure) add_subdirectory(tdev) add_subdirectory(lz4) From c6018cbfaaac825036713ed5c27632848abdb7c5 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 10 Oct 2024 08:58:02 +0800 Subject: [PATCH 002/102] lib_azure: test case --- contrib/test/azure/CMakeLists.txt | 29 +++++++++++++++++ contrib/test/azure/main.cpp | 54 +++++++++++++++++++++++++++++++ 2 files changed, 83 insertions(+) create mode 100644 contrib/test/azure/CMakeLists.txt create mode 100644 contrib/test/azure/main.cpp diff --git a/contrib/test/azure/CMakeLists.txt b/contrib/test/azure/CMakeLists.txt new file mode 100644 index 0000000000..b3db1dffce --- /dev/null +++ b/contrib/test/azure/CMakeLists.txt @@ -0,0 +1,29 @@ +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED True) + +add_executable ( + azure-test + main.cpp +) + +# Link to Azure SDK +#target_link_libraries(application _azure_sdk) + +find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +find_library(XML2_LIBRARY xml2 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +#find_library(XML2_LIBRARY xml2) +find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) +#find_library(CoreFoundation_Library CoreFoundation) +#find_library(SystemConfiguration_Library SystemConfiguration) + + target_link_libraries( + azure-test + PRIVATE _azure_sdk + PRIVATE ${CURL_LIBRARY} + PRIVATE ${XML2_LIBRARY} + PRIVATE ${SSL_LIBRARY} + PRIVATE ${CRYPTO_LIBRARY} + PRIVATE dl + PRIVATE pthread +) diff --git a/contrib/test/azure/main.cpp b/contrib/test/azure/main.cpp new file mode 100644 index 0000000000..5d52801329 --- /dev/null +++ b/contrib/test/azure/main.cpp @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +/** + * @file + * @brief Application that consumes the Azure SDK for C++. + * + * @remark Set environment variable `STORAGE_CONNECTION_STRING` before running the application. 
+ *
+ */
+
+#include <iostream>
+
+#include <azure/core.hpp>
+#include <azure/storage/blobs.hpp>
+
+using namespace Azure::Storage::Blobs;
+
+int main(int argc, char* argv[])
+{
+  (void)argc;
+  (void)argv;
+
+  /**************** Container SDK client ************************/
+  /**************** Create container ************************/
+  try
+  {
+    auto containerClient = BlobContainerClient::CreateFromConnectionString(
+        std::getenv("STORAGE_CONNECTION_STRING"), "td-test");
+    //containerClient.CreateIfNotExists();
+
+    /**************** Container SDK client ************************/
+    /**************** list blobs (one page) ******************/
+    //auto response = containerClient.ListBlobsSinglePage();
+    //auto response = containerClient.ListBlobs();
+    //auto blobListPage = response.Value;
+    //auto blobListPage = response.Blobs;
+    for (auto page = containerClient.ListBlobs(/*options*/); page.HasPage(); page.MoveToNextPage())
+    {
+      for (auto& blob : page.Blobs)
+      {
+        std::cout << blob.Name << std::endl;
+      }
+    }
+
+  }
+  catch (const std::exception& ex)
+  {
+    std::cout << ex.what();
+    return 1;
+  }
+
+  return 0;
+}

From fdc5d6c62574edfb3b7e53207e64b7d335c35ec5 Mon Sep 17 00:00:00 2001
From: Minglei Jin
Date: Thu, 10 Oct 2024 09:56:43 +0800
Subject: [PATCH 003/102] cos: prep cos from tcs integration

---
 include/common/cos.h    |  2 ++
 source/common/src/cos.c | 22 ++++++++--------------
 2 files changed, 10 insertions(+), 14 deletions(-)

diff --git a/include/common/cos.h b/include/common/cos.h
index b336a1e5ee..53dc161ee5 100644
--- a/include/common/cos.h
+++ b/include/common/cos.h
@@ -32,6 +32,8 @@ extern int32_t tsS3PageCacheSize;
 extern int32_t tsS3UploadDelaySec;
 
 int32_t s3Init();
+int32_t s3Begin();
+void s3End();
 int32_t s3CheckCfg();
 int32_t s3PutObjectFromFile(const char *file, const char *object);
 int32_t s3PutObjectFromFile2(const char *file, const char *object, int8_t withcp);
diff --git a/source/common/src/cos.c b/source/common/src/cos.c
index c2b9fe34e1..47dc629c73 100644
--- a/source/common/src/cos.c
+++ b/source/common/src/cos.c
@@ -89,20 +89,8 @@ static void s3DumpCfgByEp(int8_t epIndex) {
 
 int32_t s3CheckCfg() {
   int32_t code = 0, lino = 0;
-  int8_t i = 0;
 
-  if (!tsS3Enabled) {
-    (void)fprintf(stderr, "s3 not configured.\n");
-    TAOS_RETURN(code);
-  }
-
-  code = s3Begin();
-  if (code != 0) {
-    (void)fprintf(stderr, "failed to initialize s3.\n");
-    TAOS_RETURN(code);
-  }
-
-  for (; i < tsS3EpNum; i++) {
+  for (int8_t i = 0; i < tsS3EpNum; i++) {
     (void)fprintf(stdout, "test s3 ep (%d/%d):\n", i + 1, tsS3EpNum);
     s3DumpCfgByEp(i);
 
@@ -192,7 +180,7 @@ int32_t s3CheckCfg() {
     (void)fprintf(stdout, "=================================================================\n");
   }
 
-  s3End();
+  // s3End();
 
   TAOS_RETURN(code);
 }
@@ -1529,6 +1517,8 @@ void s3EvictCache(const char *path, long object_size) {}
 #include "cos_http_io.h"
 #include "cos_log.h"
+int32_t s3Begin() { TAOS_RETURN(TSDB_CODE_SUCCESS); }
+
 int32_t s3Init() {
   if (cos_http_io_initialize(NULL, 0) != COSE_OK) {
     return -1;
   }
@@ -1967,6 +1957,10 @@ long s3Size(const char *object_name) {
 #else
 
 int32_t s3Init() { return 0; }
+int32_t s3Begin() { TAOS_RETURN(TSDB_CODE_SUCCESS); }
+
+void s3End() {}
+int32_t s3CheckCfg() { return 0; }
 int32_t s3PutObjectFromFile(const char *file, const char *object) { return 0; }
 int32_t s3PutObjectFromFile2(const char *file, const char *object, int8_t withcp) { return 0; }
 int32_t s3PutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size) { return 0; }

From cd9eec15fb40ccd68116b151d24739d5d8043234 Mon Sep 17 00:00:00 2001
From:
Minglei Jin Date: Thu, 10 Oct 2024 10:13:55 +0800 Subject: [PATCH 004/102] tcs: unified interface of cs --- include/libs/tcs/tcs.h | 58 ++++ source/libs/CMakeLists.txt | 4 +- source/libs/tcs/CMakeLists.txt | 22 ++ source/libs/tcs/src/tcs.c | 149 +++++++++++ source/libs/tcs/test/CMakeLists.txt | 18 ++ source/libs/tcs/test/tcsTest.c | 395 ++++++++++++++++++++++++++++ 6 files changed, 645 insertions(+), 1 deletion(-) create mode 100644 include/libs/tcs/tcs.h create mode 100644 source/libs/tcs/CMakeLists.txt create mode 100644 source/libs/tcs/src/tcs.c create mode 100644 source/libs/tcs/test/CMakeLists.txt create mode 100644 source/libs/tcs/test/tcsTest.c diff --git a/include/libs/tcs/tcs.h b/include/libs/tcs/tcs.h new file mode 100644 index 0000000000..530a23d9e9 --- /dev/null +++ b/include/libs/tcs/tcs.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#ifndef _TD_TCS_H_ +#define _TD_TCS_H_ + +#include "os.h" +#include "tarray.h" +#include "tdef.h" +#include "tlog.h" +#include "tmsg.h" + +#ifdef __cplusplus +extern "C" { +#endif + +extern int8_t tsS3Enabled; +extern int8_t tsS3EnabledCfg; + +extern int32_t tsS3UploadDelaySec; +extern int32_t tsS3BlockSize; +extern int32_t tsS3BlockCacheSize; +extern int32_t tsS3PageCacheSize; + +extern int8_t tsS3StreamEnabled; + +int32_t tcsInit(); +void tcsUninit(); + +int32_t tcsCheckCfg(); + +int32_t tcsPutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size); +int32_t tcsGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock); + +void tcsDeleteObjectsByPrefix(const char *prefix); + +int32_t tcsPutObjectFromFile2(const char *file, const char *object, int8_t withcp); +int32_t tcsGetObjectsByPrefix(const char *prefix, const char *path); +int32_t tcsDeleteObjects(const char *object_name[], int nobject); +int32_t tcsGetObjectToFile(const char *object_name, const char *fileName); + +#ifdef __cplusplus +} +#endif + +#endif // _TD_TCS_H_ diff --git a/source/libs/CMakeLists.txt b/source/libs/CMakeLists.txt index 64209572f4..41a1e99521 100644 --- a/source/libs/CMakeLists.txt +++ b/source/libs/CMakeLists.txt @@ -22,4 +22,6 @@ add_subdirectory(stream) add_subdirectory(planner) add_subdirectory(qworker) add_subdirectory(geometry) -add_subdirectory(command) \ No newline at end of file +add_subdirectory(command) +#add_subdirectory(azure) +add_subdirectory(tcs) diff --git a/source/libs/tcs/CMakeLists.txt b/source/libs/tcs/CMakeLists.txt new file mode 100644 index 0000000000..1c914a18b9 --- /dev/null +++ b/source/libs/tcs/CMakeLists.txt @@ -0,0 +1,22 @@ +aux_source_directory(src TOS_SRC) + +add_library(tcs STATIC ${TOS_SRC}) +target_include_directories( + tcs + PUBLIC "${TD_SOURCE_DIR}/include/libs/tcs" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" +) + +target_link_libraries( + tcs + PUBLIC az + PUBLIC common + # PUBLIC cjson + # PUBLIC os + # PUBLIC util + # PUBLIC crypt +) + +if(${BUILD_TEST}) + add_subdirectory(test) 
+endif(${BUILD_TEST}) diff --git a/source/libs/tcs/src/tcs.c b/source/libs/tcs/src/tcs.c new file mode 100644 index 0000000000..c5c68c4933 --- /dev/null +++ b/source/libs/tcs/src/tcs.c @@ -0,0 +1,149 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#include "tcs.h" + +#include "os.h" +#include "taoserror.h" +#include "tglobal.h" + +//#include "az.h" +#include "cos.h" + +extern int8_t tsS3Ablob; + +typedef enum { + TOS_PROTO_NIL, + TOS_PROTO_S3, + TOS_PROTO_ABLOB, +} STosProto; + +typedef struct { + int32_t (*Begin)(); + void (*End)(); + int32_t (*CheckCfg)(); + + int32_t (*PutObjectFromFileOffset)(const char* file, const char* object_name, int64_t offset, int64_t size); + int32_t (*GetObjectBlock)(const char* object_name, int64_t offset, int64_t size, bool check, uint8_t** ppBlock); + + void (*DeleteObjectsByPrefix)(const char* prefix); + + int32_t (*PutObjectFromFile2)(const char* file, const char* object, int8_t withcp); + int32_t (*GetObjectsByPrefix)(const char* prefix, const char* path); + int32_t (*DeleteObjects)(const char* object_name[], int nobject); + int32_t (*GetObjectToFile)(const char* object_name, const char* fileName); +} STcs; + +static STcs tcs; + +int32_t tcsInit() { + int32_t code = 0; + + STosProto proto = tsS3Ablob ? 
TOS_PROTO_ABLOB : TOS_PROTO_S3; + + if (TOS_PROTO_S3 == proto) { + tcs.Begin = s3Begin; + tcs.End = s3End; + tcs.CheckCfg = s3CheckCfg; + + tcs.PutObjectFromFileOffset = s3PutObjectFromFileOffset; + tcs.GetObjectBlock = s3GetObjectBlock; + + tcs.DeleteObjectsByPrefix = s3DeleteObjectsByPrefix; + + tcs.PutObjectFromFile2 = s3PutObjectFromFile2; + tcs.GetObjectsByPrefix = s3GetObjectsByPrefix; + tcs.DeleteObjects = s3DeleteObjects; + tcs.GetObjectToFile = s3GetObjectToFile; + } else if (TOS_PROTO_ABLOB == proto) { + /* + tcs.Begin = azBegin; + tcs.End = azEnd; + tcs.CheckCfg = azCheckCfg; + + tcs.PutObjectFromFileOffset = azPutObjectFromFileOffset; + tcs.GetObjectBlock = azGetObjectBlock; + + tcs.DeleteObjectsByPrefix = azDeleteObjectsByPrefix; + + tcs.PutObjectFromFile2 = azPutObjectFromFile2; + tcs.GetObjectsByPrefix = azGetObjectsByPrefix; + tcs.DeleteObjects = azDeleteObjects; + tcs.GetObjectToFile = azGetObjectToFile; + */ + } else { + code = TSDB_CODE_INVALID_PARA; + return code; + } + + code = tcs.Begin(); + + return code; +} + +void tcsUninit() { tcs.End(); } + +int32_t tcsCheckCfg() { + int32_t code = 0; + + if (!tsS3Enabled) { + (void)fprintf(stderr, "s3 not configured.\n"); + TAOS_RETURN(code); + } + + code = tcsInit(); + if (code != 0) { + (void)fprintf(stderr, "failed to initialize s3.\n"); + TAOS_RETURN(code); + } + + code = s3Begin(); + if (code != 0) { + (void)fprintf(stderr, "failed to begin s3.\n"); + TAOS_RETURN(code); + } + + code = tcs.CheckCfg(); + if (code != 0) { + (void)fprintf(stderr, "failed to check s3.\n"); + TAOS_RETURN(code); + } + + tcsUninit(); + + return code; +} + +int32_t tcsPutObjectFromFileOffset(const char* file, const char* object_name, int64_t offset, int64_t size) { + return tcs.PutObjectFromFileOffset(file, object_name, offset, size); +} + +int32_t tcsGetObjectBlock(const char* object_name, int64_t offset, int64_t size, bool check, uint8_t** ppBlock) { + return tcs.GetObjectBlock(object_name, offset, size, check, ppBlock); +} + +void tcsDeleteObjectsByPrefix(const char* prefix) { return tcs.DeleteObjectsByPrefix(prefix); } + +int32_t tcsPutObjectFromFile2(const char* file, const char* object, int8_t withcp) { + return tcs.PutObjectFromFile2(file, object, withcp); +} + +int32_t tcsGetObjectsByPrefix(const char* prefix, const char* path) { return tcs.GetObjectsByPrefix(prefix, path); } + +int32_t tcsDeleteObjects(const char* object_name[], int nobject) { return tcs.DeleteObjects(object_name, nobject); } + +int32_t tcsGetObjectToFile(const char* object_name, const char* fileName) { + return tcs.GetObjectToFile(object_name, fileName); +} diff --git a/source/libs/tcs/test/CMakeLists.txt b/source/libs/tcs/test/CMakeLists.txt new file mode 100644 index 0000000000..656c659476 --- /dev/null +++ b/source/libs/tcs/test/CMakeLists.txt @@ -0,0 +1,18 @@ +aux_source_directory(. 
TOS_TEST_SRC) + +add_executable(tosTest ${TOS_TEST_SRC}) +target_include_directories(tosTest + PUBLIC + "${TD_SOURCE_DIR}/include/libs/tosure" + "${CMAKE_CURRENT_SOURCE_DIR}/../inc" +) + +target_link_libraries(tosTest + tcs + gtest_main +) +enable_testing() +add_test( + NAME tos_test + COMMAND tosTest +) diff --git a/source/libs/tcs/test/tcsTest.c b/source/libs/tcs/test/tcsTest.c new file mode 100644 index 0000000000..68b39bd710 --- /dev/null +++ b/source/libs/tcs/test/tcsTest.c @@ -0,0 +1,395 @@ +#include +#include +#include +#include +/* +#include "walInt.h" +const char* ranStr = "tvapq02tcp"; +const int ranStrLen = strlen(ranStr); +SWalSyncInfo syncMeta = {0}; +class WalCleanEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void SetUp() override { + taosRemoveDir(pathName); + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->rollPeriod = -1; + pCfg->segSize = -1; + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +class WalCleanDeleteEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void SetUp() override { + taosRemoveDir(pathName); + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +class WalKeepEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void walResetEnv() { + TearDown(); + taosRemoveDir(pathName); + SetUp(); + } + void SetUp() override { + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->rollPeriod = -1; + pCfg->segSize = -1; + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +class WalRetentionEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void walResetEnv() { + TearDown(); + taosRemoveDir(pathName); + SetUp(); + } + void SetUp() override { + SWalCfg cfg; + cfg.rollPeriod = -1; + cfg.segSize = -1; + cfg.retentionPeriod = -1; + cfg.retentionSize = 0; + cfg.rollPeriod = 0; + cfg.vgId = 0; + cfg.level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, &cfg); + ASSERT(pWal != NULL); + } + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +TEST_F(WalCleanEnv, createNew) { + 
walRollFileInfo(pWal); + ASSERT(pWal->fileInfoSet != NULL); + ASSERT_EQ(pWal->fileInfoSet->size, 1); + SWalFileInfo* pInfo = (SWalFileInfo*)taosArrayGetLast(pWal->fileInfoSet); + ASSERT_EQ(pInfo->firstVer, 0); + ASSERT_EQ(pInfo->lastVer, -1); + ASSERT_EQ(pInfo->closeTs, -1); + ASSERT_EQ(pInfo->fileSize, 0); +} +TEST_F(WalCleanEnv, serialize) { + int code = walRollFileInfo(pWal); + ASSERT(code == 0); + ASSERT(pWal->fileInfoSet != NULL); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + char* ss = NULL; + code = walMetaSerialize(pWal, &ss); + ASSERT(code == 0); + printf("%s\n", ss); + taosMemoryFree(ss); + code = walSaveMeta(pWal); + ASSERT(code == 0); +} +TEST_F(WalCleanEnv, removeOldMeta) { + int code = walRollFileInfo(pWal); + ASSERT(code == 0); + ASSERT(pWal->fileInfoSet != NULL); + code = walSaveMeta(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walSaveMeta(pWal); + ASSERT(code == 0); +} +TEST_F(WalKeepEnv, readOldMeta) { + walResetEnv(); + int code; + syncMeta.isWeek = -1; + syncMeta.seqNum = UINT64_MAX; + syncMeta.term = UINT64_MAX; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); + ASSERT_EQ(pWal->vers.lastVer, i); + } + char* oldss = NULL; + code = walMetaSerialize(pWal, &oldss); + ASSERT(code == 0); + TearDown(); + SetUp(); + ASSERT_EQ(pWal->vers.firstVer, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + char* newss = NULL; + code = walMetaSerialize(pWal, &newss); + ASSERT(code == 0); + int len = strlen(oldss); + ASSERT_EQ(len, strlen(newss)); + for (int i = 0; i < len; i++) { + EXPECT_EQ(oldss[i], newss[i]); + } + taosMemoryFree(oldss); + taosMemoryFree(newss); +} +TEST_F(WalCleanEnv, write) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); + ASSERT_EQ(pWal->vers.lastVer, i); + } + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalCleanEnv, rollback) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + } + code = walRollback(pWal, 12); + ASSERT_NE(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + code = walRollback(pWal, 9); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 8); + code = walRollback(pWal, 5); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 4); + code = walRollback(pWal, 3); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 2); + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalCleanEnv, rollbackMultiFile) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + if (i == 5) { + walBeginSnapshot(pWal, i, 0); + walEndSnapshot(pWal); + } + } + code = walRollback(pWal, 12); + ASSERT_NE(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + code = 
walRollback(pWal, 9); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 8); + code = walRollback(pWal, 6); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 5); + code = walRollback(pWal, 5); + ASSERT_NE(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 5); + code = walAppendLog(pWal, 6, 6, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 6); + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalCleanDeleteEnv, roll) { + int code; + int i; + for (i = 0; i < 100; i++) { + code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walCommit(pWal, i); + ASSERT_EQ(pWal->vers.commitVer, i); + } + walBeginSnapshot(pWal, i - 1, 0); + ASSERT_EQ(pWal->vers.verInSnapshotting, i - 1); + walEndSnapshot(pWal); + ASSERT_EQ(pWal->vers.snapshotVer, i - 1); + ASSERT_EQ(pWal->vers.verInSnapshotting, -1); + code = walAppendLog(pWal, 5, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_NE(code, 0); + for (; i < 200; i++) { + code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + code = walCommit(pWal, i); + ASSERT_EQ(pWal->vers.commitVer, i); + } + code = walBeginSnapshot(pWal, i - 1, 0); + ASSERT_EQ(code, 0); + code = walEndSnapshot(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalKeepEnv, readHandleRead) { + walResetEnv(); + int code; + SWalReader* pRead = walOpenReader(pWal, NULL, 0); + ASSERT(pRead != NULL); + int i; + for (i = 0; i < 100; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 100; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + walCloseReader(pRead); +} +TEST_F(WalRetentionEnv, repairMeta1) { + walResetEnv(); + int code; + int i; + for (i = 0; i < 100; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + TearDown(); + // getchar(); + char buf[100]; + sprintf(buf, "%s/meta-ver%d", pathName, 0); + taosRemoveFile(buf); + sprintf(buf, "%s/meta-ver%d", pathName, 1); + taosRemoveFile(buf); + SetUp(); + // getchar(); + ASSERT_EQ(pWal->vers.lastVer, 99); + SWalReader* pRead = walOpenReader(pWal, NULL, 0); + ASSERT(pRead != NULL); + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 100; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + for (i = 100; i < 200; i++) { + char newStr[100]; + 
sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 200; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + walCloseReader(pRead); +} +*/ From 96b121e5d19c365c8c9bb75215b2fc4a6ebebd81 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 10 Oct 2024 12:50:07 +0800 Subject: [PATCH 005/102] tcs/test: use cpp to compile --- source/libs/tcs/test/tcsTest.c | 395 --------------------------------- 1 file changed, 395 deletions(-) delete mode 100644 source/libs/tcs/test/tcsTest.c diff --git a/source/libs/tcs/test/tcsTest.c b/source/libs/tcs/test/tcsTest.c deleted file mode 100644 index 68b39bd710..0000000000 --- a/source/libs/tcs/test/tcsTest.c +++ /dev/null @@ -1,395 +0,0 @@ -#include -#include -#include -#include -/* -#include "walInt.h" -const char* ranStr = "tvapq02tcp"; -const int ranStrLen = strlen(ranStr); -SWalSyncInfo syncMeta = {0}; -class WalCleanEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void SetUp() override { - taosRemoveDir(pathName); - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->rollPeriod = -1; - pCfg->segSize = -1; - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -class WalCleanDeleteEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void SetUp() override { - taosRemoveDir(pathName); - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -class WalKeepEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void walResetEnv() { - TearDown(); - taosRemoveDir(pathName); - SetUp(); - } - void SetUp() override { - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->rollPeriod = -1; - pCfg->segSize = -1; - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - 
SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -class WalRetentionEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void walResetEnv() { - TearDown(); - taosRemoveDir(pathName); - SetUp(); - } - void SetUp() override { - SWalCfg cfg; - cfg.rollPeriod = -1; - cfg.segSize = -1; - cfg.retentionPeriod = -1; - cfg.retentionSize = 0; - cfg.rollPeriod = 0; - cfg.vgId = 0; - cfg.level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, &cfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -TEST_F(WalCleanEnv, createNew) { - walRollFileInfo(pWal); - ASSERT(pWal->fileInfoSet != NULL); - ASSERT_EQ(pWal->fileInfoSet->size, 1); - SWalFileInfo* pInfo = (SWalFileInfo*)taosArrayGetLast(pWal->fileInfoSet); - ASSERT_EQ(pInfo->firstVer, 0); - ASSERT_EQ(pInfo->lastVer, -1); - ASSERT_EQ(pInfo->closeTs, -1); - ASSERT_EQ(pInfo->fileSize, 0); -} -TEST_F(WalCleanEnv, serialize) { - int code = walRollFileInfo(pWal); - ASSERT(code == 0); - ASSERT(pWal->fileInfoSet != NULL); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - char* ss = NULL; - code = walMetaSerialize(pWal, &ss); - ASSERT(code == 0); - printf("%s\n", ss); - taosMemoryFree(ss); - code = walSaveMeta(pWal); - ASSERT(code == 0); -} -TEST_F(WalCleanEnv, removeOldMeta) { - int code = walRollFileInfo(pWal); - ASSERT(code == 0); - ASSERT(pWal->fileInfoSet != NULL); - code = walSaveMeta(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walSaveMeta(pWal); - ASSERT(code == 0); -} -TEST_F(WalKeepEnv, readOldMeta) { - walResetEnv(); - int code; - syncMeta.isWeek = -1; - syncMeta.seqNum = UINT64_MAX; - syncMeta.term = UINT64_MAX; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); - ASSERT_EQ(pWal->vers.lastVer, i); - } - char* oldss = NULL; - code = walMetaSerialize(pWal, &oldss); - ASSERT(code == 0); - TearDown(); - SetUp(); - ASSERT_EQ(pWal->vers.firstVer, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - char* newss = NULL; - code = walMetaSerialize(pWal, &newss); - ASSERT(code == 0); - int len = strlen(oldss); - ASSERT_EQ(len, strlen(newss)); - for (int i = 0; i < len; i++) { - EXPECT_EQ(oldss[i], newss[i]); - } - taosMemoryFree(oldss); - taosMemoryFree(newss); -} -TEST_F(WalCleanEnv, write) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); - ASSERT_EQ(pWal->vers.lastVer, i); - } - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalCleanEnv, rollback) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - } - code = 
walRollback(pWal, 12); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - code = walRollback(pWal, 9); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 8); - code = walRollback(pWal, 5); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 4); - code = walRollback(pWal, 3); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 2); - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalCleanEnv, rollbackMultiFile) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - if (i == 5) { - walBeginSnapshot(pWal, i, 0); - walEndSnapshot(pWal); - } - } - code = walRollback(pWal, 12); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - code = walRollback(pWal, 9); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 8); - code = walRollback(pWal, 6); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 5); - code = walRollback(pWal, 5); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 5); - code = walAppendLog(pWal, 6, 6, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 6); - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalCleanDeleteEnv, roll) { - int code; - int i; - for (i = 0; i < 100; i++) { - code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walCommit(pWal, i); - ASSERT_EQ(pWal->vers.commitVer, i); - } - walBeginSnapshot(pWal, i - 1, 0); - ASSERT_EQ(pWal->vers.verInSnapshotting, i - 1); - walEndSnapshot(pWal); - ASSERT_EQ(pWal->vers.snapshotVer, i - 1); - ASSERT_EQ(pWal->vers.verInSnapshotting, -1); - code = walAppendLog(pWal, 5, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_NE(code, 0); - for (; i < 200; i++) { - code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - code = walCommit(pWal, i); - ASSERT_EQ(pWal->vers.commitVer, i); - } - code = walBeginSnapshot(pWal, i - 1, 0); - ASSERT_EQ(code, 0); - code = walEndSnapshot(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalKeepEnv, readHandleRead) { - walResetEnv(); - int code; - SWalReader* pRead = walOpenReader(pWal, NULL, 0); - ASSERT(pRead != NULL); - int i; - for (i = 0; i < 100; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 100; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - walCloseReader(pRead); -} -TEST_F(WalRetentionEnv, repairMeta1) { - walResetEnv(); - int code; - int i; - for (i = 0; i < 100; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - TearDown(); - // getchar(); - char buf[100]; - sprintf(buf, "%s/meta-ver%d", pathName, 0); - taosRemoveFile(buf); - sprintf(buf, "%s/meta-ver%d", pathName, 1); - 
taosRemoveFile(buf); - SetUp(); - // getchar(); - ASSERT_EQ(pWal->vers.lastVer, 99); - SWalReader* pRead = walOpenReader(pWal, NULL, 0); - ASSERT(pRead != NULL); - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 100; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - for (i = 100; i < 200; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 200; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - walCloseReader(pRead); -} -*/ From c5b4cc760b569e2e84ffcd5eff6d4f6246306ae6 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 10 Oct 2024 13:15:51 +0800 Subject: [PATCH 006/102] tcs: link az later --- source/libs/tcs/CMakeLists.txt | 2 +- source/libs/tcs/test/CMakeLists.txt | 14 +- source/libs/tcs/test/tcsTest.cpp | 395 ++++++++++++++++++++++++++++ 3 files changed, 403 insertions(+), 8 deletions(-) create mode 100644 source/libs/tcs/test/tcsTest.cpp diff --git a/source/libs/tcs/CMakeLists.txt b/source/libs/tcs/CMakeLists.txt index 1c914a18b9..4d74dedcd0 100644 --- a/source/libs/tcs/CMakeLists.txt +++ b/source/libs/tcs/CMakeLists.txt @@ -9,7 +9,7 @@ target_include_directories( target_link_libraries( tcs - PUBLIC az + # PUBLIC az PUBLIC common # PUBLIC cjson # PUBLIC os diff --git a/source/libs/tcs/test/CMakeLists.txt b/source/libs/tcs/test/CMakeLists.txt index 656c659476..33fe75c589 100644 --- a/source/libs/tcs/test/CMakeLists.txt +++ b/source/libs/tcs/test/CMakeLists.txt @@ -1,18 +1,18 @@ -aux_source_directory(. TOS_TEST_SRC) +aux_source_directory(. 
TCS_TEST_SRC) -add_executable(tosTest ${TOS_TEST_SRC}) -target_include_directories(tosTest +add_executable(tcsTest ${TCS_TEST_SRC}) +target_include_directories(tcsTest PUBLIC - "${TD_SOURCE_DIR}/include/libs/tosure" + "${TD_SOURCE_DIR}/include/libs/tcs" "${CMAKE_CURRENT_SOURCE_DIR}/../inc" ) -target_link_libraries(tosTest +target_link_libraries(tcsTest tcs gtest_main ) enable_testing() add_test( - NAME tos_test - COMMAND tosTest + NAME tcs_test + COMMAND tcsTest ) diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp new file mode 100644 index 0000000000..68b39bd710 --- /dev/null +++ b/source/libs/tcs/test/tcsTest.cpp @@ -0,0 +1,395 @@ +#include +#include +#include +#include +/* +#include "walInt.h" +const char* ranStr = "tvapq02tcp"; +const int ranStrLen = strlen(ranStr); +SWalSyncInfo syncMeta = {0}; +class WalCleanEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void SetUp() override { + taosRemoveDir(pathName); + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->rollPeriod = -1; + pCfg->segSize = -1; + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +class WalCleanDeleteEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void SetUp() override { + taosRemoveDir(pathName); + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +class WalKeepEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void walResetEnv() { + TearDown(); + taosRemoveDir(pathName); + SetUp(); + } + void SetUp() override { + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->rollPeriod = -1; + pCfg->segSize = -1; + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +class WalRetentionEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + static void TearDownTestCase() { walCleanUp(); } + void walResetEnv() { + TearDown(); + taosRemoveDir(pathName); + SetUp(); + } + void SetUp() override { + SWalCfg cfg; + cfg.rollPeriod = -1; + cfg.segSize = -1; + cfg.retentionPeriod = -1; + cfg.retentionSize = 0; + cfg.rollPeriod = 0; + cfg.vgId = 0; + cfg.level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, &cfg); + ASSERT(pWal != NULL); + } + void TearDown() 
override { + walClose(pWal); + pWal = NULL; + } + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; +TEST_F(WalCleanEnv, createNew) { + walRollFileInfo(pWal); + ASSERT(pWal->fileInfoSet != NULL); + ASSERT_EQ(pWal->fileInfoSet->size, 1); + SWalFileInfo* pInfo = (SWalFileInfo*)taosArrayGetLast(pWal->fileInfoSet); + ASSERT_EQ(pInfo->firstVer, 0); + ASSERT_EQ(pInfo->lastVer, -1); + ASSERT_EQ(pInfo->closeTs, -1); + ASSERT_EQ(pInfo->fileSize, 0); +} +TEST_F(WalCleanEnv, serialize) { + int code = walRollFileInfo(pWal); + ASSERT(code == 0); + ASSERT(pWal->fileInfoSet != NULL); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + char* ss = NULL; + code = walMetaSerialize(pWal, &ss); + ASSERT(code == 0); + printf("%s\n", ss); + taosMemoryFree(ss); + code = walSaveMeta(pWal); + ASSERT(code == 0); +} +TEST_F(WalCleanEnv, removeOldMeta) { + int code = walRollFileInfo(pWal); + ASSERT(code == 0); + ASSERT(pWal->fileInfoSet != NULL); + code = walSaveMeta(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walSaveMeta(pWal); + ASSERT(code == 0); +} +TEST_F(WalKeepEnv, readOldMeta) { + walResetEnv(); + int code; + syncMeta.isWeek = -1; + syncMeta.seqNum = UINT64_MAX; + syncMeta.term = UINT64_MAX; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); + ASSERT_EQ(pWal->vers.lastVer, i); + } + char* oldss = NULL; + code = walMetaSerialize(pWal, &oldss); + ASSERT(code == 0); + TearDown(); + SetUp(); + ASSERT_EQ(pWal->vers.firstVer, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + char* newss = NULL; + code = walMetaSerialize(pWal, &newss); + ASSERT(code == 0); + int len = strlen(oldss); + ASSERT_EQ(len, strlen(newss)); + for (int i = 0; i < len; i++) { + EXPECT_EQ(oldss[i], newss[i]); + } + taosMemoryFree(oldss); + taosMemoryFree(newss); +} +TEST_F(WalCleanEnv, write) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); + ASSERT_EQ(pWal->vers.lastVer, i); + } + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalCleanEnv, rollback) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + } + code = walRollback(pWal, 12); + ASSERT_NE(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + code = walRollback(pWal, 9); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 8); + code = walRollback(pWal, 5); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 4); + code = walRollback(pWal, 3); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 2); + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalCleanEnv, rollbackMultiFile) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + if (i == 5) { + 
walBeginSnapshot(pWal, i, 0); + walEndSnapshot(pWal); + } + } + code = walRollback(pWal, 12); + ASSERT_NE(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + code = walRollback(pWal, 9); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 8); + code = walRollback(pWal, 6); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 5); + code = walRollback(pWal, 5); + ASSERT_NE(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 5); + code = walAppendLog(pWal, 6, 6, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 6); + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalCleanDeleteEnv, roll) { + int code; + int i; + for (i = 0; i < 100; i++) { + code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walCommit(pWal, i); + ASSERT_EQ(pWal->vers.commitVer, i); + } + walBeginSnapshot(pWal, i - 1, 0); + ASSERT_EQ(pWal->vers.verInSnapshotting, i - 1); + walEndSnapshot(pWal); + ASSERT_EQ(pWal->vers.snapshotVer, i - 1); + ASSERT_EQ(pWal->vers.verInSnapshotting, -1); + code = walAppendLog(pWal, 5, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_NE(code, 0); + for (; i < 200; i++) { + code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + code = walCommit(pWal, i); + ASSERT_EQ(pWal->vers.commitVer, i); + } + code = walBeginSnapshot(pWal, i - 1, 0); + ASSERT_EQ(code, 0); + code = walEndSnapshot(pWal); + ASSERT_EQ(code, 0); +} +TEST_F(WalKeepEnv, readHandleRead) { + walResetEnv(); + int code; + SWalReader* pRead = walOpenReader(pWal, NULL, 0); + ASSERT(pRead != NULL); + int i; + for (i = 0; i < 100; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 100; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + walCloseReader(pRead); +} +TEST_F(WalRetentionEnv, repairMeta1) { + walResetEnv(); + int code; + int i; + for (i = 0; i < 100; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + TearDown(); + // getchar(); + char buf[100]; + sprintf(buf, "%s/meta-ver%d", pathName, 0); + taosRemoveFile(buf); + sprintf(buf, "%s/meta-ver%d", pathName, 1); + taosRemoveFile(buf); + SetUp(); + // getchar(); + ASSERT_EQ(pWal->vers.lastVer, 99); + SWalReader* pRead = walOpenReader(pWal, NULL, 0); + ASSERT(pRead != NULL); + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 100; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + 
ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + for (i = 100; i < 200; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 200; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + walCloseReader(pRead); +} +*/ From 09cf91f256a9c64f87ac928a093ab83b7ac927fb Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 10 Oct 2024 14:52:42 +0800 Subject: [PATCH 007/102] tcs/dnode: use tcs interface for dnode --- source/common/src/tglobal.c | 59 ++++++++++++---------- source/dnode/mgmt/exe/dmMain.c | 6 +-- source/dnode/mgmt/node_mgmt/CMakeLists.txt | 2 +- source/dnode/mgmt/node_mgmt/src/dmEnv.c | 18 +++---- 4 files changed, 42 insertions(+), 43 deletions(-) diff --git a/source/common/src/tglobal.c b/source/common/src/tglobal.c index af1a8ccfbe..45546f9a59 100644 --- a/source/common/src/tglobal.c +++ b/source/common/src/tglobal.c @@ -179,12 +179,12 @@ int32_t tsRedirectFactor = 2; int32_t tsRedirectMaxPeriod = 1000; int32_t tsMaxRetryWaitTime = 10000; bool tsUseAdapter = false; -int32_t tsMetaCacheMaxSize = -1; // MB -int32_t tsSlowLogThreshold = 10; // seconds -int32_t tsSlowLogThresholdTest = INT32_MAX; // seconds -char tsSlowLogExceptDb[TSDB_DB_NAME_LEN] = ""; // seconds +int32_t tsMetaCacheMaxSize = -1; // MB +int32_t tsSlowLogThreshold = 10; // seconds +int32_t tsSlowLogThresholdTest = INT32_MAX; // seconds +char tsSlowLogExceptDb[TSDB_DB_NAME_LEN] = ""; // seconds int32_t tsSlowLogScope = SLOW_LOG_TYPE_QUERY; -char* tsSlowLogScopeString = "query"; +char *tsSlowLogScopeString = "query"; int32_t tsSlowLogMaxLen = 4096; int32_t tsTimeSeriesThreshold = 50; bool tsMultiResultFunctionStarReturnTags = false; @@ -306,6 +306,7 @@ char tsS3AppId[TSDB_MAX_EP_NUM][TSDB_FQDN_LEN] = {""}; int8_t tsS3Enabled = false; int8_t tsS3EnabledCfg = false; int8_t tsS3Oss[TSDB_MAX_EP_NUM] = {false}; +int8_t tsS3Ablob = false; int8_t tsS3StreamEnabled = false; int8_t tsS3Https[TSDB_MAX_EP_NUM] = {true}; @@ -322,7 +323,6 @@ int32_t tsMaxTsmaNum = 3; int32_t tsMaxTsmaCalcDelay = 600; int64_t tsmaDataDeleteMark = 1000 * 60 * 60 * 24; // in ms, default to 1d - #define TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, pName) \ if ((pItem = cfgGetItem(pCfg, pName)) == NULL) { \ TAOS_RETURN(TSDB_CODE_CFG_NOT_FOUND); \ @@ -361,7 +361,7 @@ static int32_t taosSplitS3Cfg(SConfig *pCfg, const char *name, char gVarible[TSD TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, name); char *strDup = NULL; - if ((strDup = taosStrdup(pItem->str))== NULL){ + if ((strDup = taosStrdup(pItem->str)) == NULL) { code = TSDB_CODE_OUT_OF_MEMORY; goto _exit; } @@ -435,6 +435,7 @@ int32_t taosSetS3Cfg(SConfig *pCfg) { } tsS3Https[i] = (strstr(tsS3Endpoint[i], "https://") != NULL); tsS3Oss[i] = (strstr(tsS3Endpoint[i], "aliyuncs.") != NULL); + tsS3Ablob = (strstr(tsS3Endpoint[i], ".blob.core.windows.net") != NULL); } if (tsS3BucketName[0] 
!= '<') { @@ -450,7 +451,9 @@ int32_t taosSetS3Cfg(SConfig *pCfg) { TAOS_RETURN(TSDB_CODE_SUCCESS); } -struct SConfig *taosGetCfg() { return tsCfg; } +struct SConfig *taosGetCfg() { + return tsCfg; +} static int32_t taosLoadCfg(SConfig *pCfg, const char **envCmd, const char *inputCfgDir, const char *envFile, char *apolloUrl) { @@ -572,7 +575,8 @@ static int32_t taosAddClientCfg(SConfig *pCfg) { TAOS_CHECK_RETURN( cfgAddInt32(pCfg, "compressMsgSize", tsCompressMsgSize, -1, 100000000, CFG_SCOPE_BOTH, CFG_DYN_CLIENT)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "queryPolicy", tsQueryPolicy, 1, 4, CFG_SCOPE_CLIENT, CFG_DYN_ENT_CLIENT)); - TAOS_CHECK_RETURN(cfgAddBool(pCfg, "queryTableNotExistAsEmpty", tsQueryTbNotExistAsEmpty, CFG_SCOPE_CLIENT, CFG_DYN_CLIENT)); + TAOS_CHECK_RETURN( + cfgAddBool(pCfg, "queryTableNotExistAsEmpty", tsQueryTbNotExistAsEmpty, CFG_SCOPE_CLIENT, CFG_DYN_CLIENT)); TAOS_CHECK_RETURN(cfgAddBool(pCfg, "enableQueryHb", tsEnableQueryHb, CFG_SCOPE_CLIENT, CFG_DYN_CLIENT)); TAOS_CHECK_RETURN(cfgAddBool(pCfg, "enableScience", tsEnableScience, CFG_SCOPE_CLIENT, CFG_DYN_NONE)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "querySmaOptimize", tsQuerySmaOptimize, 0, 1, CFG_SCOPE_CLIENT, CFG_DYN_CLIENT)); @@ -600,7 +604,8 @@ static int32_t taosAddClientCfg(SConfig *pCfg) { TAOS_CHECK_RETURN( cfgAddInt32(pCfg, "metaCacheMaxSize", tsMetaCacheMaxSize, -1, INT32_MAX, CFG_SCOPE_CLIENT, CFG_DYN_CLIENT)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "randErrorChance", tsRandErrChance, 0, 10000, CFG_SCOPE_BOTH, CFG_DYN_BOTH)); - TAOS_CHECK_RETURN(cfgAddInt64(pCfg, "randErrorDivisor", tsRandErrDivisor, 1, INT64_MAX, CFG_SCOPE_BOTH, CFG_DYN_BOTH)); + TAOS_CHECK_RETURN( + cfgAddInt64(pCfg, "randErrorDivisor", tsRandErrDivisor, 1, INT64_MAX, CFG_SCOPE_BOTH, CFG_DYN_BOTH)); TAOS_CHECK_RETURN(cfgAddInt64(pCfg, "randErrorScope", tsRandErrScope, 0, INT64_MAX, CFG_SCOPE_BOTH, CFG_DYN_BOTH)); tsNumOfRpcThreads = tsNumOfCores / 2; @@ -1088,9 +1093,9 @@ int32_t taosSetSlowLogScope(char *pScopeStr, int32_t *pScope) { int32_t slowScope = 0; - char* scope = NULL; - char *tmp = NULL; - while((scope = strsep(&pScopeStr, "|")) != NULL){ + char *scope = NULL; + char *tmp = NULL; + while ((scope = strsep(&pScopeStr, "|")) != NULL) { taosMemoryFreeClear(tmp); tmp = taosStrdup(scope); if (tmp == NULL) { @@ -1147,13 +1152,13 @@ static int32_t taosSetClientCfg(SConfig *pCfg) { (void)snprintf(defaultFirstEp, TSDB_EP_LEN, "%s:%u", tsLocalFqdn, tsServerPort); TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, "firstEp"); - SEp firstEp = {0}; + SEp firstEp = {0}; TAOS_CHECK_RETURN(taosGetFqdnPortFromEp(strlen(pItem->str) == 0 ? defaultFirstEp : pItem->str, &firstEp)); (void)snprintf(tsFirst, sizeof(tsFirst), "%s:%u", firstEp.fqdn, firstEp.port); TAOS_CHECK_RETURN(cfgSetItem(pCfg, "firstEp", tsFirst, pItem->stype, true)); TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, "secondEp"); - SEp secondEp = {0}; + SEp secondEp = {0}; TAOS_CHECK_RETURN(taosGetFqdnPortFromEp(strlen(pItem->str) == 0 ? 
defaultFirstEp : pItem->str, &secondEp)); (void)snprintf(tsSecond, sizeof(tsSecond), "%s:%u", secondEp.fqdn, secondEp.port); TAOS_CHECK_RETURN(cfgSetItem(pCfg, "secondEp", tsSecond, pItem->stype, true)); @@ -1653,8 +1658,8 @@ static int32_t taosSetAllDebugFlag(SConfig *pCfg, int32_t flag); int32_t taosCreateLog(const char *logname, int32_t logFileNum, const char *cfgDir, const char **envCmd, const char *envFile, char *apolloUrl, SArray *pArgs, bool tsc) { - int32_t code = TSDB_CODE_SUCCESS; - int32_t lino = 0; + int32_t code = TSDB_CODE_SUCCESS; + int32_t lino = 0; SConfig *pCfg = NULL; if (tsCfg == NULL) { @@ -1725,7 +1730,7 @@ int32_t taosReadDataFolder(const char *cfgDir, const char **envCmd, const char * TAOS_CHECK_RETURN(cfgInit(&pCfg)); TAOS_CHECK_GOTO(cfgAddDir(pCfg, "dataDir", tsDataDir, CFG_SCOPE_SERVER, CFG_DYN_NONE), NULL, _exit); - TAOS_CHECK_GOTO(cfgAddInt32(pCfg, "dDebugFlag", dDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER) ,NULL, _exit); + TAOS_CHECK_GOTO(cfgAddInt32(pCfg, "dDebugFlag", dDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER), NULL, _exit); if ((code = taosLoadCfg(pCfg, envCmd, cfgDir, envFile, apolloUrl)) != 0) { (void)printf("failed to load cfg since %s\n", tstrerror(code)); @@ -1754,7 +1759,7 @@ _exit: static int32_t taosCheckGlobalCfg() { uint32_t ipv4 = 0; - int32_t code = taosGetIpv4FromFqdn(tsLocalFqdn, &ipv4); + int32_t code = taosGetIpv4FromFqdn(tsLocalFqdn, &ipv4); if (code) { uError("failed to get ip from fqdn:%s since %s, dnode can not be initialized", tsLocalFqdn, tstrerror(code)); TAOS_RETURN(TSDB_CODE_RPC_FQDN_ERROR); @@ -1859,7 +1864,7 @@ typedef struct { static int32_t taosCfgSetOption(OptionNameAndVar *pOptions, int32_t optionSize, SConfigItem *pItem, bool isDebugflag) { int32_t code = TSDB_CODE_CFG_NOT_FOUND; - char *name = pItem->name; + char *name = pItem->name; for (int32_t d = 0; d < optionSize; ++d) { const char *optName = pOptions[d].optionName; if (strcasecmp(name, optName) != 0) continue; @@ -2054,8 +2059,8 @@ static int32_t taosCfgDynamicOptionsForClient(SConfig *pCfg, const char *name) { } case 'f': { if (strcasecmp("fqdn", name) == 0) { - SConfigItem* pFqdnItem = cfgGetItem(pCfg, "fqdn"); - SConfigItem* pServerPortItem = cfgGetItem(pCfg, "serverPort"); + SConfigItem *pFqdnItem = cfgGetItem(pCfg, "fqdn"); + SConfigItem *pServerPortItem = cfgGetItem(pCfg, "serverPort"); SConfigItem *pFirstEpItem = cfgGetItem(pCfg, "firstEp"); if (pFqdnItem == NULL || pServerPortItem == NULL || pFirstEpItem == NULL) { uError("failed to get fqdn or serverPort or firstEp from cfg"); @@ -2070,7 +2075,7 @@ static int32_t taosCfgDynamicOptionsForClient(SConfig *pCfg, const char *name) { char defaultFirstEp[TSDB_EP_LEN] = {0}; (void)snprintf(defaultFirstEp, TSDB_EP_LEN, "%s:%u", tsLocalFqdn, tsServerPort); - SEp firstEp = {0}; + SEp firstEp = {0}; TAOS_CHECK_GOTO( taosGetFqdnPortFromEp(strlen(pFirstEpItem->str) == 0 ? 
defaultFirstEp : pFirstEpItem->str, &firstEp), &lino, _out); @@ -2110,8 +2115,8 @@ static int32_t taosCfgDynamicOptionsForClient(SConfig *pCfg, const char *name) { } case 'l': { if (strcasecmp("locale", name) == 0) { - SConfigItem* pLocaleItem = cfgGetItem(pCfg, "locale"); - SConfigItem* pCharsetItem = cfgGetItem(pCfg, "charset"); + SConfigItem *pLocaleItem = cfgGetItem(pCfg, "locale"); + SConfigItem *pCharsetItem = cfgGetItem(pCfg, "charset"); if (pLocaleItem == NULL || pCharsetItem == NULL) { uError("failed to get locale or charset from cfg"); code = TSDB_CODE_CFG_NOT_FOUND; @@ -2184,7 +2189,7 @@ static int32_t taosCfgDynamicOptionsForClient(SConfig *pCfg, const char *name) { char defaultFirstEp[TSDB_EP_LEN] = {0}; (void)snprintf(defaultFirstEp, TSDB_EP_LEN, "%s:%u", tsLocalFqdn, tsServerPort); - SEp firstEp = {0}; + SEp firstEp = {0}; TAOS_CHECK_GOTO( taosGetFqdnPortFromEp(strlen(pFirstEpItem->str) == 0 ? defaultFirstEp : pFirstEpItem->str, &firstEp), &lino, _out); @@ -2315,7 +2320,7 @@ int32_t taosSetGlobalDebugFlag(int32_t flag) { return taosSetAllDebugFlag(tsCfg, // NOTE: set all command does not change the tmrDebugFlag static int32_t taosSetAllDebugFlag(SConfig *pCfg, int32_t flag) { if (flag < 0) TAOS_RETURN(TSDB_CODE_INVALID_PARA); - if (flag == 0) TAOS_RETURN(TSDB_CODE_SUCCESS); // just ignore + if (flag == 0) TAOS_RETURN(TSDB_CODE_SUCCESS); // just ignore SArray *noNeedToSetVars = NULL; SConfigItem *pItem = NULL; diff --git a/source/dnode/mgmt/exe/dmMain.c b/source/dnode/mgmt/exe/dmMain.c index 89569d69d6..6069dc33f1 100644 --- a/source/dnode/mgmt/exe/dmMain.c +++ b/source/dnode/mgmt/exe/dmMain.c @@ -23,6 +23,7 @@ #include "jemalloc/jemalloc.h" #endif #include "dmUtil.h" +#include "tcs.h" #if defined(CUS_NAME) || defined(CUS_PROMPT) || defined(CUS_EMAIL) #include "cus_name.h" @@ -325,10 +326,9 @@ static int32_t dmCheckS3() { int32_t code = 0; SConfig *pCfg = taosGetCfg(); cfgDumpCfgS3(pCfg, 0, true); -#if defined(USE_S3) - extern int32_t s3CheckCfg(); - code = s3CheckCfg(); +#if defined(USE_S3) + code = tcsCheckCfg(); #endif return code; } diff --git a/source/dnode/mgmt/node_mgmt/CMakeLists.txt b/source/dnode/mgmt/node_mgmt/CMakeLists.txt index 82b9384d66..98de62eee1 100644 --- a/source/dnode/mgmt/node_mgmt/CMakeLists.txt +++ b/source/dnode/mgmt/node_mgmt/CMakeLists.txt @@ -1,7 +1,7 @@ aux_source_directory(src IMPLEMENT_SRC) add_library(dnode STATIC ${IMPLEMENT_SRC}) target_link_libraries( - dnode mgmt_mnode mgmt_qnode mgmt_snode mgmt_vnode mgmt_dnode monitorfw + dnode mgmt_mnode mgmt_qnode mgmt_snode mgmt_vnode mgmt_dnode monitorfw tcs ) IF (TD_ENTERPRISE) diff --git a/source/dnode/mgmt/node_mgmt/src/dmEnv.c b/source/dnode/mgmt/node_mgmt/src/dmEnv.c index 2d0ad70adf..d72bc79034 100644 --- a/source/dnode/mgmt/node_mgmt/src/dmEnv.c +++ b/source/dnode/mgmt/node_mgmt/src/dmEnv.c @@ -20,6 +20,7 @@ #include "libs/function/tudf.h" #include "tgrant.h" #include "tcompare.h" +#include "tcs.h" // clang-format on #define DM_INIT_AUDIT() \ @@ -97,9 +98,9 @@ static bool dmDataSpaceAvailable() { static int32_t dmCheckDiskSpace() { // availability int32_t code = 0; - code = osUpdate(); - if(code != 0) { - code = 0; // ignore the error, just log it + code = osUpdate(); + if (code != 0) { + code = 0; // ignore the error, just log it dError("failed to update os info since %s", tstrerror(code)); } if (!dmDataSpaceAvailable()) { @@ -162,13 +163,6 @@ static int32_t dmCheckDataDirVersionWrapper() { } return 0; } -#if defined(USE_S3) - -extern int32_t s3Begin(); -extern void s3End(); -extern int8_t 
tsS3Enabled; - -#endif int32_t dmInit() { dInfo("start to init dnode env"); @@ -186,7 +180,7 @@ int32_t dmInit() { if ((code = dmInitDnode(dmInstance())) != 0) return code; if ((code = InitRegexCache() != 0)) return code; #if defined(USE_S3) - if ((code = s3Begin()) != 0) return code; + if ((code = tcsInit()) != 0) return code; #endif dInfo("dnode env is initialized"); @@ -219,7 +213,7 @@ void dmCleanup() { DestroyRegexCache(); #if defined(USE_S3) - s3End(); + tcsUninit(); #endif dInfo("dnode env is cleaned up"); From 3905c94f035db22d95d7c2eca7644dfbb004665a Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 10 Oct 2024 16:34:37 +0800 Subject: [PATCH 008/102] vnode/tcs: use tcs instead of s3 interface --- source/dnode/vnode/CMakeLists.txt | 3 ++ source/dnode/vnode/src/tsdb/tsdbCache.c | 12 +++--- source/dnode/vnode/src/tsdb/tsdbFile2.c | 4 +- .../dnode/vnode/src/tsdb/tsdbReaderWriter.c | 4 +- source/dnode/vnode/src/tsdb/tsdbRetention.c | 37 ++----------------- source/dnode/vnode/src/vnd/vnodeOpen.c | 4 +- 6 files changed, 18 insertions(+), 46 deletions(-) diff --git a/source/dnode/vnode/CMakeLists.txt b/source/dnode/vnode/CMakeLists.txt index f70a8844ba..83ed98d7b7 100644 --- a/source/dnode/vnode/CMakeLists.txt +++ b/source/dnode/vnode/CMakeLists.txt @@ -119,6 +119,7 @@ if (${BUILD_CONTRIB}) vnode PUBLIC "inc" PUBLIC "src/inc" + PUBLIC "${TD_SOURCE_DIR}/include/libs/tcs" PUBLIC "${TD_SOURCE_DIR}/include/libs/scalar" PUBLIC "${TD_SOURCE_DIR}/include/libs/crypt" PUBLIC "${TD_SOURCE_DIR}/include/dnode/vnode" @@ -129,6 +130,7 @@ else() vnode PUBLIC "inc" PUBLIC "src/inc" + PUBLIC "${TD_SOURCE_DIR}/include/libs/tcs" PUBLIC "${TD_SOURCE_DIR}/include/libs/scalar" PUBLIC "${TD_SOURCE_DIR}/include/libs/crypt" PUBLIC "${TD_SOURCE_DIR}/include/dnode/vnode" @@ -164,6 +166,7 @@ target_link_libraries( PUBLIC tdb PUBLIC audit PUBLIC crypt + PUBLIC tcs # PUBLIC bdb # PUBLIC scalar diff --git a/source/dnode/vnode/src/tsdb/tsdbCache.c b/source/dnode/vnode/src/tsdb/tsdbCache.c index 85f74b1672..ac5689724f 100644 --- a/source/dnode/vnode/src/tsdb/tsdbCache.c +++ b/source/dnode/vnode/src/tsdb/tsdbCache.c @@ -12,8 +12,8 @@ * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see . 
*/ -#include "cos.h" #include "functionMgt.h" +#include "tcs.h" #include "tsdb.h" #include "tsdbDataFileRW.h" #include "tsdbIter.h" @@ -1251,7 +1251,8 @@ static int32_t tsdbCacheUpdate(STsdb *pTsdb, tb_uid_t suid, tb_uid_t uid, SArray } if (NULL == pLastCol || cmp_res < 0 || (cmp_res == 0 && !COL_VAL_IS_NONE(pColVal))) { - SLastCol lastColTmp = {.rowKey = *pRowKey, .colVal = *pColVal, .dirty = 0, .cacheStatus = TSDB_LAST_CACHE_VALID}; + SLastCol lastColTmp = { + .rowKey = *pRowKey, .colVal = *pColVal, .dirty = 0, .cacheStatus = TSDB_LAST_CACHE_VALID}; if ((code = tsdbCachePutToRocksdb(pTsdb, &idxKey->key, &lastColTmp)) != TSDB_CODE_SUCCESS) { tsdbError("tsdb/cache: vgId:%d, put rocks failed at line %d since %s.", TD_VID(pTsdb->pVnode), lino, tstrerror(code)); @@ -1698,8 +1699,7 @@ static int32_t tsdbCacheLoadFromRocks(STsdb *pTsdb, tb_uid_t uid, SArray *pLastA if (pLastCol && pLastCol->cacheStatus != TSDB_LAST_CACHE_NO_CACHE) { code = tsdbCachePutToLRU(pTsdb, &idxKey->key, pLastCol, 0); if (code) { - tsdbError("vgId:%d, %s failed at line %d since %s", TD_VID(pTsdb->pVnode), __func__, __LINE__, - tstrerror(code)); + tsdbError("vgId:%d, %s failed at line %d since %s", TD_VID(pTsdb->pVnode), __func__, __LINE__, tstrerror(code)); taosMemoryFreeClear(pToFree); TAOS_CHECK_EXIT(code); } @@ -3503,7 +3503,7 @@ static int32_t tsdbCacheLoadBlockS3(STsdbFD *pFD, uint8_t **ppBlock) { int64_t block_offset = (pFD->blkno - 1) * tsS3BlockSize * pFD->szPage; - TAOS_CHECK_RETURN(s3GetObjectBlock(pFD->objName, block_offset, tsS3BlockSize * pFD->szPage, 0, ppBlock)); + TAOS_CHECK_RETURN(tcsGetObjectBlock(pFD->objName, block_offset, tsS3BlockSize * pFD->szPage, 0, ppBlock)); tsdbTrace("block:%p load from s3", *ppBlock); @@ -3600,4 +3600,4 @@ void tsdbCacheSetPageS3(SLRUCache *pCache, STsdbFD *pFD, int64_t pgno, uint8_t * (void)taosThreadMutexUnlock(&pFD->pTsdb->pgMutex); tsdbCacheRelease(pFD->pTsdb->pgCache, handle); -} \ No newline at end of file +} diff --git a/source/dnode/vnode/src/tsdb/tsdbFile2.c b/source/dnode/vnode/src/tsdb/tsdbFile2.c index da78d67db3..ad5f02d601 100644 --- a/source/dnode/vnode/src/tsdb/tsdbFile2.c +++ b/source/dnode/vnode/src/tsdb/tsdbFile2.c @@ -14,7 +14,7 @@ */ #include "tsdbFile2.h" -#include "cos.h" +#include "tcs.h" #include "vnd.h" // to_json @@ -318,7 +318,7 @@ static void tsdbTFileObjRemoveLC(STFileObj *fobj, bool remove_all) { } *(dot + 1) = 0; - s3DeleteObjectsByPrefix(object_name_prefix); + tcsDeleteObjectsByPrefix(object_name_prefix); // remove local last chunk file dot = strrchr(lc_path, '.'); diff --git a/source/dnode/vnode/src/tsdb/tsdbReaderWriter.c b/source/dnode/vnode/src/tsdb/tsdbReaderWriter.c index d867318e1c..53e1c57f14 100644 --- a/source/dnode/vnode/src/tsdb/tsdbReaderWriter.c +++ b/source/dnode/vnode/src/tsdb/tsdbReaderWriter.c @@ -13,8 +13,8 @@ * along with this program. If not, see . 
*/ -#include "cos.h" #include "crypt.h" +#include "tcs.h" #include "tsdb.h" #include "tsdbDef.h" #include "vnd.h" @@ -391,7 +391,7 @@ static int32_t tsdbReadFileBlock(STsdbFD *pFD, int64_t offset, int64_t size, boo snprintf(dot + 1, TSDB_FQDN_LEN - (dot + 1 - object_name_prefix), "%d.data", chunkno); - code = s3GetObjectBlock(object_name_prefix, cOffset, nRead, check, &pBlock); + code = tcsGetObjectBlock(object_name_prefix, cOffset, nRead, check, &pBlock); TSDB_CHECK_CODE(code, lino, _exit); memcpy(buf + n, pBlock, nRead); diff --git a/source/dnode/vnode/src/tsdb/tsdbRetention.c b/source/dnode/vnode/src/tsdb/tsdbRetention.c index cbe2ab4b8e..0072fd5e7f 100644 --- a/source/dnode/vnode/src/tsdb/tsdbRetention.c +++ b/source/dnode/vnode/src/tsdb/tsdbRetention.c @@ -13,7 +13,7 @@ * along with this program. If not, see . */ -#include "cos.h" +#include "tcs.h" #include "tsdb.h" #include "tsdbFS2.h" #include "vnd.h" @@ -426,35 +426,6 @@ static int32_t tsdbS3FidLevel(int32_t fid, STsdbKeepCfg *pKeepCfg, int32_t s3Kee } } -static int32_t tsdbCopyFileS3(SRTNer *rtner, const STFileObj *from, const STFile *to) { - int32_t code = 0; - int32_t lino = 0; - - char fname[TSDB_FILENAME_LEN]; - TdFilePtr fdFrom = NULL; - // TdFilePtr fdTo = NULL; - - tsdbTFileName(rtner->tsdb, to, fname); - - fdFrom = taosOpenFile(from->fname, TD_FILE_READ); - if (fdFrom == NULL) { - TAOS_CHECK_GOTO(terrno, &lino, _exit); - } - - char *object_name = taosDirEntryBaseName(fname); - TAOS_CHECK_GOTO(s3PutObjectFromFile2(from->fname, object_name, 1), &lino, _exit); - -_exit: - if (code) { - tsdbError("vgId:%d %s failed at line %s:%d since %s", TD_VID(rtner->tsdb->pVnode), __func__, __FILE__, lino, - tstrerror(code)); - } - if (taosCloseFile(&fdFrom) != 0) { - tsdbTrace("vgId:%d, failed to close file", TD_VID(rtner->tsdb->pVnode)); - } - return code; -} - static int32_t tsdbMigrateDataFileLCS3(SRTNer *rtner, const STFileObj *fobj, int64_t size, int64_t chunksize) { int32_t code = 0; int32_t lino = 0; @@ -519,7 +490,7 @@ static int32_t tsdbMigrateDataFileLCS3(SRTNer *rtner, const STFileObj *fobj, int snprintf(dot + 1, TSDB_FQDN_LEN - (dot + 1 - object_name_prefix), "%d.data", cn); int64_t c_offset = chunksize * (cn - fobj->f->lcn); - TAOS_CHECK_GOTO(s3PutObjectFromFileOffset(fname, object_name_prefix, c_offset, chunksize), &lino, _exit); + TAOS_CHECK_GOTO(tcsPutObjectFromFileOffset(fname, object_name_prefix, c_offset, chunksize), &lino, _exit); } // copy last chunk @@ -618,7 +589,7 @@ static int32_t tsdbMigrateDataFileS3(SRTNer *rtner, const STFileObj *fobj, int64 snprintf(dot + 1, TSDB_FQDN_LEN - (dot + 1 - object_name_prefix), "%d.data", cn); int64_t c_offset = chunksize * (cn - 1); - TAOS_CHECK_GOTO(s3PutObjectFromFileOffset(fobj->fname, object_name_prefix, c_offset, chunksize), &lino, _exit); + TAOS_CHECK_GOTO(tcsPutObjectFromFileOffset(fobj->fname, object_name_prefix, c_offset, chunksize), &lino, _exit); } // copy last chunk @@ -741,8 +712,6 @@ _exit: int32_t tsdbAsyncS3Migrate(STsdb *tsdb, int64_t now) { int32_t code = 0; - extern int8_t tsS3EnabledCfg; - int32_t expired = grantCheck(TSDB_GRANT_OBJECT_STORAGE); if (expired && tsS3Enabled) { tsdbWarn("s3 grant expired: %d", expired); diff --git a/source/dnode/vnode/src/vnd/vnodeOpen.c b/source/dnode/vnode/src/vnd/vnodeOpen.c index 0d04486925..53365303b0 100644 --- a/source/dnode/vnode/src/vnd/vnodeOpen.c +++ b/source/dnode/vnode/src/vnd/vnodeOpen.c @@ -13,8 +13,8 @@ * along with this program. If not, see . 
*/ -#include "cos.h" #include "sync.h" +#include "tcs.h" #include "tsdb.h" #include "vnd.h" @@ -327,7 +327,7 @@ void vnodeDestroy(int32_t vgId, const char *path, STfs *pTfs, int32_t nodeId) { if (nodeId > 0 && vgId > 0 /*&& nlevel > 1*/ && tsS3Enabled) { char vnode_prefix[TSDB_FILENAME_LEN]; snprintf(vnode_prefix, TSDB_FILENAME_LEN, "%d/v%df", nodeId, vgId); - s3DeleteObjectsByPrefix(vnode_prefix); + tcsDeleteObjectsByPrefix(vnode_prefix); } } From c0ef07c050bdea314d23e5a7824721e8d3eec670 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 10 Oct 2024 18:48:34 +0800 Subject: [PATCH 009/102] az: libaz module from ablob --- include/libs/azure/az.h | 45 ++ source/libs/CMakeLists.txt | 2 +- source/libs/azure/CMakeLists.txt | 32 + .../libs/azure/inc/td_block_blob_client.hpp | 260 ++++++++ source/libs/azure/src/avro_parser.cpp | 531 +++++++++++++++ source/libs/azure/src/avro_parser.hpp | 198 ++++++ source/libs/azure/src/az.cpp | 402 +++++++++++ .../libs/azure/src/td_block_blob_client.cpp | 625 ++++++++++++++++++ source/libs/azure/test/CMakeLists.txt | 18 + source/libs/azure/test/azTest.cpp | 457 +++++++++++++ source/libs/tcs/CMakeLists.txt | 2 +- source/libs/tcs/src/tcs.c | 5 +- 12 files changed, 2572 insertions(+), 5 deletions(-) create mode 100644 include/libs/azure/az.h create mode 100644 source/libs/azure/CMakeLists.txt create mode 100644 source/libs/azure/inc/td_block_blob_client.hpp create mode 100644 source/libs/azure/src/avro_parser.cpp create mode 100644 source/libs/azure/src/avro_parser.hpp create mode 100644 source/libs/azure/src/az.cpp create mode 100644 source/libs/azure/src/td_block_blob_client.cpp create mode 100644 source/libs/azure/test/CMakeLists.txt create mode 100644 source/libs/azure/test/azTest.cpp diff --git a/include/libs/azure/az.h b/include/libs/azure/az.h new file mode 100644 index 0000000000..55839b0727 --- /dev/null +++ b/include/libs/azure/az.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + +#ifndef _TD_AZURE_H_ +#define _TD_AZURE_H_ + +#include "os.h" +#include "tarray.h" +#include "tdef.h" +#include "tlog.h" +#include "tmsg.h" + +#ifdef __cplusplus +extern "C" { +#endif + +int32_t azBegin(); +void azEnd(); +int32_t azCheckCfg(); +int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size); +int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock); +void azDeleteObjectsByPrefix(const char *prefix); + +int32_t azPutObjectFromFile2(const char *file, const char *object, int8_t withcp); +int32_t azGetObjectsByPrefix(const char *prefix, const char *path); +int32_t azGetObjectToFile(const char *object_name, const char *fileName); +int32_t azDeleteObjects(const char *object_name[], int nobject); + +#ifdef __cplusplus +} +#endif + +#endif // _TD_AZURE_H_ diff --git a/source/libs/CMakeLists.txt b/source/libs/CMakeLists.txt index 41a1e99521..033582f2c0 100644 --- a/source/libs/CMakeLists.txt +++ b/source/libs/CMakeLists.txt @@ -23,5 +23,5 @@ add_subdirectory(planner) add_subdirectory(qworker) add_subdirectory(geometry) add_subdirectory(command) -#add_subdirectory(azure) +add_subdirectory(azure) add_subdirectory(tcs) diff --git a/source/libs/azure/CMakeLists.txt b/source/libs/azure/CMakeLists.txt new file mode 100644 index 0000000000..1d46a2924b --- /dev/null +++ b/source/libs/azure/CMakeLists.txt @@ -0,0 +1,32 @@ +#if(${TD_LINUX}) +aux_source_directory(src AZ_SRC) + +add_library(az STATIC ${AZ_SRC}) + +if(${BUILD_S3}) + add_definitions(-DUSE_S3) + target_link_libraries( + az + PUBLIC _azure_sdk + PUBLIC crypt + ) +endif() + +target_include_directories( + az + PUBLIC "${TD_SOURCE_DIR}/include/libs/azure" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" +) + +target_link_libraries( + az + PUBLIC cjson + PUBLIC os + PUBLIC util + PUBLIC common +) + +if(${BUILD_TEST}) + add_subdirectory(test) +endif(${BUILD_TEST}) +#endif(${TD_LINUX}) diff --git a/source/libs/azure/inc/td_block_blob_client.hpp b/source/libs/azure/inc/td_block_blob_client.hpp new file mode 100644 index 0000000000..1b00104821 --- /dev/null +++ b/source/libs/azure/inc/td_block_blob_client.hpp @@ -0,0 +1,260 @@ +#pragma once + +#include "azure/storage/blobs/blob_client.hpp" + +#include +#include +#include + +namespace Azure { +namespace Storage { +namespace Files { +namespace DataLake { +class FileClient; +} +} // namespace Files +} // namespace Storage +} // namespace Azure + +namespace Azure { +namespace Storage { +namespace Blobs { + +/** + * @brief The TDBlockBlobClient allows you to manipulate Azure Storage block blobs. + * + * Block blobs let you upload large blobs efficiently. Block blobs are comprised of blocks, each + * of which is identified by a block ID. You create or modify a block blob by writing a set of + * blocks and committing them by their block IDs. Each block can be a different size. + * + * When you upload a block to a blob in your storage account, it is associated with the specified + * block blob, but it does not become part of the blob until you commit a list of blocks that + * includes the new block's ID. New blocks remain in an uncommitted state until they are + * specifically committed or discarded. Writing a block does not update the last modified time of + * an existing blob. + */ +class TDBlockBlobClient final : public BlobClient { + public: + /** + * @brief Initialize a new instance of TDBlockBlobClient. 
+ * + * @param connectionString A connection string includes the authentication information required + * for your application to access data in an Azure Storage account at runtime. + * @param blobContainerName The name of the container containing this blob. + * @param blobName The name of this blob. + * @param options Optional client options that define the transport pipeline policies for + * authentication, retries, etc., that are applied to every request. + * @return A new TDBlockBlobClient instance. + */ + static TDBlockBlobClient CreateFromConnectionString(const std::string& connectionString, + const std::string& blobContainerName, const std::string& blobName, + const BlobClientOptions& options = BlobClientOptions()); + + /** + * @brief Initialize a new instance of TDBlockBlobClient. + * + * @param blobUrl A URL + * referencing the blob that includes the name of the account, the name of the container, and + * the name of the blob. + * @param credential The shared key credential used to sign + * requests. + * @param options Optional client options that define the transport pipeline + * policies for authentication, retries, etc., that are applied to every request. + */ + explicit TDBlockBlobClient(const std::string& blobUrl, std::shared_ptr credential, + const BlobClientOptions& options = BlobClientOptions()); + + /** + * @brief Initialize a new instance of TDBlockBlobClient. + * + * @param blobUrl A URL + * referencing the blob that includes the name of the account, the name of the container, and + * the name of the blob. + * @param credential The token credential used to sign requests. + * @param options Optional client options that define the transport pipeline policies for + * authentication, retries, etc., that are applied to every request. + */ + explicit TDBlockBlobClient(const std::string& blobUrl, std::shared_ptr credential, + const BlobClientOptions& options = BlobClientOptions()); + + /** + * @brief Initialize a new instance of TDBlockBlobClient. + * + * @param blobUrl A URL + * referencing the blob that includes the name of the account, the name of the container, and + * the name of the blob, and possibly also a SAS token. + * @param options Optional client + * options that define the transport pipeline policies for authentication, retries, etc., that + * are applied to every request. + */ + explicit TDBlockBlobClient(const std::string& blobUrl, const BlobClientOptions& options = BlobClientOptions()); + + /** + * @brief Initializes a new instance of the TDBlockBlobClient class with an identical URL + * source but the specified snapshot timestamp. + * + * @param snapshot The snapshot + * identifier. + * @return A new TDBlockBlobClient instance. + * @remarks Pass empty string to remove the snapshot returning the base blob. + */ + TDBlockBlobClient WithSnapshot(const std::string& snapshot) const; + + /** + * @brief Creates a clone of this instance that references a version ID rather than the base + * blob. + * + * @param versionId The version ID returning a URL to the base blob. + * @return A new TDBlockBlobClient instance. + * @remarks Pass empty string to remove the version ID returning the base blob. + */ + TDBlockBlobClient WithVersionId(const std::string& versionId) const; + + /** + * @brief Creates a new block blob, or updates the content of an existing block blob. Updating + * an existing block blob overwrites any existing metadata on the blob. + * + * @param content A BodyStream containing the content to upload. 
+ * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A UploadBlockBlobResult describing the state of the updated block blob. + */ + Azure::Response Upload( + Azure::Core::IO::BodyStream& content, const UploadBlockBlobOptions& options = UploadBlockBlobOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Creates a new block blob, or updates the content of an existing block blob. Updating + * an existing block blob overwrites any existing metadata on the blob. + * + * @param buffer A memory buffer containing the content to upload. + * @param bufferSize Size of the memory buffer. + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A UploadBlockBlobFromResult describing the state of the updated block blob. + */ + Azure::Response UploadFrom( + const uint8_t* buffer, size_t bufferSize, + const UploadBlockBlobFromOptions& options = UploadBlockBlobFromOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Creates a new block blob, or updates the content of an existing block blob. Updating + * an existing block blob overwrites any existing metadata on the blob. + * + * @param fileName A file containing the content to upload. + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A UploadBlockBlobFromResult describing the state of the updated block blob. + */ + Azure::Response UploadFrom( + const std::string& fileName, const UploadBlockBlobFromOptions& options = UploadBlockBlobFromOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + Azure::Response UploadFrom( + const std::string& fileName, int64_t offset, int64_t size, + const UploadBlockBlobFromOptions& options = UploadBlockBlobFromOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Creates a new Block Blob where the contents of the blob are read from a given URL. + * + * @param sourceUri Specifies the URL of the source blob. + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A UploadBlockBlobFromUriResult describing the state of the updated block blob. + */ + Azure::Response UploadFromUri( + const std::string& sourceUri, const UploadBlockBlobFromUriOptions& options = UploadBlockBlobFromUriOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Creates a new block as part of a block blob's staging area to be eventually + * committed via the CommitBlockList operation. + * + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the + * string must be less than or equal to 64 bytes in size. + * @param content A BodyStream containing the content to upload. + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A StageBlockResult describing the state of the updated block. 
+ */ + Azure::Response StageBlock( + const std::string& blockId, Azure::Core::IO::BodyStream& content, + const StageBlockOptions& options = StageBlockOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Creates a new block to be committed as part of a blob where the contents are read from + * the sourceUri. + * + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the + * string must be less than or equal to 64 bytes in size. + * @param sourceUri Specifies the uri of the source + * blob. The value may be a uri of up to 2 KB in length that specifies a blob. The source blob + * must either be public or must be authenticated via a shared access signature. If the source + * blob is public, no authentication is required to perform the operation. + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A StageBlockFromUriResult describing the state of the updated block blob. + */ + Azure::Response StageBlockFromUri( + const std::string& blockId, const std::string& sourceUri, + const StageBlockFromUriOptions& options = StageBlockFromUriOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Writes a blob by specifying the list of block IDs that make up the blob. In order to + * be written as part of a blob, a block must have been successfully written to the server in a + * prior StageBlock operation. You can call CommitBlockList to update a blob by uploading only + * those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, + * whichever list it may belong to. + * + * @param blockIds Base64 encoded block IDs to indicate that make up the blob. + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A CommitBlobBlockListResult describing the state of the updated block blob. + */ + Azure::Response CommitBlockList( + const std::vector& blockIds, const CommitBlockListOptions& options = CommitBlockListOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Retrieves the list of blocks that have been uploaded as part of a block blob. There + * are two block lists maintained for a blob. The Committed Block list has blocks that have been + * successfully committed to a given blob with CommitBlockList. The Uncommitted Block list has + * blocks that have been uploaded for a blob using StageBlock, but that have not yet been + * committed. + * + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A GetBlobBlockListResult describing requested block list. + */ + Azure::Response GetBlockList( + const GetBlockListOptions& options = GetBlockListOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + /** + * @brief Returns the result of a query against the blob. + * + * @param querySqlExpression The query expression in SQL. + * @param options Optional parameters to execute this function. + * @param context Context for cancelling long running operations. + * @return A QueryBlobResult describing the query result. 
+ */ + Azure::Response Query(const std::string& querySqlExpression, + const QueryBlobOptions& options = QueryBlobOptions(), + const Azure::Core::Context& context = Azure::Core::Context()) const; + + explicit TDBlockBlobClient(BlobClient blobClient); + + private: + friend class BlobClient; + friend class Files::DataLake::DataLakeFileClient; +}; + +} // namespace Blobs +} // namespace Storage +} // namespace Azure diff --git a/source/libs/azure/src/avro_parser.cpp b/source/libs/azure/src/avro_parser.cpp new file mode 100644 index 0000000000..485980e007 --- /dev/null +++ b/source/libs/azure/src/avro_parser.cpp @@ -0,0 +1,531 @@ +#if defined(USE_S3) +#include "avro_parser.hpp" + +#include +#include + +#include +#include + +namespace Azure { +namespace Storage { +namespace Blobs { +namespace _detail { + +namespace { +int64_t parseInt(AvroStreamReader::ReaderPos& data) { + uint64_t r = 0; + int nb = 0; + while (true) { + uint8_t c = (*data.BufferPtr)[data.Offset++]; + r = r | ((static_cast(c) & 0x7f) << (nb * 7)); + if (c & 0x80) { + ++nb; + continue; + } + break; + } + return static_cast(r >> 1) ^ -static_cast(r & 0x01); +} + +AvroSchema ParseSchemaFromJsonString(const std::string& jsonSchema) { + const static std::map BuiltinNameSchemaMap = { + {"string", AvroSchema::StringSchema}, {"bytes", AvroSchema::BytesSchema}, {"int", AvroSchema::IntSchema}, + {"long", AvroSchema::LongSchema}, {"float", AvroSchema::FloatSchema}, {"double", AvroSchema::DoubleSchema}, + {"boolean", AvroSchema::BoolSchema}, {"null", AvroSchema::NullSchema}, {"string", AvroSchema::StringSchema}, + }; + std::map nameSchemaMap = BuiltinNameSchemaMap; + + std::function parseSchemaFromJsonObject; + parseSchemaFromJsonObject = [&](const Core::Json::_internal::json& obj) -> AvroSchema { + if (obj.is_string()) { + auto typeName = obj.get(); + return nameSchemaMap.find(typeName)->second; + } else if (obj.is_array()) { + std::vector unionSchemas; + for (const auto& s : obj) { + unionSchemas.push_back(parseSchemaFromJsonObject(s)); + } + return AvroSchema::UnionSchema(std::move(unionSchemas)); + } else if (obj.is_object()) { + if (obj.count("namespace") != 0) { + throw std::runtime_error("Namespace isn't supported yet in Avro schema."); + } + if (obj.count("aliases") != 0) { + throw std::runtime_error("Alias isn't supported yet in Avro schema."); + } + auto typeName = obj["type"].get(); + auto i = nameSchemaMap.find(typeName); + if (i != nameSchemaMap.end()) { + return i->second; + } + if (typeName == "record") { + std::vector> fieldsSchema; + for (const auto& field : obj["fields"]) { + fieldsSchema.push_back( + std::make_pair(field["name"].get(), parseSchemaFromJsonObject(field["type"]))); + } + + const std::string recordName = obj["name"].get(); + auto recordSchema = AvroSchema::RecordSchema(recordName, std::move(fieldsSchema)); + nameSchemaMap.insert(std::make_pair(recordName, recordSchema)); + return recordSchema; + } else if (typeName == "enum") { + throw std::runtime_error("Enum type isn't supported yet in Avro schema."); + } else if (typeName == "array") { + return AvroSchema::ArraySchema(parseSchemaFromJsonObject(obj["items"])); + } else if (typeName == "map") { + return AvroSchema::MapSchema(parseSchemaFromJsonObject(obj["items"])); + } else if (typeName == "fixed") { + const std::string fixedName = obj["name"].get(); + auto fixedSchema = AvroSchema::FixedSchema(fixedName, obj["size"].get()); + nameSchemaMap.insert(std::make_pair(fixedName, fixedSchema)); + return fixedSchema; + } else { + throw 
std::runtime_error("Unrecognized type " + typeName + " in Avro schema."); + } + } + AZURE_UNREACHABLE_CODE(); + }; + + auto jsonRoot = Core::Json::_internal::json::parse(jsonSchema.begin(), jsonSchema.end()); + return parseSchemaFromJsonObject(jsonRoot); +} +} // namespace + +int64_t AvroStreamReader::ParseInt(const Core::Context& context) { + uint64_t r = 0; + int nb = 0; + while (true) { + Preload(1, context); + uint8_t c = m_streambuffer[m_pos.Offset++]; + + r = r | ((static_cast(c) & 0x7f) << (nb * 7)); + if (c & 0x80) { + ++nb; + continue; + } + break; + } + return static_cast(r >> 1) ^ -static_cast(r & 0x01); +} + +void AvroStreamReader::Advance(size_t n, const Core::Context& context) { + Preload(n, context); + m_pos.Offset += n; +} + +size_t AvroStreamReader::Preload(size_t n, const Core::Context& context) { + size_t oldAvailable = AvailableBytes(); + while (true) { + size_t newAvailable = TryPreload(n, context); + if (newAvailable >= n) { + return newAvailable; + } + if (oldAvailable == newAvailable) { + throw std::runtime_error("Unexpected EOF of Avro stream."); + } + oldAvailable = newAvailable; + } + AZURE_UNREACHABLE_CODE(); +} + +size_t AvroStreamReader::TryPreload(size_t n, const Core::Context& context) { + size_t availableBytes = AvailableBytes(); + if (availableBytes >= n) { + return availableBytes; + } + const size_t MinRead = 4096; + size_t tryReadSize = (std::max)(n, MinRead); + size_t currSize = m_streambuffer.size(); + m_streambuffer.resize(m_streambuffer.size() + tryReadSize); + size_t actualReadSize = m_stream->Read(m_streambuffer.data() + currSize, tryReadSize, context); + m_streambuffer.resize(currSize + actualReadSize); + return AvailableBytes(); +} + +void AvroStreamReader::Discard() { + constexpr size_t MinimumReleaseMemory = 128 * 1024; + if (m_pos.Offset < MinimumReleaseMemory) { + return; + } + const size_t availableBytes = AvailableBytes(); + std::memmove(&m_streambuffer[0], &m_streambuffer[m_pos.Offset], availableBytes); + m_streambuffer.resize(availableBytes); + m_pos.Offset = 0; +} + +const AvroSchema AvroSchema::StringSchema(AvroDatumType::String); +const AvroSchema AvroSchema::BytesSchema(AvroDatumType::Bytes); +const AvroSchema AvroSchema::IntSchema(AvroDatumType::Int); +const AvroSchema AvroSchema::LongSchema(AvroDatumType::Long); +const AvroSchema AvroSchema::FloatSchema(AvroDatumType::Float); +const AvroSchema AvroSchema::DoubleSchema(AvroDatumType::Double); +const AvroSchema AvroSchema::BoolSchema(AvroDatumType::Bool); +const AvroSchema AvroSchema::NullSchema(AvroDatumType::Null); + +AvroSchema AvroSchema::RecordSchema(std::string name, + const std::vector>& fieldsSchema) { + AvroSchema recordSchema(AvroDatumType::Record); + recordSchema.m_name = std::move(name); + recordSchema.m_status = std::make_shared(); + for (auto& i : fieldsSchema) { + recordSchema.m_status->m_keys.push_back(i.first); + recordSchema.m_status->m_schemas.push_back(i.second); + } + return recordSchema; +} + +AvroSchema AvroSchema::ArraySchema(AvroSchema elementSchema) { + AvroSchema arraySchema(AvroDatumType::Array); + arraySchema.m_status = std::make_shared(); + arraySchema.m_status->m_schemas.push_back(std::move(elementSchema)); + return arraySchema; +} + +AvroSchema AvroSchema::MapSchema(AvroSchema elementSchema) { + AvroSchema mapSchema(AvroDatumType::Map); + mapSchema.m_status = std::make_shared(); + mapSchema.m_status->m_schemas.push_back(std::move(elementSchema)); + return mapSchema; +} + +AvroSchema AvroSchema::UnionSchema(std::vector schemas) { + AvroSchema 
unionSchema(AvroDatumType::Union); + unionSchema.m_status = std::make_shared(); + unionSchema.m_status->m_schemas = std::move(schemas); + return unionSchema; +} + +AvroSchema AvroSchema::FixedSchema(std::string name, int64_t size) { + AvroSchema fixedSchema(AvroDatumType::Fixed); + fixedSchema.m_name = std::move(name); + fixedSchema.m_status = std::make_shared(); + fixedSchema.m_status->m_size = size; + return fixedSchema; +} + +void AvroDatum::Fill(AvroStreamReader& reader, const Core::Context& context) { + m_data = reader.m_pos; + if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { + int64_t stringSize = reader.ParseInt(context); + reader.Advance(static_cast(stringSize), context); + } else if (m_schema.Type() == AvroDatumType::Int || m_schema.Type() == AvroDatumType::Long || + m_schema.Type() == AvroDatumType::Enum) { + reader.ParseInt(context); + } else if (m_schema.Type() == AvroDatumType::Float) { + reader.Advance(4, context); + } else if (m_schema.Type() == AvroDatumType::Double) { + reader.Advance(8, context); + } else if (m_schema.Type() == AvroDatumType::Bool) { + reader.Advance(1, context); + } else if (m_schema.Type() == AvroDatumType::Null) { + reader.Advance(0, context); + } else if (m_schema.Type() == AvroDatumType::Record) { + for (const auto& s : m_schema.FieldSchemas()) { + AvroDatum(s).Fill(reader, context); + } + } else if (m_schema.Type() == AvroDatumType::Array) { + while (true) { + int64_t numElementsInBlock = reader.ParseInt(context); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = reader.ParseInt(context); + reader.Advance(static_cast(blockSize), context); + } else { + for (auto i = 0; i < numElementsInBlock; ++i) { + AvroDatum(m_schema.ItemSchema()).Fill(reader, context); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Map) { + while (true) { + int64_t numElementsInBlock = reader.ParseInt(context); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = reader.ParseInt(context); + reader.Advance(static_cast(blockSize), context); + } else { + for (int64_t i = 0; i < numElementsInBlock; ++i) { + AvroDatum(AvroSchema::StringSchema).Fill(reader, context); + AvroDatum(m_schema.ItemSchema()).Fill(reader, context); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Union) { + int64_t i = reader.ParseInt(context); + AvroDatum(m_schema.FieldSchemas()[static_cast(i)]).Fill(reader, context); + } else if (m_schema.Type() == AvroDatumType::Fixed) { + reader.Advance(m_schema.Size(), context); + } else { + AZURE_UNREACHABLE_CODE(); + } +} + +void AvroDatum::Fill(AvroStreamReader::ReaderPos& data) { + m_data = data; + if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { + int64_t stringSize = parseInt(data); + data.Offset += static_cast(stringSize); + } else if (m_schema.Type() == AvroDatumType::Int || m_schema.Type() == AvroDatumType::Long || + m_schema.Type() == AvroDatumType::Enum) { + parseInt(data); + } else if (m_schema.Type() == AvroDatumType::Float) { + data.Offset += 4; + } else if (m_schema.Type() == AvroDatumType::Double) { + data.Offset += 8; + } else if (m_schema.Type() == AvroDatumType::Bool) { + data.Offset += 1; + } else if (m_schema.Type() == AvroDatumType::Null) { + data.Offset += 0; + } else if (m_schema.Type() == AvroDatumType::Record) { + for (const auto& s : m_schema.FieldSchemas()) { + AvroDatum(s).Fill(data); + } + } else if (m_schema.Type() == 
AvroDatumType::Array) { + while (true) { + int64_t numElementsInBlock = parseInt(data); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = parseInt(data); + data.Offset += static_cast(blockSize); + } else { + for (auto i = 0; i < numElementsInBlock; ++i) { + AvroDatum(m_schema.ItemSchema()).Fill(data); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Map) { + while (true) { + int64_t numElementsInBlock = parseInt(data); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = parseInt(data); + data.Offset += static_cast(blockSize); + } else { + for (int64_t i = 0; i < numElementsInBlock; ++i) { + AvroDatum(AvroSchema::StringSchema).Fill(data); + AvroDatum(m_schema.ItemSchema()).Fill(data); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Union) { + int64_t i = parseInt(data); + AvroDatum(m_schema.FieldSchemas()[static_cast(i)]).Fill(data); + } else if (m_schema.Type() == AvroDatumType::Fixed) { + data.Offset += m_schema.Size(); + } else { + AZURE_UNREACHABLE_CODE(); + } +} + +template <> +AvroDatum::StringView AvroDatum::Value() const { + auto data = m_data; + if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { + const int64_t length = parseInt(data); + const uint8_t* start = &(*data.BufferPtr)[data.Offset]; + StringView ret{start, static_cast(length)}; + data.Offset += static_cast(length); + return ret; + } + if (m_schema.Type() == AvroDatumType::Fixed) { + const size_t fixedSize = m_schema.Size(); + const uint8_t* start = &(*data.BufferPtr)[data.Offset]; + StringView ret{start, fixedSize}; + data.Offset += fixedSize; + return ret; + } + AZURE_UNREACHABLE_CODE(); +} + +template <> +std::string AvroDatum::Value() const { + auto stringView = Value(); + return std::string(stringView.Data, stringView.Data + stringView.Length); +} + +template <> +std::vector AvroDatum::Value() const { + auto stringView = Value(); + return std::vector(stringView.Data, stringView.Data + stringView.Length); +} + +template <> +int64_t AvroDatum::Value() const { + auto data = m_data; + return parseInt(data); +} + +template <> +int32_t AvroDatum::Value() const { + return static_cast(Value()); +} + +template <> +bool AvroDatum::Value() const { + return Value(); +} + +template <> +std::nullptr_t AvroDatum::Value() const { + return nullptr; +} + +template <> +AvroRecord AvroDatum::Value() const { + auto data = m_data; + + AvroRecord r; + r.m_keys = &m_schema.FieldNames(); + for (const auto& schema : m_schema.FieldSchemas()) { + auto datum = AvroDatum(schema); + datum.Fill(data); + r.m_values.push_back(std::move(datum)); + } + + return r; +} + +template <> +AvroMap AvroDatum::Value() const { + auto data = m_data; + + AvroMap m; + while (true) { + int64_t numElementsInBlock = parseInt(data); + if (numElementsInBlock == 0) { + break; + } + if (numElementsInBlock < 0) { + numElementsInBlock = -numElementsInBlock; + parseInt(data); + } + for (int64_t i = 0; i < numElementsInBlock; ++i) { + auto keyDatum = AvroDatum(AvroSchema::StringSchema); + keyDatum.Fill(data); + auto valueDatum = AvroDatum(m_schema.ItemSchema()); + valueDatum.Fill(data); + m[keyDatum.Value()] = valueDatum; + } + } + return m; +} + +template <> +AvroDatum AvroDatum::Value() const { + auto data = m_data; + if (m_schema.Type() == AvroDatumType::Union) { + int64_t i = parseInt(data); + auto datum = AvroDatum(m_schema.FieldSchemas()[static_cast(i)]); + datum.Fill(data); + return datum; + } + 
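+  // only union datums can be unwrapped this way; any other schema type reaching this point is a logic error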
AZURE_UNREACHABLE_CODE(); +} + +AvroObjectContainerReader::AvroObjectContainerReader(Core::IO::BodyStream& stream) + : m_reader(std::make_unique(stream)) {} + +AvroDatum AvroObjectContainerReader::NextImpl(const AvroSchema* schema, const Core::Context& context) { + AZURE_ASSERT_FALSE(m_eof); + static const auto SyncMarkerSchema = AvroSchema::FixedSchema("Sync", 16); + if (!schema) { + static AvroSchema FileHeaderSchema = []() { + std::vector> fieldsSchema; + fieldsSchema.push_back(std::make_pair("magic", AvroSchema::FixedSchema("Magic", 4))); + fieldsSchema.push_back(std::make_pair("meta", AvroSchema::MapSchema(AvroSchema::BytesSchema))); + fieldsSchema.push_back(std::make_pair("sync", SyncMarkerSchema)); + return AvroSchema::RecordSchema("org.apache.avro.file.Header", std::move(fieldsSchema)); + }(); + auto fileHeaderDatum = AvroDatum(FileHeaderSchema); + fileHeaderDatum.Fill(*m_reader, context); + auto fileHeader = fileHeaderDatum.Value(); + if (fileHeader.Field("magic").Value() != "Obj\01") { + throw std::runtime_error("Invalid Avro object container magic."); + } + AvroMap meta = fileHeader.Field("meta").Value(); + std::string objectSchemaJson = meta["avro.schema"].Value(); + std::string codec = "null"; + if (meta.count("avro.codec") != 0) { + codec = meta["avro.codec"].Value(); + } + if (codec != "null") { + throw std::runtime_error("Unsupported Avro codec: " + codec); + } + m_syncMarker = fileHeader.Field("sync").Value(); + m_objectSchema = std::make_unique(ParseSchemaFromJsonString(objectSchemaJson)); + schema = m_objectSchema.get(); + } + + if (m_remainingObjectInCurrentBlock == 0) { + m_reader->Discard(); + m_remainingObjectInCurrentBlock = m_reader->ParseInt(context); + int64_t ObjectsSize = m_reader->ParseInt(context); + m_reader->Preload(static_cast(ObjectsSize), context); + } + + auto objectDatum = AvroDatum(*m_objectSchema); + objectDatum.Fill(*m_reader, context); + if (--m_remainingObjectInCurrentBlock == 0) { + auto markerDatum = AvroDatum(SyncMarkerSchema); + markerDatum.Fill(*m_reader, context); + auto marker = markerDatum.Value(); + if (marker != m_syncMarker) { + throw std::runtime_error("Sync marker doesn't match."); + } + m_eof = m_reader->TryPreload(1, context) == 0; + } + return objectDatum; +} + +size_t AvroStreamParser::OnRead(uint8_t* buffer, size_t count, Azure::Core::Context const& context) { + if (m_parserBuffer.Length != 0) { + size_t bytesToCopy = (std::min)(m_parserBuffer.Length, count); + std::memcpy(buffer, m_parserBuffer.Data, bytesToCopy); + m_parserBuffer.Data += bytesToCopy; + m_parserBuffer.Length -= bytesToCopy; + return bytesToCopy; + } + while (!m_parser.End()) { + auto datum = m_parser.Next(context); + if (datum.Schema().Type() == AvroDatumType::Union) { + datum = datum.Value(); + } + if (datum.Schema().Type() != AvroDatumType::Record) { + continue; + } + if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.resultData") { + auto record = datum.Value(); + auto dataDatum = record.Field("data"); + m_parserBuffer = dataDatum.Value(); + return OnRead(buffer, count, context); + } + if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.progress" && m_progressCallback) { + auto record = datum.Value(); + auto bytesScanned = record.Field("bytesScanned").Value(); + auto totalBytes = record.Field("totalBytes").Value(); + m_progressCallback(bytesScanned, totalBytes); + } + if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.error" && m_errorCallback) { + auto record = datum.Value(); + 
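+      // copy the fields of the Avro error record into a BlobQueryError and hand it to the registered error callback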
BlobQueryError e; + e.Name = record.Field("name").Value(); + e.Description = record.Field("description").Value(); + e.IsFatal = record.Field("fatal").Value(); + e.Position = record.Field("position").Value(); + m_errorCallback(std::move(e)); + } + } + return 0; +} +} // namespace _detail +} // namespace Blobs +} // namespace Storage +} // namespace Azure + +#endif diff --git a/source/libs/azure/src/avro_parser.hpp b/source/libs/azure/src/avro_parser.hpp new file mode 100644 index 0000000000..275d073c85 --- /dev/null +++ b/source/libs/azure/src/avro_parser.hpp @@ -0,0 +1,198 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +#pragma once + +#include "azure/storage/blobs/blob_options.hpp" + +#include + +#include +#include +#include + +namespace Azure { namespace Storage { namespace Blobs { namespace _detail { + enum class AvroDatumType + { + String, + Bytes, + Int, + Long, + Float, + Double, + Bool, + Null, + Record, + Enum, + Array, + Map, + Union, + Fixed, + }; + + class AvroStreamReader final { + public: + // position of a vector that lives through vector resizing + struct ReaderPos final + { + const std::vector* BufferPtr = nullptr; + size_t Offset = 0; + }; + explicit AvroStreamReader(Core::IO::BodyStream& stream) + : m_stream(&stream), m_pos{&m_streambuffer, 0} + { + } + AvroStreamReader(const AvroStreamReader&) = delete; + AvroStreamReader& operator=(const AvroStreamReader&) = delete; + + int64_t ParseInt(const Core::Context& context); + void Advance(size_t n, const Core::Context& context); + // Read at least n bytes from m_stream and append data to m_streambuffer. Return number of bytes + // available in m_streambuffer; + size_t Preload(size_t n, const Core::Context& context); + size_t TryPreload(size_t n, const Core::Context& context); + // discards data that's before m_pos + void Discard(); + + private: + size_t AvailableBytes() const { return m_streambuffer.size() - m_pos.Offset; } + + private: + Core::IO::BodyStream* m_stream; + std::vector m_streambuffer; + ReaderPos m_pos; + + friend class AvroDatum; + }; + + class AvroSchema final { + public: + static const AvroSchema StringSchema; + static const AvroSchema BytesSchema; + static const AvroSchema IntSchema; + static const AvroSchema LongSchema; + static const AvroSchema FloatSchema; + static const AvroSchema DoubleSchema; + static const AvroSchema BoolSchema; + static const AvroSchema NullSchema; + static AvroSchema RecordSchema( + std::string name, + const std::vector>& fieldsSchema); + static AvroSchema ArraySchema(AvroSchema elementSchema); + static AvroSchema MapSchema(AvroSchema elementSchema); + static AvroSchema UnionSchema(std::vector schemas); + static AvroSchema FixedSchema(std::string name, int64_t size); + + const std::string& Name() const { return m_name; } + AvroDatumType Type() const { return m_type; } + const std::vector& FieldNames() const { return m_status->m_keys; } + AvroSchema ItemSchema() const { return m_status->m_schemas[0]; } + const std::vector& FieldSchemas() const { return m_status->m_schemas; } + size_t Size() const { return static_cast(m_status->m_size); } + + private: + explicit AvroSchema(AvroDatumType type) : m_type(type) {} + + private: + AvroDatumType m_type; + std::string m_name; + + struct SharedStatus + { + std::vector m_keys; + std::vector m_schemas; + int64_t m_size = 0; + }; + std::shared_ptr m_status; + }; + + class AvroDatum final { + public: + AvroDatum() : m_schema(AvroSchema::NullSchema) {} + explicit AvroDatum(AvroSchema schema) : 
m_schema(std::move(schema)) {} + + void Fill(AvroStreamReader& reader, const Core::Context& context); + void Fill(AvroStreamReader::ReaderPos& data); + + const AvroSchema& Schema() const { return m_schema; } + + template T Value() const; + struct StringView + { + const uint8_t* Data = nullptr; + size_t Length = 0; + }; + + private: + AvroSchema m_schema; + AvroStreamReader::ReaderPos m_data; + }; + + using AvroMap = std::map; + + class AvroRecord final { + public: + bool HasField(const std::string& key) const { return FindField(key) != m_keys->size(); } + const AvroDatum& Field(const std::string& key) const { return m_values.at(FindField(key)); } + AvroDatum& Field(const std::string& key) { return m_values.at(FindField(key)); } + const AvroDatum& FieldAt(size_t i) const { return m_values.at(i); } + AvroDatum& FieldAt(size_t i) { return m_values.at(i); } + + private: + size_t FindField(const std::string& key) const + { + auto i = find(m_keys->begin(), m_keys->end(), key); + return i - m_keys->begin(); + } + const std::vector* m_keys = nullptr; + std::vector m_values; + + friend class AvroDatum; + }; + + class AvroObjectContainerReader final { + public: + explicit AvroObjectContainerReader(Core::IO::BodyStream& stream); + + bool End() const { return m_eof; } + // Calling Next() will invalidates the previous AvroDatum returned by this function and all + // AvroDatums propagated from there. + AvroDatum Next(const Core::Context& context) { return NextImpl(m_objectSchema.get(), context); } + + private: + AvroDatum NextImpl(const AvroSchema* schema, const Core::Context& context); + + private: + std::unique_ptr m_reader; + std::unique_ptr m_objectSchema; + std::string m_syncMarker; + int64_t m_remainingObjectInCurrentBlock = 0; + bool m_eof = false; + }; + + class AvroStreamParser final : public Core::IO::BodyStream { + public: + explicit AvroStreamParser( + std::unique_ptr inner, + std::function progressCallback, + std::function errorCallback) + : m_inner(std::move(inner)), m_parser(*m_inner), + m_progressCallback(std::move(progressCallback)), m_errorCallback(std::move(errorCallback)) + { + } + + int64_t Length() const override { return -1; } + void Rewind() override { this->m_inner->Rewind(); } + + private: + size_t OnRead(uint8_t* buffer, size_t count, const Azure::Core::Context& context) override; + + private: + std::unique_ptr m_inner; + AvroObjectContainerReader m_parser; + std::function m_progressCallback; + std::function m_errorCallback; + AvroDatum::StringView m_parserBuffer; + }; + +}}}} // namespace Azure::Storage::Blobs::_detail diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp new file mode 100644 index 0000000000..83cba1c877 --- /dev/null +++ b/source/libs/azure/src/az.cpp @@ -0,0 +1,402 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + +#define ALLOW_FORBID_FUNC + +#include "az.h" + +#include "os.h" +#include "taoserror.h" +#include "tglobal.h" + +#if defined(USE_S3) + +#include +#include +#include "td_block_blob_client.hpp" + +// Add appropriate using namespace directives +using namespace Azure::Storage; +using namespace Azure::Storage::Blobs; + +extern char tsS3Hostname[][TSDB_FQDN_LEN]; +extern char tsS3AccessKeyId[][TSDB_FQDN_LEN]; +extern char tsS3AccessKeySecret[][TSDB_FQDN_LEN]; +extern char tsS3BucketName[TSDB_FQDN_LEN]; + +extern int8_t tsS3Enabled; +extern int8_t tsS3EpNum; + +int32_t azBegin() { return TSDB_CODE_SUCCESS; } + +void azEnd() {} + +static void azDumpCfgByEp(int8_t epIndex) { + // clang-format off + (void)fprintf(stdout, + "%-24s %s\n" + "%-24s %s\n" + "%-24s %s\n" + // "%-24s %s\n" + "%-24s %s\n" + "%-24s %s\n", + "hostName", tsS3Hostname[epIndex], + "bucketName", tsS3BucketName, + "protocol", "https only", + //"uristyle", (uriStyleG[epIndex] == S3UriStyleVirtualHost ? "virtualhost" : "path"), + "accessKey", tsS3AccessKeyId[epIndex], + "accessKeySecret", tsS3AccessKeySecret[epIndex]); + // clang-format on +} + +static int32_t azListBucket(char const *bucketname) { + int32_t code = 0; + const std::string delimiter = "/"; + std::string accountName = tsS3AccessKeyId[0]; + std::string accountKey = tsS3AccessKeySecret[0]; + std::string accountURL = tsS3Hostname[0]; + accountURL = "https://" + accountURL; + + try { + auto sharedKeyCredential = std::make_shared(accountName, accountKey); + + StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + + BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); + + std::string containerName = bucketname; + auto containerClient = blobServiceClient.GetBlobContainerClient(containerName); + + Azure::Storage::Blobs::ListBlobsOptions options; + options.Prefix = "s3"; + + (void)fprintf(stderr, "objects:\n"); + // std::set listBlobs; + for (auto pageResult = containerClient.ListBlobs(options); pageResult.HasPage(); pageResult.MoveToNextPage()) { + for (const auto &blob : pageResult.Blobs) { + (void)fprintf(stderr, "%s\n", blob.Name.c_str()); + } + } + } catch (const Azure::Core::RequestFailedException &e) { + uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); + // uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); + + code = TAOS_SYSTEM_ERROR(EIO); + TAOS_RETURN(code); + } + + TAOS_RETURN(code); +} + +int32_t azCheckCfg() { + int32_t code = 0, lino = 0; + int8_t i = 0; + + // for (; i < tsS3EpNum; i++) { + (void)fprintf(stdout, "test s3 ep (%d/%d):\n", i + 1, tsS3EpNum); + // s3DumpCfgByEp(i); + azDumpCfgByEp(0); + + // test put + char testdata[17] = "0123456789abcdef"; + const char *objectname[] = {"s3test.txt"}; + char path[PATH_MAX] = {0}; + int ds_len = strlen(TD_DIRSEP); + int tmp_len = strlen(tsTempDir); + + (void)snprintf(path, PATH_MAX, "%s", tsTempDir); + if (strncmp(tsTempDir + tmp_len - ds_len, TD_DIRSEP, ds_len) != 0) { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", TD_DIRSEP); + (void)snprintf(path + tmp_len + ds_len, PATH_MAX - tmp_len - ds_len, "%s", objectname[0]); + } else { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", objectname[0]); + } + + uint8_t *pBlock = NULL; + int c_offset = 10; + int c_len = 6; + char buf[7] = {0}; + + TdFilePtr fp = taosOpenFile(path, TD_FILE_CREATE | TD_FILE_WRITE | TD_FILE_READ | TD_FILE_TRUNC); + if (!fp) { + 
(void)fprintf(stderr, "failed to open test file: %s.\n", path); + // uError("ERROR: %s Failed to open %s", __func__, path); + TAOS_CHECK_GOTO(terrno, &lino, _next); + } + if (taosWriteFile(fp, testdata, strlen(testdata)) < 0) { + (void)fprintf(stderr, "failed to write test file: %s.\n", path); + TAOS_CHECK_GOTO(terrno, &lino, _next); + } + if (taosFsyncFile(fp) < 0) { + (void)fprintf(stderr, "failed to fsync test file: %s.\n", path); + TAOS_CHECK_GOTO(terrno, &lino, _next); + } + (void)taosCloseFile(&fp); + + (void)fprintf(stderr, "\nstart to put object: %s, file: %s content: %s\n", objectname[0], path, testdata); + code = azPutObjectFromFileOffset(path, objectname[0], 0, 16); + if (code != 0) { + (void)fprintf(stderr, "put object %s : failed.\n", objectname[0]); + TAOS_CHECK_GOTO(code, &lino, _next); + } + (void)fprintf(stderr, "put object %s: success.\n\n", objectname[0]); + + // list buckets + (void)fprintf(stderr, "start to list bucket %s by prefix s3.\n", tsS3BucketName); + // code = s3ListBucketByEp(tsS3BucketName, i); + code = azListBucket(tsS3BucketName); + if (code != 0) { + (void)fprintf(stderr, "listing bucket %s : failed.\n", tsS3BucketName); + TAOS_CHECK_GOTO(code, &lino, _next); + } + (void)fprintf(stderr, "listing bucket %s: success.\n\n", tsS3BucketName); + + // test range get + (void)fprintf(stderr, "start to range get object %s offset: %d len: %d.\n", objectname[0], c_offset, c_len); + code = azGetObjectBlock(objectname[0], c_offset, c_len, true, &pBlock); + if (code != 0) { + (void)fprintf(stderr, "get object %s : failed.\n", objectname[0]); + TAOS_CHECK_GOTO(code, &lino, _next); + } + + (void)memcpy(buf, pBlock, c_len); + taosMemoryFree(pBlock); + (void)fprintf(stderr, "object content: %s\n", buf); + (void)fprintf(stderr, "get object %s: success.\n\n", objectname[0]); + + // delete test object + (void)fprintf(stderr, "start to delete object: %s.\n", objectname[0]); + // code = azDeleteObjectsByPrefix(objectname[0]); + azDeleteObjectsByPrefix(objectname[0]); + /* + if (code != 0) { + (void)fprintf(stderr, "delete object %s : failed.\n", objectname[0]); + TAOS_CHECK_GOTO(code, &lino, _next); + } + */ + (void)fprintf(stderr, "delete object %s: success.\n\n", objectname[0]); + +_next: + if (fp) { + (void)taosCloseFile(&fp); + } + + if (TSDB_CODE_SUCCESS != code) { + (void)fprintf(stderr, "s3 check failed, code: %d, line: %d, index: %d.\n", code, lino, i); + } + + (void)fprintf(stdout, "=================================================================\n"); + //} + + // azEnd(); + + TAOS_RETURN(code); +} + +int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size) { + int32_t code = 0; + + std::string endpointUrl = tsS3Hostname[0]; // GetEndpointUrl(); + std::string accountName = tsS3AccessKeyId[0]; // GetAccountName(); + std::string accountKey = tsS3AccessKeySecret[0]; // GetAccountKey(); + + try { + auto sharedKeyCredential = std::make_shared(accountName, accountKey); + + std::string accountURL = tsS3Hostname[0]; + StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + + accountURL = "https://" + accountURL; + BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); + + std::string containerName = tsS3BucketName; + auto containerClient = blobServiceClient.GetBlobContainerClient(containerName); + + // Create the container if it does not exist + // std::cout << "Creating container: " << containerName << std::endl; + // containerClient.CreateIfNotExists(); + + 
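+    // Note: blobName and blobContent just below appear to be leftovers from the
+    // Azure SDK sample; the actual upload uses the caller's object_name and
+    // streams bytes [offset, offset + size) of `file` via TDBlockBlobClient::UploadFrom.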
std::string blobName = "blob.txt"; + uint8_t blobContent[] = "Hello Azure!"; + // Create the block blob client + // BlockBlobClient blobClient = containerClient.GetBlockBlobClient(blobName); + // TDBlockBlobClient blobClient(containerClient.GetBlobClient(blobName)); + TDBlockBlobClient blobClient(containerClient.GetBlobClient(object_name)); + + // Upload the blob + // std::cout << "Uploading blob: " << blobName << std::endl; + // blobClient.UploadFrom(blobContent, sizeof(blobContent)); + blobClient.UploadFrom(file, offset, size); + //(void)_azUploadFrom(blobClient, file, offset, size); + /* + auto blockBlobClient = BlockBlobClient(endpointUrl, sharedKeyCredential); + + // Create some data to upload into the blob. + std::vector data = {1, 2, 3, 4}; + Azure::Core::IO::MemoryBodyStream stream(data); + + Azure::Response response = blockBlobClient.Upload(stream); + + Models::UploadBlockBlobResult model = response.Value; + std::cout << "Last modified date of uploaded blob: " << model.LastModified.ToString() + << std::endl; + */ + } catch (const Azure::Core::RequestFailedException &e) { + /* + std::cout << "Status Code: " << static_cast(e.StatusCode) << ", Reason Phrase: " << e.ReasonPhrase + << std::endl; + std::cout << e.what() << std::endl; + */ + code = TAOS_SYSTEM_ERROR(EIO); + uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + TAOS_RETURN(code); + } + + TAOS_RETURN(code); +} + +int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { + int32_t code = TSDB_CODE_SUCCESS; + std::string accountName = tsS3AccessKeyId[0]; + std::string accountKey = tsS3AccessKeySecret[0]; + std::string accountURL = tsS3Hostname[0]; + accountURL = "https://" + accountURL; + + try { + auto sharedKeyCredential = std::make_shared(accountName, accountKey); + + StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + + BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); + + std::string containerName = tsS3BucketName; + auto containerClient = blobServiceClient.GetBlobContainerClient(containerName); + + TDBlockBlobClient blobClient(containerClient.GetBlobClient(object_name)); + + uint8_t *buf = (uint8_t *)taosMemoryCalloc(1, size); + if (!buf) { + return terrno; + } + + Blobs::DownloadBlobToOptions options; + // options.TransferOptions.Concurrency = concurrency; + // if (offset.HasValue() || length.HasValue()) { + options.Range = Azure::Core::Http::HttpRange(); + options.Range.Value().Offset = offset; + options.Range.Value().Length = size; + //} + /* + if (initialChunkSize.HasValue()) { + options.TransferOptions.InitialChunkSize = initialChunkSize.Value(); + } + if (chunkSize.HasValue()) { + options.TransferOptions.ChunkSize = chunkSize.Value(); + } + */ + + auto res = blobClient.DownloadTo(buf, size, options); + if (check && res.Value.ContentRange.Length.Value() != size) { + code = TAOS_SYSTEM_ERROR(EIO); + uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + TAOS_RETURN(code); + } + + *ppBlock = buf; + } catch (const Azure::Core::RequestFailedException &e) { + uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); + code = TAOS_SYSTEM_ERROR(EIO); + uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + TAOS_RETURN(code); + } + + TAOS_RETURN(code); +} + +void azDeleteObjectsByPrefix(const char *prefix) { + const std::string delimiter = "/"; + std::string 
accountName = tsS3AccessKeyId[0]; + std::string accountKey = tsS3AccessKeySecret[0]; + std::string accountURL = tsS3Hostname[0]; + accountURL = "https://" + accountURL; + + try { + auto sharedKeyCredential = std::make_shared(accountName, accountKey); + + StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + + BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); + + std::string containerName = tsS3BucketName; + auto containerClient = blobServiceClient.GetBlobContainerClient(containerName); + + Azure::Storage::Blobs::ListBlobsOptions options; + options.Prefix = prefix; + + std::set listBlobs; + for (auto pageResult = containerClient.ListBlobs(options); pageResult.HasPage(); pageResult.MoveToNextPage()) { + for (const auto &blob : pageResult.Blobs) { + listBlobs.insert(blob.Name); + } + } + + for (auto blobName : listBlobs) { + auto blobClient = containerClient.GetAppendBlobClient(blobName); + blobClient.Delete(); + } + } catch (const Azure::Core::RequestFailedException &e) { + uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); + // uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); + } +} + +int32_t azPutObjectFromFile2(const char *file, const char *object, int8_t withcp) { return 0; } + +int32_t azGetObjectsByPrefix(const char *prefix, const char *path) { return 0; } + +int32_t azGetObjectToFile(const char *object_name, const char *fileName) { return 0; } + +int32_t azDeleteObjects(const char *object_name[], int nobject) { return 0; } + +#else + +int32_t azBegin() { return TSDB_CODE_SUCCESS; } + +void azEnd() {} + +int32_t azCheckCfg() { return TSDB_CODE_SUCCESS; } + +int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size) { + return TSDB_CODE_SUCCESS; +} + +int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { + return TSDB_CODE_SUCCESS; +} + +void azDeleteObjectsByPrefix(const char *prefix) {} + +int32_t azPutObjectFromFile2(const char *file, const char *object, int8_t withcp) { return 0; } + +int32_t azGetObjectsByPrefix(const char *prefix, const char *path) { return 0; } + +int32_t azGetObjectToFile(const char *object_name, const char *fileName) { return 0; } + +int32_t azDeleteObjects(const char *object_name[], int nobject) { return 0; } + +#endif diff --git a/source/libs/azure/src/td_block_blob_client.cpp b/source/libs/azure/src/td_block_blob_client.cpp new file mode 100644 index 0000000000..e75c6ae17f --- /dev/null +++ b/source/libs/azure/src/td_block_blob_client.cpp @@ -0,0 +1,625 @@ +#if defined(USE_S3) + +#include "td_block_blob_client.hpp" + +#include + +#if defined(AZ_PLATFORM_WINDOWS) +#if !defined(WIN32_LEAN_AND_MEAN) +#define WIN32_LEAN_AND_MEAN +#endif +#if !defined(NOMINMAX) +#define NOMINMAX +#endif +#include +#endif + +#include "./avro_parser.hpp" + +#include +#include +#include +#include +#include +#include +#include +#include + +namespace Azure { +namespace Storage { +namespace Blobs { + +TDBlockBlobClient TDBlockBlobClient::CreateFromConnectionString(const std::string& connectionString, + const std::string& blobContainerName, + const std::string& blobName, + const BlobClientOptions& options) { + TDBlockBlobClient newClient( + BlobClient::CreateFromConnectionString(connectionString, blobContainerName, blobName, options)); + return newClient; +} + 
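+// Editorial note (not part of the upstream Azure SDK): TDBlockBlobClient derives
+// from BlobClient and re-implements the block-blob upload path (Upload, UploadFrom,
+// StageBlock, CommitBlockList) so that az.cpp can upload an arbitrary byte range of
+// a local file. Uploads above the single-upload threshold are split into staged
+// blocks (4 MiB by default, at most 50000 blocks, each capped at 4000 MiB) and then
+// committed with CommitBlockList. A minimal usage sketch, assuming a reachable
+// account, container and shared-key credential (all names below are hypothetical):
+//
+//   auto cred = std::make_shared<StorageSharedKeyCredential>(accountName, accountKey);
+//   BlobServiceClient service("https://" + accountHost, cred);
+//   auto container = service.GetBlobContainerClient(bucketName);
+//   TDBlockBlobClient blob(container.GetBlobClient("vnode2/f1.data"));
+//   blob.UploadFrom("/var/lib/taos/f1.data", /*offset=*/0, /*size=*/4096);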
+TDBlockBlobClient::TDBlockBlobClient(const std::string& blobUrl, std::shared_ptr credential, + const BlobClientOptions& options) + : BlobClient(blobUrl, std::move(credential), options) {} + +TDBlockBlobClient::TDBlockBlobClient(const std::string& blobUrl, + std::shared_ptr credential, + const BlobClientOptions& options) + : BlobClient(blobUrl, std::move(credential), options) {} + +TDBlockBlobClient::TDBlockBlobClient(const std::string& blobUrl, const BlobClientOptions& options) + : BlobClient(blobUrl, options) {} + +TDBlockBlobClient::TDBlockBlobClient(BlobClient blobClient) : BlobClient(std::move(blobClient)) {} + +TDBlockBlobClient TDBlockBlobClient::WithSnapshot(const std::string& snapshot) const { + TDBlockBlobClient newClient(*this); + if (snapshot.empty()) { + newClient.m_blobUrl.RemoveQueryParameter(_internal::HttpQuerySnapshot); + } else { + newClient.m_blobUrl.AppendQueryParameter(_internal::HttpQuerySnapshot, + _internal::UrlEncodeQueryParameter(snapshot)); + } + return newClient; +} + +TDBlockBlobClient TDBlockBlobClient::WithVersionId(const std::string& versionId) const { + TDBlockBlobClient newClient(*this); + if (versionId.empty()) { + newClient.m_blobUrl.RemoveQueryParameter(_internal::HttpQueryVersionId); + } else { + newClient.m_blobUrl.AppendQueryParameter(_internal::HttpQueryVersionId, + _internal::UrlEncodeQueryParameter(versionId)); + } + return newClient; +} + +Azure::Response TDBlockBlobClient::Upload(Azure::Core::IO::BodyStream& content, + const UploadBlockBlobOptions& options, + const Azure::Core::Context& context) const { + _detail::BlockBlobClient::UploadBlockBlobOptions protocolLayerOptions; + if (options.TransactionalContentHash.HasValue()) { + if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Md5) { + protocolLayerOptions.TransactionalContentMD5 = options.TransactionalContentHash.Value().Value; + } else if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Crc64) { + protocolLayerOptions.TransactionalContentCrc64 = options.TransactionalContentHash.Value().Value; + } + } + protocolLayerOptions.BlobContentType = options.HttpHeaders.ContentType; + protocolLayerOptions.BlobContentEncoding = options.HttpHeaders.ContentEncoding; + protocolLayerOptions.BlobContentLanguage = options.HttpHeaders.ContentLanguage; + protocolLayerOptions.BlobContentMD5 = options.HttpHeaders.ContentHash.Value; + protocolLayerOptions.BlobContentDisposition = options.HttpHeaders.ContentDisposition; + protocolLayerOptions.BlobCacheControl = options.HttpHeaders.CacheControl; + protocolLayerOptions.Metadata = std::map(options.Metadata.begin(), options.Metadata.end()); + protocolLayerOptions.BlobTagsString = _detail::TagsToString(options.Tags); + protocolLayerOptions.Tier = options.AccessTier; + protocolLayerOptions.LeaseId = options.AccessConditions.LeaseId; + protocolLayerOptions.IfModifiedSince = options.AccessConditions.IfModifiedSince; + protocolLayerOptions.IfUnmodifiedSince = options.AccessConditions.IfUnmodifiedSince; + protocolLayerOptions.IfMatch = options.AccessConditions.IfMatch; + protocolLayerOptions.IfNoneMatch = options.AccessConditions.IfNoneMatch; + protocolLayerOptions.IfTags = options.AccessConditions.TagConditions; + if (m_customerProvidedKey.HasValue()) { + protocolLayerOptions.EncryptionKey = m_customerProvidedKey.Value().Key; + protocolLayerOptions.EncryptionKeySha256 = m_customerProvidedKey.Value().KeyHash; + protocolLayerOptions.EncryptionAlgorithm = m_customerProvidedKey.Value().Algorithm.ToString(); + } + 
protocolLayerOptions.EncryptionScope = m_encryptionScope; + if (options.ImmutabilityPolicy.HasValue()) { + protocolLayerOptions.ImmutabilityPolicyExpiry = options.ImmutabilityPolicy.Value().ExpiresOn; + protocolLayerOptions.ImmutabilityPolicyMode = options.ImmutabilityPolicy.Value().PolicyMode; + } + protocolLayerOptions.LegalHold = options.HasLegalHold; + + return _detail::BlockBlobClient::Upload(*m_pipeline, m_blobUrl, content, protocolLayerOptions, context); +} + +Azure::Response TDBlockBlobClient::UploadFrom( + const uint8_t* buffer, size_t bufferSize, const UploadBlockBlobFromOptions& options, + const Azure::Core::Context& context) const { + constexpr int64_t DefaultStageBlockSize = 4 * 1024 * 1024ULL; + constexpr int64_t MaxStageBlockSize = 4000 * 1024 * 1024ULL; + constexpr int64_t MaxBlockNumber = 50000; + constexpr int64_t BlockGrainSize = 1 * 1024 * 1024; + + if (static_cast(options.TransferOptions.SingleUploadThreshold) > (std::numeric_limits::max)()) { + throw Azure::Core::RequestFailedException("Single upload threshold is too big"); + } + if (bufferSize <= static_cast(options.TransferOptions.SingleUploadThreshold)) { + Azure::Core::IO::MemoryBodyStream contentStream(buffer, bufferSize); + UploadBlockBlobOptions uploadBlockBlobOptions; + uploadBlockBlobOptions.HttpHeaders = options.HttpHeaders; + uploadBlockBlobOptions.Metadata = options.Metadata; + uploadBlockBlobOptions.Tags = options.Tags; + uploadBlockBlobOptions.AccessTier = options.AccessTier; + uploadBlockBlobOptions.ImmutabilityPolicy = options.ImmutabilityPolicy; + uploadBlockBlobOptions.HasLegalHold = options.HasLegalHold; + return Upload(contentStream, uploadBlockBlobOptions, context); + } + + int64_t chunkSize; + if (options.TransferOptions.ChunkSize.HasValue()) { + chunkSize = options.TransferOptions.ChunkSize.Value(); + } else { + int64_t minChunkSize = (bufferSize + MaxBlockNumber - 1) / MaxBlockNumber; + minChunkSize = (minChunkSize + BlockGrainSize - 1) / BlockGrainSize * BlockGrainSize; + chunkSize = (std::max)(DefaultStageBlockSize, minChunkSize); + } + if (chunkSize > MaxStageBlockSize) { + throw Azure::Core::RequestFailedException("Block size is too big."); + } + + std::vector blockIds; + auto getBlockId = [](int64_t id) { + constexpr size_t BlockIdLength = 64; + std::string blockId = std::to_string(id); + blockId = std::string(BlockIdLength - blockId.length(), '0') + blockId; + return Azure::Core::Convert::Base64Encode(std::vector(blockId.begin(), blockId.end())); + }; + + auto uploadBlockFunc = [&](int64_t offset, int64_t length, int64_t chunkId, int64_t numChunks) { + Azure::Core::IO::MemoryBodyStream contentStream(buffer + offset, static_cast(length)); + StageBlockOptions chunkOptions; + auto blockInfo = StageBlock(getBlockId(chunkId), contentStream, chunkOptions, context); + if (chunkId == numChunks - 1) { + blockIds.resize(static_cast(numChunks)); + } + }; + + _internal::ConcurrentTransfer(0, bufferSize, chunkSize, options.TransferOptions.Concurrency, uploadBlockFunc); + + for (size_t i = 0; i < blockIds.size(); ++i) { + blockIds[i] = getBlockId(static_cast(i)); + } + CommitBlockListOptions commitBlockListOptions; + commitBlockListOptions.HttpHeaders = options.HttpHeaders; + commitBlockListOptions.Metadata = options.Metadata; + commitBlockListOptions.Tags = options.Tags; + commitBlockListOptions.AccessTier = options.AccessTier; + commitBlockListOptions.ImmutabilityPolicy = options.ImmutabilityPolicy; + commitBlockListOptions.HasLegalHold = options.HasLegalHold; + auto commitBlockListResponse = 
CommitBlockList(blockIds, commitBlockListOptions, context); + + Models::UploadBlockBlobFromResult ret; + ret.ETag = std::move(commitBlockListResponse.Value.ETag); + ret.LastModified = std::move(commitBlockListResponse.Value.LastModified); + ret.VersionId = std::move(commitBlockListResponse.Value.VersionId); + ret.IsServerEncrypted = commitBlockListResponse.Value.IsServerEncrypted; + ret.EncryptionKeySha256 = std::move(commitBlockListResponse.Value.EncryptionKeySha256); + ret.EncryptionScope = std::move(commitBlockListResponse.Value.EncryptionScope); + return Azure::Response(std::move(ret), + std::move(commitBlockListResponse.RawResponse)); +} + +Azure::Response TDBlockBlobClient::UploadFrom( + const std::string& fileName, const UploadBlockBlobFromOptions& options, const Azure::Core::Context& context) const { + constexpr int64_t DefaultStageBlockSize = 4 * 1024 * 1024ULL; + constexpr int64_t MaxStageBlockSize = 4000 * 1024 * 1024ULL; + constexpr int64_t MaxBlockNumber = 50000; + constexpr int64_t BlockGrainSize = 1 * 1024 * 1024; + + { + Azure::Core::IO::FileBodyStream contentStream(fileName); + + if (contentStream.Length() <= options.TransferOptions.SingleUploadThreshold) { + UploadBlockBlobOptions uploadBlockBlobOptions; + uploadBlockBlobOptions.HttpHeaders = options.HttpHeaders; + uploadBlockBlobOptions.Metadata = options.Metadata; + uploadBlockBlobOptions.Tags = options.Tags; + uploadBlockBlobOptions.AccessTier = options.AccessTier; + uploadBlockBlobOptions.ImmutabilityPolicy = options.ImmutabilityPolicy; + uploadBlockBlobOptions.HasLegalHold = options.HasLegalHold; + return Upload(contentStream, uploadBlockBlobOptions, context); + } + } + + std::vector blockIds; + auto getBlockId = [](int64_t id) { + constexpr size_t BlockIdLength = 64; + std::string blockId = std::to_string(id); + blockId = std::string(BlockIdLength - blockId.length(), '0') + blockId; + return Azure::Core::Convert::Base64Encode(std::vector(blockId.begin(), blockId.end())); + }; + + _internal::FileReader fileReader(fileName); + + auto uploadBlockFunc = [&](int64_t offset, int64_t length, int64_t chunkId, int64_t numChunks) { + Azure::Core::IO::_internal::RandomAccessFileBodyStream contentStream(fileReader.GetHandle(), offset, length); + StageBlockOptions chunkOptions; + auto blockInfo = StageBlock(getBlockId(chunkId), contentStream, chunkOptions, context); + if (chunkId == numChunks - 1) { + blockIds.resize(static_cast(numChunks)); + } + }; + + int64_t chunkSize; + if (options.TransferOptions.ChunkSize.HasValue()) { + chunkSize = options.TransferOptions.ChunkSize.Value(); + } else { + int64_t minChunkSize = (fileReader.GetFileSize() + MaxBlockNumber - 1) / MaxBlockNumber; + minChunkSize = (minChunkSize + BlockGrainSize - 1) / BlockGrainSize * BlockGrainSize; + chunkSize = (std::max)(DefaultStageBlockSize, minChunkSize); + } + if (chunkSize > MaxStageBlockSize) { + throw Azure::Core::RequestFailedException("Block size is too big."); + } + + _internal::ConcurrentTransfer(0, fileReader.GetFileSize(), chunkSize, options.TransferOptions.Concurrency, + uploadBlockFunc); + + for (size_t i = 0; i < blockIds.size(); ++i) { + blockIds[i] = getBlockId(static_cast(i)); + } + CommitBlockListOptions commitBlockListOptions; + commitBlockListOptions.HttpHeaders = options.HttpHeaders; + commitBlockListOptions.Metadata = options.Metadata; + commitBlockListOptions.Tags = options.Tags; + commitBlockListOptions.AccessTier = options.AccessTier; + commitBlockListOptions.ImmutabilityPolicy = options.ImmutabilityPolicy; + 
commitBlockListOptions.HasLegalHold = options.HasLegalHold; + auto commitBlockListResponse = CommitBlockList(blockIds, commitBlockListOptions, context); + + Models::UploadBlockBlobFromResult result; + result.ETag = commitBlockListResponse.Value.ETag; + result.LastModified = commitBlockListResponse.Value.LastModified; + result.VersionId = commitBlockListResponse.Value.VersionId; + result.IsServerEncrypted = commitBlockListResponse.Value.IsServerEncrypted; + result.EncryptionKeySha256 = commitBlockListResponse.Value.EncryptionKeySha256; + result.EncryptionScope = commitBlockListResponse.Value.EncryptionScope; + return Azure::Response(std::move(result), + std::move(commitBlockListResponse.RawResponse)); +} + +Azure::Response TDBlockBlobClient::UploadFrom( + const std::string& fileName, int64_t offset, int64_t size, const UploadBlockBlobFromOptions& options, + const Azure::Core::Context& context) const { + _internal::FileReader fileReader(fileName); + + { + Azure::Core::IO::_internal::RandomAccessFileBodyStream contentStream(fileReader.GetHandle(), offset, size); + + if (size <= options.TransferOptions.SingleUploadThreshold) { + UploadBlockBlobOptions uploadBlockBlobOptions; + uploadBlockBlobOptions.HttpHeaders = options.HttpHeaders; + uploadBlockBlobOptions.Metadata = options.Metadata; + uploadBlockBlobOptions.Tags = options.Tags; + uploadBlockBlobOptions.AccessTier = options.AccessTier; + uploadBlockBlobOptions.ImmutabilityPolicy = options.ImmutabilityPolicy; + uploadBlockBlobOptions.HasLegalHold = options.HasLegalHold; + return Upload(contentStream, uploadBlockBlobOptions, context); + } + } + + std::vector blockIds; + auto getBlockId = [](int64_t id) { + constexpr size_t BlockIdLength = 64; + std::string blockId = std::to_string(id); + blockId = std::string(BlockIdLength - blockId.length(), '0') + blockId; + return Azure::Core::Convert::Base64Encode(std::vector(blockId.begin(), blockId.end())); + }; + + auto uploadBlockFunc = [&](int64_t offset, int64_t length, int64_t chunkId, int64_t numChunks) { + Azure::Core::IO::_internal::RandomAccessFileBodyStream contentStream(fileReader.GetHandle(), offset, length); + StageBlockOptions chunkOptions; + auto blockInfo = StageBlock(getBlockId(chunkId), contentStream, chunkOptions, context); + if (chunkId == numChunks - 1) { + blockIds.resize(static_cast(numChunks)); + } + }; + + constexpr int64_t DefaultStageBlockSize = 4 * 1024 * 1024ULL; + constexpr int64_t MaxStageBlockSize = 4000 * 1024 * 1024ULL; + constexpr int64_t MaxBlockNumber = 50000; + constexpr int64_t BlockGrainSize = 1 * 1024 * 1024; + + int64_t chunkSize; + if (options.TransferOptions.ChunkSize.HasValue()) { + chunkSize = options.TransferOptions.ChunkSize.Value(); + } else { + int64_t minChunkSize = (size + MaxBlockNumber - 1) / MaxBlockNumber; + minChunkSize = (minChunkSize + BlockGrainSize - 1) / BlockGrainSize * BlockGrainSize; + chunkSize = (std::max)(DefaultStageBlockSize, minChunkSize); + } + if (chunkSize > MaxStageBlockSize) { + throw Azure::Core::RequestFailedException("Block size is too big."); + } + + _internal::ConcurrentTransfer(offset, size, chunkSize, options.TransferOptions.Concurrency, uploadBlockFunc); + + for (size_t i = 0; i < blockIds.size(); ++i) { + blockIds[i] = getBlockId(static_cast(i)); + } + CommitBlockListOptions commitBlockListOptions; + commitBlockListOptions.HttpHeaders = options.HttpHeaders; + commitBlockListOptions.Metadata = options.Metadata; + commitBlockListOptions.Tags = options.Tags; + commitBlockListOptions.AccessTier = options.AccessTier; + 
commitBlockListOptions.ImmutabilityPolicy = options.ImmutabilityPolicy; + commitBlockListOptions.HasLegalHold = options.HasLegalHold; + auto commitBlockListResponse = CommitBlockList(blockIds, commitBlockListOptions, context); + + Models::UploadBlockBlobFromResult result; + result.ETag = commitBlockListResponse.Value.ETag; + result.LastModified = commitBlockListResponse.Value.LastModified; + result.VersionId = commitBlockListResponse.Value.VersionId; + result.IsServerEncrypted = commitBlockListResponse.Value.IsServerEncrypted; + result.EncryptionKeySha256 = commitBlockListResponse.Value.EncryptionKeySha256; + result.EncryptionScope = commitBlockListResponse.Value.EncryptionScope; + return Azure::Response(std::move(result), + std::move(commitBlockListResponse.RawResponse)); +} + +Azure::Response TDBlockBlobClient::UploadFromUri( + const std::string& sourceUri, const UploadBlockBlobFromUriOptions& options, + const Azure::Core::Context& context) const { + _detail::BlockBlobClient::UploadBlockBlobFromUriOptions protocolLayerOptions; + protocolLayerOptions.CopySource = sourceUri; + protocolLayerOptions.CopySourceBlobProperties = options.CopySourceBlobProperties; + protocolLayerOptions.BlobContentType = options.HttpHeaders.ContentType; + protocolLayerOptions.BlobContentEncoding = options.HttpHeaders.ContentEncoding; + protocolLayerOptions.BlobContentLanguage = options.HttpHeaders.ContentLanguage; + protocolLayerOptions.BlobContentMD5 = options.HttpHeaders.ContentHash.Value; + protocolLayerOptions.BlobCacheControl = options.HttpHeaders.CacheControl; + protocolLayerOptions.BlobContentDisposition = options.HttpHeaders.ContentDisposition; + protocolLayerOptions.Metadata = std::map(options.Metadata.begin(), options.Metadata.end()); + protocolLayerOptions.BlobTagsString = _detail::TagsToString(options.Tags); + protocolLayerOptions.Tier = options.AccessTier; + protocolLayerOptions.LeaseId = options.AccessConditions.LeaseId; + protocolLayerOptions.IfMatch = options.AccessConditions.IfMatch; + protocolLayerOptions.IfNoneMatch = options.AccessConditions.IfNoneMatch; + protocolLayerOptions.IfModifiedSince = options.AccessConditions.IfModifiedSince; + protocolLayerOptions.IfUnmodifiedSince = options.AccessConditions.IfUnmodifiedSince; + protocolLayerOptions.IfTags = options.AccessConditions.TagConditions; + protocolLayerOptions.SourceIfMatch = options.SourceAccessConditions.IfMatch; + protocolLayerOptions.SourceIfNoneMatch = options.SourceAccessConditions.IfNoneMatch; + protocolLayerOptions.SourceIfModifiedSince = options.SourceAccessConditions.IfModifiedSince; + protocolLayerOptions.SourceIfUnmodifiedSince = options.SourceAccessConditions.IfUnmodifiedSince; + protocolLayerOptions.SourceIfTags = options.SourceAccessConditions.TagConditions; + if (options.TransactionalContentHash.HasValue()) { + if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Md5) { + protocolLayerOptions.SourceContentMD5 = options.TransactionalContentHash.Value().Value; + } else if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Crc64) { + protocolLayerOptions.SourceContentcrc64 = options.TransactionalContentHash.Value().Value; + } + } + if (m_customerProvidedKey.HasValue()) { + protocolLayerOptions.EncryptionKey = m_customerProvidedKey.Value().Key; + protocolLayerOptions.EncryptionKeySha256 = m_customerProvidedKey.Value().KeyHash; + protocolLayerOptions.EncryptionAlgorithm = m_customerProvidedKey.Value().Algorithm.ToString(); + } + protocolLayerOptions.EncryptionScope = m_encryptionScope; + 
protocolLayerOptions.CopySourceTags = options.CopySourceTagsMode; + if (!options.SourceAuthorization.empty()) { + protocolLayerOptions.CopySourceAuthorization = options.SourceAuthorization; + } + + return _detail::BlockBlobClient::UploadFromUri(*m_pipeline, m_blobUrl, protocolLayerOptions, context); +} + +Azure::Response TDBlockBlobClient::StageBlock(const std::string& blockId, + Azure::Core::IO::BodyStream& content, + const StageBlockOptions& options, + const Azure::Core::Context& context) const { + _detail::BlockBlobClient::StageBlockBlobBlockOptions protocolLayerOptions; + protocolLayerOptions.BlockId = blockId; + if (options.TransactionalContentHash.HasValue()) { + if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Md5) { + protocolLayerOptions.TransactionalContentMD5 = options.TransactionalContentHash.Value().Value; + } else if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Crc64) { + protocolLayerOptions.TransactionalContentCrc64 = options.TransactionalContentHash.Value().Value; + } + } + protocolLayerOptions.LeaseId = options.AccessConditions.LeaseId; + if (m_customerProvidedKey.HasValue()) { + protocolLayerOptions.EncryptionKey = m_customerProvidedKey.Value().Key; + protocolLayerOptions.EncryptionKeySha256 = m_customerProvidedKey.Value().KeyHash; + protocolLayerOptions.EncryptionAlgorithm = m_customerProvidedKey.Value().Algorithm.ToString(); + } + protocolLayerOptions.EncryptionScope = m_encryptionScope; + return _detail::BlockBlobClient::StageBlock(*m_pipeline, m_blobUrl, content, protocolLayerOptions, context); +} + +Azure::Response TDBlockBlobClient::StageBlockFromUri( + const std::string& blockId, const std::string& sourceUri, const StageBlockFromUriOptions& options, + const Azure::Core::Context& context) const { + _detail::BlockBlobClient::StageBlockBlobBlockFromUriOptions protocolLayerOptions; + protocolLayerOptions.BlockId = blockId; + protocolLayerOptions.SourceUrl = sourceUri; + if (options.SourceRange.HasValue()) { + std::string rangeStr = "bytes=" + std::to_string(options.SourceRange.Value().Offset) + "-"; + if (options.SourceRange.Value().Length.HasValue()) { + rangeStr += std::to_string(options.SourceRange.Value().Offset + options.SourceRange.Value().Length.Value() - 1); + } + protocolLayerOptions.SourceRange = rangeStr; + } + if (options.TransactionalContentHash.HasValue()) { + if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Md5) { + protocolLayerOptions.SourceContentMD5 = options.TransactionalContentHash.Value().Value; + } else if (options.TransactionalContentHash.Value().Algorithm == HashAlgorithm::Crc64) { + protocolLayerOptions.SourceContentcrc64 = options.TransactionalContentHash.Value().Value; + } + } + protocolLayerOptions.LeaseId = options.AccessConditions.LeaseId; + protocolLayerOptions.SourceIfModifiedSince = options.SourceAccessConditions.IfModifiedSince; + protocolLayerOptions.SourceIfUnmodifiedSince = options.SourceAccessConditions.IfUnmodifiedSince; + protocolLayerOptions.SourceIfMatch = options.SourceAccessConditions.IfMatch; + protocolLayerOptions.SourceIfNoneMatch = options.SourceAccessConditions.IfNoneMatch; + if (m_customerProvidedKey.HasValue()) { + protocolLayerOptions.EncryptionKey = m_customerProvidedKey.Value().Key; + protocolLayerOptions.EncryptionKeySha256 = m_customerProvidedKey.Value().KeyHash; + protocolLayerOptions.EncryptionAlgorithm = m_customerProvidedKey.Value().Algorithm.ToString(); + } + protocolLayerOptions.EncryptionScope = m_encryptionScope; + if 
(!options.SourceAuthorization.empty()) { + protocolLayerOptions.CopySourceAuthorization = options.SourceAuthorization; + } + + return _detail::BlockBlobClient::StageBlockFromUri(*m_pipeline, m_blobUrl, protocolLayerOptions, context); +} + +Azure::Response TDBlockBlobClient::CommitBlockList( + const std::vector& blockIds, const CommitBlockListOptions& options, + const Azure::Core::Context& context) const { + _detail::BlockBlobClient::CommitBlockBlobBlockListOptions protocolLayerOptions; + protocolLayerOptions.Blocks.Latest = blockIds; + protocolLayerOptions.BlobContentType = options.HttpHeaders.ContentType; + protocolLayerOptions.BlobContentEncoding = options.HttpHeaders.ContentEncoding; + protocolLayerOptions.BlobContentLanguage = options.HttpHeaders.ContentLanguage; + protocolLayerOptions.BlobContentMD5 = options.HttpHeaders.ContentHash.Value; + protocolLayerOptions.BlobContentDisposition = options.HttpHeaders.ContentDisposition; + protocolLayerOptions.BlobCacheControl = options.HttpHeaders.CacheControl; + protocolLayerOptions.Metadata = std::map(options.Metadata.begin(), options.Metadata.end()); + protocolLayerOptions.BlobTagsString = _detail::TagsToString(options.Tags); + protocolLayerOptions.Tier = options.AccessTier; + protocolLayerOptions.LeaseId = options.AccessConditions.LeaseId; + protocolLayerOptions.IfModifiedSince = options.AccessConditions.IfModifiedSince; + protocolLayerOptions.IfUnmodifiedSince = options.AccessConditions.IfUnmodifiedSince; + protocolLayerOptions.IfMatch = options.AccessConditions.IfMatch; + protocolLayerOptions.IfNoneMatch = options.AccessConditions.IfNoneMatch; + protocolLayerOptions.IfTags = options.AccessConditions.TagConditions; + if (m_customerProvidedKey.HasValue()) { + protocolLayerOptions.EncryptionKey = m_customerProvidedKey.Value().Key; + protocolLayerOptions.EncryptionKeySha256 = m_customerProvidedKey.Value().KeyHash; + protocolLayerOptions.EncryptionAlgorithm = m_customerProvidedKey.Value().Algorithm.ToString(); + } + protocolLayerOptions.EncryptionScope = m_encryptionScope; + if (options.ImmutabilityPolicy.HasValue()) { + protocolLayerOptions.ImmutabilityPolicyExpiry = options.ImmutabilityPolicy.Value().ExpiresOn; + protocolLayerOptions.ImmutabilityPolicyMode = options.ImmutabilityPolicy.Value().PolicyMode; + } + protocolLayerOptions.LegalHold = options.HasLegalHold; + + return _detail::BlockBlobClient::CommitBlockList(*m_pipeline, m_blobUrl, protocolLayerOptions, context); +} + +Azure::Response TDBlockBlobClient::GetBlockList(const GetBlockListOptions& options, + const Azure::Core::Context& context) const { + _detail::BlockBlobClient::GetBlockBlobBlockListOptions protocolLayerOptions; + protocolLayerOptions.ListType = options.ListType; + protocolLayerOptions.LeaseId = options.AccessConditions.LeaseId; + protocolLayerOptions.IfTags = options.AccessConditions.TagConditions; + return _detail::BlockBlobClient::GetBlockList(*m_pipeline, m_blobUrl, protocolLayerOptions, + _internal::WithReplicaStatus(context)); +} +/* +Azure::Response TDBlockBlobClient::Query(const std::string& querySqlExpression, + const QueryBlobOptions& options, + const Azure::Core::Context& context) const { +_detail::BlobClient::QueryBlobOptions protocolLayerOptions; +protocolLayerOptions.QueryRequest.QueryType = Models::_detail::QueryRequestQueryType::SQL; +protocolLayerOptions.QueryRequest.Expression = querySqlExpression; +if (options.InputTextConfiguration.m_format == Models::_detail::QueryFormatType::Delimited) { + Models::_detail::DelimitedTextConfiguration c; + 
c.RecordSeparator = options.InputTextConfiguration.m_recordSeparator; + c.ColumnSeparator = options.InputTextConfiguration.m_columnSeparator; + c.FieldQuote = options.InputTextConfiguration.m_quotationCharacter; + c.EscapeChar = options.InputTextConfiguration.m_escapeCharacter; + c.HeadersPresent = options.InputTextConfiguration.m_hasHeaders; + Models::_detail::QuerySerialization q; + q.Format.Type = options.InputTextConfiguration.m_format; + q.Format.DelimitedTextConfiguration = std::move(c); + protocolLayerOptions.QueryRequest.InputSerialization = std::move(q); +} else if (options.InputTextConfiguration.m_format == Models::_detail::QueryFormatType::Json) { + Models::_detail::JsonTextConfiguration c; + c.RecordSeparator = options.InputTextConfiguration.m_recordSeparator; + Models::_detail::QuerySerialization q; + q.Format.Type = options.InputTextConfiguration.m_format; + q.Format.JsonTextConfiguration = std::move(c); + protocolLayerOptions.QueryRequest.InputSerialization = std::move(q); +} else if (options.InputTextConfiguration.m_format == Models::_detail::QueryFormatType::Parquet) { + Models::_detail::ParquetConfiguration c; + Models::_detail::QuerySerialization q; + q.Format.Type = options.InputTextConfiguration.m_format; + q.Format.ParquetTextConfiguration = std::move(c); + protocolLayerOptions.QueryRequest.InputSerialization = std::move(q); +} else if (options.InputTextConfiguration.m_format.ToString().empty()) { +} else { + AZURE_UNREACHABLE_CODE(); +} +if (options.OutputTextConfiguration.m_format == Models::_detail::QueryFormatType::Delimited) { + Models::_detail::DelimitedTextConfiguration c; + c.RecordSeparator = options.OutputTextConfiguration.m_recordSeparator; + c.ColumnSeparator = options.OutputTextConfiguration.m_columnSeparator; + c.FieldQuote = options.OutputTextConfiguration.m_quotationCharacter; + c.EscapeChar = options.OutputTextConfiguration.m_escapeCharacter; + c.HeadersPresent = options.OutputTextConfiguration.m_hasHeaders; + Models::_detail::QuerySerialization q; + q.Format.Type = options.OutputTextConfiguration.m_format; + q.Format.DelimitedTextConfiguration = std::move(c); + protocolLayerOptions.QueryRequest.OutputSerialization = std::move(q); +} else if (options.OutputTextConfiguration.m_format == Models::_detail::QueryFormatType::Json) { + Models::_detail::JsonTextConfiguration c; + c.RecordSeparator = options.OutputTextConfiguration.m_recordSeparator; + Models::_detail::QuerySerialization q; + q.Format.Type = options.OutputTextConfiguration.m_format; + q.Format.JsonTextConfiguration = std::move(c); + protocolLayerOptions.QueryRequest.OutputSerialization = std::move(q); +} else if (options.OutputTextConfiguration.m_format == Models::_detail::QueryFormatType::Parquet) { + Models::_detail::ParquetConfiguration c; + Models::_detail::QuerySerialization q; + q.Format.Type = options.OutputTextConfiguration.m_format; + q.Format.ParquetTextConfiguration = std::move(c); + protocolLayerOptions.QueryRequest.OutputSerialization = std::move(q); +} else if (options.OutputTextConfiguration.m_format == Models::_detail::QueryFormatType::Arrow) { + Models::_detail::ArrowConfiguration c; + c.Schema = options.OutputTextConfiguration.m_schema; + Models::_detail::QuerySerialization q; + q.Format.Type = options.OutputTextConfiguration.m_format; + q.Format.ArrowConfiguration = std::move(c); + protocolLayerOptions.QueryRequest.OutputSerialization = std::move(q); +} else if (options.InputTextConfiguration.m_format.ToString().empty()) { +} else { + AZURE_UNREACHABLE_CODE(); +} + 
+protocolLayerOptions.LeaseId = options.AccessConditions.LeaseId; +if (m_customerProvidedKey.HasValue()) { + protocolLayerOptions.EncryptionKey = m_customerProvidedKey.Value().Key; + protocolLayerOptions.EncryptionKeySha256 = m_customerProvidedKey.Value().KeyHash; + protocolLayerOptions.EncryptionAlgorithm = m_customerProvidedKey.Value().Algorithm.ToString(); +} +protocolLayerOptions.EncryptionScope = m_encryptionScope; +protocolLayerOptions.IfModifiedSince = options.AccessConditions.IfModifiedSince; +protocolLayerOptions.IfUnmodifiedSince = options.AccessConditions.IfUnmodifiedSince; +protocolLayerOptions.IfMatch = options.AccessConditions.IfMatch; +protocolLayerOptions.IfNoneMatch = options.AccessConditions.IfNoneMatch; +protocolLayerOptions.IfTags = options.AccessConditions.TagConditions; +auto response = + _detail::BlobClient::Query(*m_pipeline, m_blobUrl, protocolLayerOptions, _internal::WithReplicaStatus(context)); + +const auto statusCode = response.RawResponse->GetStatusCode(); +const auto reasonPhrase = response.RawResponse->GetReasonPhrase(); +const auto requestId = response.RawResponse->GetHeaders().count(_internal::HttpHeaderRequestId) != 0 + ? response.RawResponse->GetHeaders().at(_internal::HttpHeaderRequestId) + : std::string(); + +const auto clientRequestId = response.RawResponse->GetHeaders().count(_internal::HttpHeaderClientRequestId) != 0 + ? response.RawResponse->GetHeaders().at(_internal::HttpHeaderClientRequestId) + : std::string(); + +auto defaultErrorHandler = [statusCode, reasonPhrase, requestId, clientRequestId](BlobQueryError e) { + if (e.IsFatal) { + StorageException exception("Fatal " + e.Name + " at " + std::to_string(e.Position)); + exception.StatusCode = statusCode; + exception.ReasonPhrase = reasonPhrase; + exception.RequestId = requestId; + exception.ClientRequestId = clientRequestId; + exception.ErrorCode = e.Name; + exception.Message = e.Description; + + throw exception; + } +}; + +response.Value.BodyStream = + std::make_unique<_detail::AvroStreamParser>(std::move(response.Value.BodyStream), options.ProgressHandler, + options.ErrorHandler ? options.ErrorHandler : defaultErrorHandler); +return response; +} +*/ +} // namespace Blobs +} // namespace Storage +} // namespace Azure + +#endif diff --git a/source/libs/azure/test/CMakeLists.txt b/source/libs/azure/test/CMakeLists.txt new file mode 100644 index 0000000000..01570df730 --- /dev/null +++ b/source/libs/azure/test/CMakeLists.txt @@ -0,0 +1,18 @@ +aux_source_directory(. 
AZ_TEST_SRC) + +add_executable(azTest ${AZ_TEST_SRC}) +target_include_directories(azTest + PUBLIC + "${TD_SOURCE_DIR}/include/libs/azure" + "${CMAKE_CURRENT_SOURCE_DIR}/../inc" +) + +target_link_libraries(azTest + az + gtest_main +) +enable_testing() +add_test( + NAME az_test + COMMAND azTest +) diff --git a/source/libs/azure/test/azTest.cpp b/source/libs/azure/test/azTest.cpp new file mode 100644 index 0000000000..9e963508f8 --- /dev/null +++ b/source/libs/azure/test/azTest.cpp @@ -0,0 +1,457 @@ +#include +#include +#include +#include +/* +#include "walInt.h" + +const char* ranStr = "tvapq02tcp"; +const int ranStrLen = strlen(ranStr); +SWalSyncInfo syncMeta = {0}; + +class WalCleanEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + + static void TearDownTestCase() { walCleanUp(); } + + void SetUp() override { + taosRemoveDir(pathName); + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->rollPeriod = -1; + pCfg->segSize = -1; + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; + +class WalCleanDeleteEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + + static void TearDownTestCase() { walCleanUp(); } + + void SetUp() override { + taosRemoveDir(pathName); + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; + +class WalKeepEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + + static void TearDownTestCase() { walCleanUp(); } + + void walResetEnv() { + TearDown(); + taosRemoveDir(pathName); + SetUp(); + } + + void SetUp() override { + SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); + memset(pCfg, 0, sizeof(SWalCfg)); + pCfg->rollPeriod = -1; + pCfg->segSize = -1; + pCfg->retentionPeriod = 0; + pCfg->retentionSize = 0; + pCfg->level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, pCfg); + taosMemoryFree(pCfg); + ASSERT(pWal != NULL); + } + + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; + +class WalRetentionEnv : public ::testing::Test { + protected: + static void SetUpTestCase() { + int code = walInit(NULL); + ASSERT(code == 0); + } + + static void TearDownTestCase() { walCleanUp(); } + + void walResetEnv() { + TearDown(); + taosRemoveDir(pathName); + SetUp(); + } + + void SetUp() override { + SWalCfg cfg; + cfg.rollPeriod = -1; + cfg.segSize = -1; + cfg.retentionPeriod = -1; + cfg.retentionSize = 0; + cfg.rollPeriod = 0; + cfg.vgId = 0; + cfg.level = TAOS_WAL_FSYNC; + pWal = walOpen(pathName, &cfg); + ASSERT(pWal != NULL); + } + + void TearDown() override { + walClose(pWal); + pWal = NULL; + } + + SWal* pWal = NULL; + const char* pathName = TD_TMP_DIR_PATH "wal_test"; +}; + 
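+// Editorial note: the fixtures above and the TEST_F cases below were copied from
+// the WAL unit tests and the whole body is wrapped in a block comment, so azTest
+// currently exercises nothing azure-specific. A minimal case against the az
+// interface might look like the sketch below (the local file, object name and
+// read size are hypothetical, and a reachable blob endpoint must be configured):
+//
+//   TEST(azTest, putGetDelete) {
+//     ASSERT_EQ(azBegin(), TSDB_CODE_SUCCESS);
+//     ASSERT_EQ(azPutObjectFromFile2("/tmp/az_ut.bin", "az_ut.bin", 0), 0);
+//     uint8_t *pBlock = NULL;
+//     ASSERT_EQ(azGetObjectBlock("az_ut.bin", 0, 16, true, &pBlock), 0);
+//     taosMemoryFree(pBlock);
+//     azDeleteObjectsByPrefix("az_ut");
+//     azEnd();
+//   }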
+TEST_F(WalCleanEnv, createNew) { + walRollFileInfo(pWal); + ASSERT(pWal->fileInfoSet != NULL); + ASSERT_EQ(pWal->fileInfoSet->size, 1); + SWalFileInfo* pInfo = (SWalFileInfo*)taosArrayGetLast(pWal->fileInfoSet); + ASSERT_EQ(pInfo->firstVer, 0); + ASSERT_EQ(pInfo->lastVer, -1); + ASSERT_EQ(pInfo->closeTs, -1); + ASSERT_EQ(pInfo->fileSize, 0); +} + +TEST_F(WalCleanEnv, serialize) { + int code = walRollFileInfo(pWal); + ASSERT(code == 0); + ASSERT(pWal->fileInfoSet != NULL); + + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + char* ss = NULL; + code = walMetaSerialize(pWal, &ss); + ASSERT(code == 0); + printf("%s\n", ss); + taosMemoryFree(ss); + code = walSaveMeta(pWal); + ASSERT(code == 0); +} + +TEST_F(WalCleanEnv, removeOldMeta) { + int code = walRollFileInfo(pWal); + ASSERT(code == 0); + ASSERT(pWal->fileInfoSet != NULL); + code = walSaveMeta(pWal); + ASSERT(code == 0); + code = walRollFileInfo(pWal); + ASSERT(code == 0); + code = walSaveMeta(pWal); + ASSERT(code == 0); +} + +TEST_F(WalKeepEnv, readOldMeta) { + walResetEnv(); + int code; + + syncMeta.isWeek = -1; + syncMeta.seqNum = UINT64_MAX; + syncMeta.term = UINT64_MAX; + + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); + ASSERT_EQ(pWal->vers.lastVer, i); + } + char* oldss = NULL; + code = walMetaSerialize(pWal, &oldss); + ASSERT(code == 0); + + TearDown(); + SetUp(); + + ASSERT_EQ(pWal->vers.firstVer, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + + char* newss = NULL; + code = walMetaSerialize(pWal, &newss); + ASSERT(code == 0); + + int len = strlen(oldss); + ASSERT_EQ(len, strlen(newss)); + for (int i = 0; i < len; i++) { + EXPECT_EQ(oldss[i], newss[i]); + } + taosMemoryFree(oldss); + taosMemoryFree(newss); +} + +TEST_F(WalCleanEnv, write) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); + ASSERT_EQ(pWal->vers.lastVer, i); + } + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} + +TEST_F(WalCleanEnv, rollback) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + } + code = walRollback(pWal, 12); + ASSERT_NE(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 9); + code = walRollback(pWal, 9); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 8); + code = walRollback(pWal, 5); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 4); + code = walRollback(pWal, 3); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 2); + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} + +TEST_F(WalCleanEnv, rollbackMultiFile) { + int code; + for (int i = 0; i < 10; i++) { + code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + if (i == 5) { + walBeginSnapshot(pWal, i, 0); + walEndSnapshot(pWal); + } + } + code = walRollback(pWal, 12); + ASSERT_NE(code, 0); 
+ ASSERT_EQ(pWal->vers.lastVer, 9); + code = walRollback(pWal, 9); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 8); + code = walRollback(pWal, 6); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 5); + code = walRollback(pWal, 5); + ASSERT_NE(code, 0); + + ASSERT_EQ(pWal->vers.lastVer, 5); + + code = walAppendLog(pWal, 6, 6, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, 6); + + code = walSaveMeta(pWal); + ASSERT_EQ(code, 0); +} + +TEST_F(WalCleanDeleteEnv, roll) { + int code; + int i; + for (i = 0; i < 100; i++) { + code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + ASSERT_EQ(pWal->vers.lastVer, i); + code = walCommit(pWal, i); + ASSERT_EQ(pWal->vers.commitVer, i); + } + + walBeginSnapshot(pWal, i - 1, 0); + ASSERT_EQ(pWal->vers.verInSnapshotting, i - 1); + walEndSnapshot(pWal); + ASSERT_EQ(pWal->vers.snapshotVer, i - 1); + ASSERT_EQ(pWal->vers.verInSnapshotting, -1); + + code = walAppendLog(pWal, 5, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_NE(code, 0); + + for (; i < 200; i++) { + code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); + ASSERT_EQ(code, 0); + code = walCommit(pWal, i); + ASSERT_EQ(pWal->vers.commitVer, i); + } + + code = walBeginSnapshot(pWal, i - 1, 0); + ASSERT_EQ(code, 0); + code = walEndSnapshot(pWal); + ASSERT_EQ(code, 0); +} + +TEST_F(WalKeepEnv, readHandleRead) { + walResetEnv(); + int code; + SWalReader* pRead = walOpenReader(pWal, NULL, 0); + ASSERT(pRead != NULL); + + int i; + for (i = 0; i < 100; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 100; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + walCloseReader(pRead); +} + +TEST_F(WalRetentionEnv, repairMeta1) { + walResetEnv(); + int code; + + int i; + for (i = 0; i < 100; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + + TearDown(); + + // getchar(); + char buf[100]; + sprintf(buf, "%s/meta-ver%d", pathName, 0); + taosRemoveFile(buf); + sprintf(buf, "%s/meta-ver%d", pathName, 1); + taosRemoveFile(buf); + SetUp(); + // getchar(); + + ASSERT_EQ(pWal->vers.lastVer, 99); + + SWalReader* pRead = walOpenReader(pWal, NULL, 0); + ASSERT(pRead != NULL); + + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 100; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], 
pRead->pHead->head.body[j]); + } + } + + for (i = 100; i < 200; i++) { + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, i); + int len = strlen(newStr); + code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); + ASSERT_EQ(code, 0); + } + + for (int i = 0; i < 1000; i++) { + int ver = taosRand() % 200; + code = walReadVer(pRead, ver); + ASSERT_EQ(code, 0); + + // printf("rrbody: \n"); + // for(int i = 0; i < pRead->pHead->head.len; i++) { + // printf("%d ", pRead->pHead->head.body[i]); + //} + // printf("\n"); + + ASSERT_EQ(pRead->pHead->head.version, ver); + ASSERT_EQ(pRead->curVersion, ver + 1); + char newStr[100]; + sprintf(newStr, "%s-%d", ranStr, ver); + int len = strlen(newStr); + ASSERT_EQ(pRead->pHead->head.bodyLen, len); + for (int j = 0; j < len; j++) { + EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); + } + } + walCloseReader(pRead); +} +*/ diff --git a/source/libs/tcs/CMakeLists.txt b/source/libs/tcs/CMakeLists.txt index 4d74dedcd0..1c914a18b9 100644 --- a/source/libs/tcs/CMakeLists.txt +++ b/source/libs/tcs/CMakeLists.txt @@ -9,7 +9,7 @@ target_include_directories( target_link_libraries( tcs - # PUBLIC az + PUBLIC az PUBLIC common # PUBLIC cjson # PUBLIC os diff --git a/source/libs/tcs/src/tcs.c b/source/libs/tcs/src/tcs.c index c5c68c4933..5facffa4ac 100644 --- a/source/libs/tcs/src/tcs.c +++ b/source/libs/tcs/src/tcs.c @@ -19,7 +19,7 @@ #include "taoserror.h" #include "tglobal.h" -//#include "az.h" +#include "az.h" #include "cos.h" extern int8_t tsS3Ablob; @@ -68,7 +68,6 @@ int32_t tcsInit() { tcs.DeleteObjects = s3DeleteObjects; tcs.GetObjectToFile = s3GetObjectToFile; } else if (TOS_PROTO_ABLOB == proto) { - /* tcs.Begin = azBegin; tcs.End = azEnd; tcs.CheckCfg = azCheckCfg; @@ -82,7 +81,7 @@ int32_t tcsInit() { tcs.GetObjectsByPrefix = azGetObjectsByPrefix; tcs.DeleteObjects = azDeleteObjects; tcs.GetObjectToFile = azGetObjectToFile; - */ + } else { code = TSDB_CODE_INVALID_PARA; return code; From 8b73975455809869955d771e45e06b4bc98ffce8 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 10 Oct 2024 18:54:57 +0800 Subject: [PATCH 010/102] test/ci/scan_file_path: ignore az cpp module --- tests/ci/scan_file_path.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/ci/scan_file_path.py b/tests/ci/scan_file_path.py index 2d4e701012..9244d37456 100644 --- a/tests/ci/scan_file_path.py +++ b/tests/ci/scan_file_path.py @@ -129,7 +129,7 @@ def scan_files_path(source_file_path): def input_files(change_files): # scan_dir_list = ["source", "include", "docs/examples", "tests/script/api", "src/plugins"] scan_dir_list = ["source", "include", "docs/examples", "src/plugins"] - scan_skip_file_list = [f"{TD_project_path}/TDinternal/community/tools/taosws-rs/target/release/build/openssl-sys-7811e597b848e397/out/openssl-build/install/include/openssl", "/test/", "contrib", "debug", "deps", f"{TD_project_path}/TDinternal/community/source/libs/parser/src/sql.c", f"{TD_project_path}/TDinternal/community/source/client/jni/windows/win32/bridge/AccessBridgeCalls.c"] + scan_skip_file_list = ["tools/taosws-rs/target/release/build/openssl-sys-7811e597b848e397/out/openssl-build/install/include/openssl", "/test/", "contrib", "debug", "deps", "source/libs/parser/src/sql.c", "source/libs/azure", "source/client/jni/windows/win32/bridge/AccessBridgeCalls.c"] with open(change_files, 'r') as file: for line in file: file_name = line.strip() @@ -141,7 +141,7 @@ def input_files(change_files): tdc_file_path = os.path.join(TD_project_path, "community/") file_name = 
os.path.join(tdc_file_path, file_name) all_file_path.append(file_name) - # print(f"all_file_path:{all_file_path}") + print(f"all_file_path:{all_file_path}") logger.info("Found %s files" % len(all_file_path)) file_res_path = "" From b0625b0ab2cc9e737d69f77495c96da9f8c623c5 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 11 Oct 2024 08:38:44 +0800 Subject: [PATCH 011/102] test system return 1 --- source/libs/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/libs/CMakeLists.txt b/source/libs/CMakeLists.txt index 033582f2c0..41a1e99521 100644 --- a/source/libs/CMakeLists.txt +++ b/source/libs/CMakeLists.txt @@ -23,5 +23,5 @@ add_subdirectory(planner) add_subdirectory(qworker) add_subdirectory(geometry) add_subdirectory(command) -add_subdirectory(azure) +#add_subdirectory(azure) add_subdirectory(tcs) From 7792906622eec722628ae62ba043cfa0f9b90dab Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 11 Oct 2024 08:55:27 +0800 Subject: [PATCH 012/102] test az system 1 --- source/libs/CMakeLists.txt | 2 +- source/libs/azure/src/az.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/source/libs/CMakeLists.txt b/source/libs/CMakeLists.txt index 41a1e99521..033582f2c0 100644 --- a/source/libs/CMakeLists.txt +++ b/source/libs/CMakeLists.txt @@ -23,5 +23,5 @@ add_subdirectory(planner) add_subdirectory(qworker) add_subdirectory(geometry) add_subdirectory(command) -#add_subdirectory(azure) +add_subdirectory(azure) add_subdirectory(tcs) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 83cba1c877..39aadb11b3 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -21,7 +21,7 @@ #include "taoserror.h" #include "tglobal.h" -#if defined(USE_S3) +#if !defined(USE_S3) #include #include From 4d0ca2c4d4453b0248680c25d6cc67c80d76572e Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 11 Oct 2024 13:44:59 +0800 Subject: [PATCH 013/102] rsync: return 0 if no errno --- source/common/src/rsync.c | 8 ++++++-- source/libs/azure/src/az.cpp | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/source/common/src/rsync.c b/source/common/src/rsync.c index 47a452eab7..b5ffae6845 100644 --- a/source/common/src/rsync.c +++ b/source/common/src/rsync.c @@ -160,7 +160,11 @@ int32_t startRsync() { code = system(cmd); if (code != 0) { uError("[rsync] cmd:%s start server failed, code:%d," ERRNO_ERR_FORMAT, cmd, code, ERRNO_ERR_DATA); - code = TAOS_SYSTEM_ERROR(errno); + if (errno == 0) { + return 0; + } else { + code = TAOS_SYSTEM_ERROR(errno); + } } else { uInfo("[rsync] cmd:%s start server successful", cmd); } @@ -358,4 +362,4 @@ int32_t deleteRsync(const char* id) { uDebug("[rsync] delete data:%s successful", id); return 0; -} \ No newline at end of file +} diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 39aadb11b3..83cba1c877 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -21,7 +21,7 @@ #include "taoserror.h" #include "tglobal.h" -#if !defined(USE_S3) +#if defined(USE_S3) #include #include From d081c73a17c3516e0f7f53d20f54ca6a252bd557 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 11 Oct 2024 14:27:58 +0800 Subject: [PATCH 014/102] az: interfaces for stream checkpoints --- source/libs/azure/src/az.cpp | 116 +++++++++++++++++++++++++++++++++-- 1 file changed, 112 insertions(+), 4 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 83cba1c877..453740224d 100644 --- 
a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -365,13 +365,121 @@ void azDeleteObjectsByPrefix(const char *prefix) { } } -int32_t azPutObjectFromFile2(const char *file, const char *object, int8_t withcp) { return 0; } +int32_t azPutObjectFromFile2(const char *file, const char *object, int8_t withcp) { + int32_t code = 0, lino = 0; + uint64_t contentLength = 0; -int32_t azGetObjectsByPrefix(const char *prefix, const char *path) { return 0; } + if (taosStatFile(file, (int64_t *)&contentLength, NULL, NULL) < 0) { + uError("ERROR: %s Failed to stat file %s: ", __func__, file); + TAOS_RETURN(terrno); + } -int32_t azGetObjectToFile(const char *object_name, const char *fileName) { return 0; } + code = azPutObjectFromFileOffset(file, object, 0, contentLength); + if (code != 0) { + uError("ERROR: %s Failed to put file %s: ", __func__, file); + TAOS_CHECK_GOTO(code, &lino, _exit); + } -int32_t azDeleteObjects(const char *object_name[], int nobject) { return 0; } +_exit: + if (code) { + uError("%s failed at line %d since %s", __func__, lino, tstrerror(code)); + } + + return 0; +} + +int32_t azGetObjectToFile(const char *object_name, const char *fileName) { + int32_t code = TSDB_CODE_SUCCESS; + std::string accountName = tsS3AccessKeyId[0]; + std::string accountKey = tsS3AccessKeySecret[0]; + std::string accountURL = tsS3Hostname[0]; + accountURL = "https://" + accountURL; + + try { + auto sharedKeyCredential = std::make_shared(accountName, accountKey); + + StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + + BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); + + std::string containerName = tsS3BucketName; + auto containerClient = blobServiceClient.GetBlobContainerClient(containerName); + + TDBlockBlobClient blobClient(containerClient.GetBlobClient(object_name)); + + auto res = blobClient.DownloadTo(fileName); + if (res.Value.ContentRange.Length.Value() <= 0) { + code = TAOS_SYSTEM_ERROR(EIO); + uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + TAOS_RETURN(code); + } + } catch (const Azure::Core::RequestFailedException &e) { + uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); + code = TAOS_SYSTEM_ERROR(EIO); + uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + TAOS_RETURN(code); + } + + TAOS_RETURN(code); +} + +int32_t azGetObjectsByPrefix(const char *prefix, const char *path) { + const std::string delimiter = "/"; + std::string accountName = tsS3AccessKeyId[0]; + std::string accountKey = tsS3AccessKeySecret[0]; + std::string accountURL = tsS3Hostname[0]; + accountURL = "https://" + accountURL; + + try { + auto sharedKeyCredential = std::make_shared(accountName, accountKey); + + StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + + BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); + + std::string containerName = tsS3BucketName; + auto containerClient = blobServiceClient.GetBlobContainerClient(containerName); + + Azure::Storage::Blobs::ListBlobsOptions options; + options.Prefix = prefix; + + std::set listBlobs; + for (auto pageResult = containerClient.ListBlobs(options); pageResult.HasPage(); pageResult.MoveToNextPage()) { + for (const auto &blob : pageResult.Blobs) { + listBlobs.insert(blob.Name); + } + } + + for (auto blobName : listBlobs) { + const char *tmp = strchr(blobName.c_str(), '/'); + tmp = 
(tmp == NULL) ? blobName.c_str() : tmp + 1; + char fileName[PATH_MAX] = {0}; + if (path[strlen(path) - 1] != TD_DIRSEP_CHAR) { + (void)snprintf(fileName, PATH_MAX, "%s%s%s", path, TD_DIRSEP, tmp); + } else { + (void)snprintf(fileName, PATH_MAX, "%s%s", path, tmp); + } + if (!azGetObjectToFile(blobName.c_str(), fileName)) { + TAOS_RETURN(TSDB_CODE_FAILED); + } + } + } catch (const Azure::Core::RequestFailedException &e) { + uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); + TAOS_RETURN(TSDB_CODE_FAILED); + } + + return 0; +} + +int32_t azDeleteObjects(const char *object_name[], int nobject) { + for (int i = 0; i < nobject; ++i) { + azDeleteObjectsByPrefix(object_name[i]); + } + + return 0; +} #else From 3f403569eb7de14c59c4a6f280a4ded70ade2a76 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 11 Oct 2024 15:57:47 +0800 Subject: [PATCH 015/102] tcs/stream: use tcs interface instead of s3 --- source/libs/stream/CMakeLists.txt | 1 + source/libs/stream/src/streamCheckpoint.c | 62 +++++++++++------------ 2 files changed, 32 insertions(+), 31 deletions(-) diff --git a/source/libs/stream/CMakeLists.txt b/source/libs/stream/CMakeLists.txt index b63a8b3900..f08b16f836 100644 --- a/source/libs/stream/CMakeLists.txt +++ b/source/libs/stream/CMakeLists.txt @@ -3,6 +3,7 @@ add_library(stream STATIC ${STREAM_SRC}) target_include_directories( stream PUBLIC "${TD_SOURCE_DIR}/include/libs/stream" + PUBLIC "${TD_SOURCE_DIR}/include/libs/tcs" PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) diff --git a/source/libs/stream/src/streamCheckpoint.c b/source/libs/stream/src/streamCheckpoint.c index e44bca123b..3b730e40cc 100644 --- a/source/libs/stream/src/streamCheckpoint.c +++ b/source/libs/stream/src/streamCheckpoint.c @@ -13,10 +13,10 @@ * along with this program. If not, see . */ -#include "cos.h" #include "rsync.h" #include "streamBackendRocksdb.h" #include "streamInt.h" +#include "tcs.h" static int32_t downloadCheckpointDataByName(const char* id, const char* fname, const char* dstName); static int32_t deleteCheckpointFile(const char* id, const char* name); @@ -343,7 +343,7 @@ int32_t streamProcessCheckpointTriggerBlock(SStreamTask* pTask, SStreamDataBlock // And if we don't start a new timer, and the lost of checkpoint-trigger message may cause the whole checkpoint // procedure to be stucked. 
SStreamTmrInfo* pTmrInfo = &pActiveInfo->chkptTriggerMsgTmr; - int8_t old = atomic_val_compare_exchange_8(&pTmrInfo->isActive, 0, 1); + int8_t old = atomic_val_compare_exchange_8(&pTmrInfo->isActive, 0, 1); if (old == 0) { int32_t ref = atomic_add_fetch_32(&pTask->status.timerActive, 1); stDebug("s-task:%s start checkpoint-trigger monitor in 10s, ref:%d ", pTask->id.idStr, ref); @@ -351,7 +351,7 @@ int32_t streamProcessCheckpointTriggerBlock(SStreamTask* pTask, SStreamDataBlock streamTmrStart(checkpointTriggerMonitorFn, 200, pTask, streamTimer, &pTmrInfo->tmrHandle, vgId, "trigger-recv-monitor"); pTmrInfo->launchChkptId = pActiveInfo->activeId; - } else { // already launched, do nothing + } else { // already launched, do nothing stError("s-task:%s previous checkpoint-trigger monitor tmr is set, not start new one", pTask->id.idStr); } } @@ -372,10 +372,10 @@ int32_t streamProcessCheckpointTriggerBlock(SStreamTask* pTask, SStreamDataBlock if (type == TASK_OUTPUT__FIXED_DISPATCH || type == TASK_OUTPUT__SHUFFLE_DISPATCH) { stDebug("s-task:%s set childIdx:%d, and add checkpoint-trigger block into outputQ", id, pTask->info.selfChildId); - code = continueDispatchCheckpointTriggerBlock(pBlock, pTask); // todo handle this failure + code = continueDispatchCheckpointTriggerBlock(pBlock, pTask); // todo handle this failure } else { // only one task exists, no need to dispatch downstream info - code = appendCheckpointIntoInputQ(pTask, STREAM_INPUT__CHECKPOINT, pActiveInfo->activeId, pActiveInfo->transId, - -1); + code = + appendCheckpointIntoInputQ(pTask, STREAM_INPUT__CHECKPOINT, pActiveInfo->activeId, pActiveInfo->transId, -1); streamFreeQitem((SStreamQueueItem*)pBlock); } } else if (taskLevel == TASK_LEVEL__SINK || taskLevel == TASK_LEVEL__AGG) { @@ -398,8 +398,8 @@ int32_t streamProcessCheckpointTriggerBlock(SStreamTask* pTask, SStreamDataBlock if (taskLevel == TASK_LEVEL__SINK) { stDebug("s-task:%s process checkpoint-trigger block, all %d upstreams sent, send ready msg to upstream", id, num); streamFreeQitem((SStreamQueueItem*)pBlock); - code = streamTaskBuildCheckpoint(pTask); // todo: not handle error yet - } else { // source & agg tasks need to forward the checkpoint msg downwards + code = streamTaskBuildCheckpoint(pTask); // todo: not handle error yet + } else { // source & agg tasks need to forward the checkpoint msg downwards stDebug("s-task:%s process checkpoint-trigger block, all %d upstreams sent, forwards to downstream", id, num); code = flushStateDataInExecutor(pTask, (SStreamQueueItem*)pBlock); if (code) { @@ -444,7 +444,7 @@ static int32_t processCheckpointReadyHelp(SActiveCheckpointInfo* pInfo, int32_t .transId = pInfo->transId, .streamId = streamId, .downstreamNodeId = downstreamNodeId}; - void* p = taosArrayPush(pInfo->pCheckpointReadyRecvList, &info); + void* p = taosArrayPush(pInfo->pCheckpointReadyRecvList, &info); if (p == NULL) { stError("s-task:%s failed to set checkpoint ready recv msg, code:%s", id, tstrerror(terrno)); return terrno; @@ -559,8 +559,8 @@ void streamTaskClearCheckInfo(SStreamTask* pTask, bool clearChkpReadyMsg) { } streamMutexUnlock(&pInfo->lock); - stDebug("s-task:%s clear active checkpointInfo, failed checkpointId:%"PRId64", current checkpointId:%"PRId64, - pTask->id.idStr, pInfo->failedId, pTask->chkInfo.checkpointId); + stDebug("s-task:%s clear active checkpointInfo, failed checkpointId:%" PRId64 ", current checkpointId:%" PRId64, + pTask->id.idStr, pInfo->failedId, pTask->chkInfo.checkpointId); } int32_t streamTaskUpdateTaskCheckpointInfo(SStreamTask* 
pTask, bool restored, SVUpdateCheckpointInfoReq* pReq) { @@ -574,8 +574,7 @@ int32_t streamTaskUpdateTaskCheckpointInfo(SStreamTask* pTask, bool restored, SV if (pReq->checkpointId <= pInfo->checkpointId) { stDebug("s-task:%s vgId:%d latest checkpointId:%" PRId64 " Ver:%" PRId64 - " no need to update checkpoint info, updated checkpointId:%" PRId64 " Ver:%" PRId64 - " transId:%d ignored", + " no need to update checkpoint info, updated checkpointId:%" PRId64 " Ver:%" PRId64 " transId:%d ignored", id, vgId, pInfo->checkpointId, pInfo->checkpointVer, pReq->checkpointId, pReq->checkpointVer, pReq->transId); streamMutexUnlock(&pTask->lock); @@ -622,7 +621,7 @@ int32_t streamTaskUpdateTaskCheckpointInfo(SStreamTask* pTask, bool restored, SV } bool valid = (pInfo->checkpointId <= pReq->checkpointId && pInfo->checkpointVer <= pReq->checkpointVer && - pInfo->processedVer <= pReq->checkpointVer); + pInfo->processedVer <= pReq->checkpointVer); if (!valid) { stFatal("invalid checkpoint id check, current checkpointId:%" PRId64 " checkpointVer:%" PRId64 @@ -907,7 +906,7 @@ static int32_t doChkptStatusCheck(SStreamTask* pTask) { if (pTmrInfo->launchChkptId != pActiveInfo->activeId) { int32_t ref = streamCleanBeforeQuitTmr(pTmrInfo, pTask); stWarn("s-task:%s vgId:%d checkpoint-trigger retrieve by previous checkpoint procedure, checkpointId:%" PRId64 - ", quit, ref:%d", + ", quit, ref:%d", id, vgId, pTmrInfo->launchChkptId, ref); return -1; } @@ -1004,7 +1003,7 @@ void checkpointTriggerMonitorFn(void* param, void* tmrId) { int32_t numOfNotSend = 0; SActiveCheckpointInfo* pActiveInfo = pTask->chkInfo.pActiveInfo; - SStreamTmrInfo* pTmrInfo = &pActiveInfo->chkptTriggerMsgTmr; + SStreamTmrInfo* pTmrInfo = &pActiveInfo->chkptTriggerMsgTmr; if (pTask->info.taskLevel == TASK_LEVEL__SOURCE) { int32_t ref = streamCleanBeforeQuitTmr(pTmrInfo, pTask); @@ -1022,7 +1021,8 @@ void checkpointTriggerMonitorFn(void* param, void* tmrId) { } if (++pTmrInfo->activeCounter < 50) { - streamTmrStart(checkpointTriggerMonitorFn, 200, pTask, streamTimer, &pTmrInfo->tmrHandle, vgId, "trigger-recv-monitor"); + streamTmrStart(checkpointTriggerMonitorFn, 200, pTask, streamTimer, &pTmrInfo->tmrHandle, vgId, + "trigger-recv-monitor"); return; } @@ -1200,8 +1200,8 @@ int32_t streamTaskInitTriggerDispatchInfo(SStreamTask* pTask) { STaskDispatcherFixed* pDispatch = &pTask->outputInfo.fixedDispatcher; STaskTriggerSendInfo p = {.sendTs = now, .recved = false, .nodeId = pDispatch->nodeId, .taskId = pDispatch->taskId}; - void* px = taosArrayPush(pInfo->pDispatchTriggerList, &p); - if (px == NULL) { // pause the stream task, if memory not enough + void* px = taosArrayPush(pInfo->pDispatchTriggerList, &p); + if (px == NULL) { // pause the stream task, if memory not enough code = terrno; } } else { @@ -1212,8 +1212,8 @@ int32_t streamTaskInitTriggerDispatchInfo(SStreamTask* pTask) { } STaskTriggerSendInfo p = {.sendTs = now, .recved = false, .nodeId = pVgInfo->vgId, .taskId = pVgInfo->taskId}; - void* px = taosArrayPush(pInfo->pDispatchTriggerList, &p); - if (px == NULL) { // pause the stream task, if memory not enough + void* px = taosArrayPush(pInfo->pDispatchTriggerList, &p); + if (px == NULL) { // pause the stream task, if memory not enough code = terrno; break; } @@ -1287,11 +1287,11 @@ void streamTaskSetTriggerDispatchConfirmed(SStreamTask* pTask, int32_t vgId) { static int32_t uploadCheckpointToS3(const char* id, const char* path) { int32_t code = 0; int32_t nBytes = 0; - + /* if (s3Init() != 0) { return TSDB_CODE_THIRDPARTY_ERROR; } - + */ 
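  /* Illustrative sketch (not applied by this patch): the tcs wrapper calls that the
   * checkpoint upload/download paths below switch to. tcsPutObjectFromFile2 and the
   * "[tcs]" log messages are taken from this hunk; the helper name
   * demoUploadCheckpointFile is hypothetical. Assumes "tcs.h" (included above in this
   * patch) and the stream log macros already used in this file. */
  static int32_t demoUploadCheckpointFile(const char* filename, const char* object) {
    // tcsInit() selects the backend (S3 or Azure ablob); callers keep one code path
    int32_t code = tcsPutObjectFromFile2(filename, object, 0);
    if (code != 0) {
      stError("[tcs] failed to upload checkpoint:%s, reason:%s", filename, tstrerror(code));
    } else {
      stDebug("[tcs] upload checkpoint:%s", filename);
    }
    return code;
  }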
TdDirPtr pDir = taosOpenDir(path); if (pDir == NULL) { return terrno; @@ -1324,11 +1324,11 @@ static int32_t uploadCheckpointToS3(const char* id, const char* path) { break; } - code = s3PutObjectFromFile2(filename, object, 0); + code = tcsPutObjectFromFile2(filename, object, 0); if (code != 0) { - stError("[s3] failed to upload checkpoint:%s, reason:%s", filename, tstrerror(code)); + stError("[tcs] failed to upload checkpoint:%s, reason:%s", filename, tstrerror(code)); } else { - stDebug("[s3] upload checkpoint:%s", filename); + stDebug("[tcs] upload checkpoint:%s", filename); } } @@ -1354,7 +1354,7 @@ int32_t downloadCheckpointByNameS3(const char* id, const char* fname, const char taosMemoryFree(buf); return TSDB_CODE_OUT_OF_RANGE; } - int32_t code = s3GetObjectToFile(buf, dstName); + int32_t code = tcsGetObjectToFile(buf, dstName); if (code != 0) { taosMemoryFree(buf); return TAOS_SYSTEM_ERROR(errno); @@ -1417,7 +1417,7 @@ int32_t streamTaskDownloadCheckpointData(const char* id, char* path, int64_t che if (strlen(tsSnodeAddress) != 0) { return downloadByRsync(id, path, checkpointId); } else if (tsS3StreamEnabled) { - return s3GetObjectsByPrefix(id, path); + return tcsGetObjectsByPrefix(id, path); } return 0; @@ -1431,7 +1431,7 @@ int32_t deleteCheckpoint(const char* id) { if (strlen(tsSnodeAddress) != 0) { return deleteRsync(id); } else if (tsS3StreamEnabled) { - s3DeleteObjectsByPrefix(id); + tcsDeleteObjectsByPrefix(id); } return 0; } @@ -1445,7 +1445,7 @@ int32_t deleteCheckpointFile(const char* id, const char* name) { } char* tmp = object; - int32_t code = s3DeleteObjects((const char**)&tmp, 1); + int32_t code = tcsDeleteObjects((const char**)&tmp, 1); if (code != 0) { return TSDB_CODE_THIRDPARTY_ERROR; } @@ -1487,4 +1487,4 @@ int32_t streamTaskSendCheckpointsourceRsp(SStreamTask* pTask) { streamMutexUnlock(&pTask->lock); return code; -} \ No newline at end of file +} From f6f278d8126ac6db16c5137adb23250ce982c097 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Sat, 12 Oct 2024 16:37:31 +0800 Subject: [PATCH 016/102] test/ablob: integration test form az --- tests/army/storage/blob/ablob.py | 344 ++++++++++++++++++++++++++ tests/army/storage/blob/s3Basic.json | 66 +++++ tests/army/storage/blob/s3Basic1.json | 66 +++++ 3 files changed, 476 insertions(+) create mode 100644 tests/army/storage/blob/ablob.py create mode 100644 tests/army/storage/blob/s3Basic.json create mode 100644 tests/army/storage/blob/s3Basic1.json diff --git a/tests/army/storage/blob/ablob.py b/tests/army/storage/blob/ablob.py new file mode 100644 index 0000000000..fae492a3df --- /dev/null +++ b/tests/army/storage/blob/ablob.py @@ -0,0 +1,344 @@ +################################################################### +# Copyright (c) 2016 by TAOS Technologies, Inc. +# All rights reserved. +# +# This file is proprietary and confidential to TAOS Technologies. 
+# No part of this file may be reproduced, stored, transmitted, +# disclosed or used in any form or by any means other than as +# expressly provided by the written permission from Jianhui Tao +# +################################################################### + +# -*- coding: utf-8 -*- + +import sys +import time +import random + +import taos +import frame +import frame.etool +import frame.eos +import frame.eutil + +from frame.log import * +from frame.cases import * +from frame.sql import * +from frame.caseBase import * +from frame.srvCtl import * +from frame import * +from frame.eos import * + + +class TDTestCase(TBase): + index = eutil.cpuRand(20) + 1 + bucketName = f"ci-bucket{index}" + updatecfgDict = { + "supportVnodes":"1000", + 's3EndPoint': 'https://.blob.core.windows.net', + 's3AccessKey': ':', + 's3BucketName': '', + 's3PageCacheSize': '10240', + "s3UploadDelaySec": "10", + 's3MigrateIntervalSec': '600', + 's3MigrateEnabled': '1' + } + + tdLog.info(f"assign bucketName is {bucketName}\n") + maxFileSize = (128 + 10) * 1014 * 1024 # add 10M buffer + + def insertData(self): + tdLog.info(f"insert data.") + # taosBenchmark run + json = etool.curFile(__file__, "s3Basic.json") + etool.benchMark(json=json) + + tdSql.execute(f"use {self.db}") + # come from s3_basic.json + self.childtable_count = 6 + self.insert_rows = 2000000 + self.timestamp_step = 100 + + def createStream(self, sname): + sql = f"create stream {sname} fill_history 1 into stm1 as select count(*) from {self.db}.{self.stb} interval(10s);" + tdSql.execute(sql) + + def migrateDbS3(self): + sql = f"s3migrate database {self.db}" + tdSql.execute(sql, show=True) + + def checkDataFile(self, lines, maxFileSize): + # ls -l + # -rwxrwxrwx 1 root root 41652224 Apr 17 14:47 vnode2/tsdb/v2f1974ver47.3.data + overCnt = 0 + for line in lines: + cols = line.split() + fileSize = int(cols[4]) + fileName = cols[8] + #print(f" filesize={fileSize} fileName={fileName} line={line}") + if fileSize > maxFileSize: + tdLog.info(f"error, {fileSize} over max size({maxFileSize}) {fileName}\n") + overCnt += 1 + else: + tdLog.info(f"{fileName}({fileSize}) check size passed.") + + return overCnt + + def checkUploadToS3(self): + rootPath = sc.clusterRootPath() + cmd = f"ls -l {rootPath}/dnode*/data/vnode/vnode*/tsdb/*.data" + tdLog.info(cmd) + loop = 0 + rets = [] + overCnt = 0 + while loop < 200: + time.sleep(3) + + # check upload to s3 + rets = eos.runRetList(cmd) + cnt = len(rets) + if cnt == 0: + overCnt = 0 + tdLog.info("All data file upload to server over.") + break + overCnt = self.checkDataFile(rets, self.maxFileSize) + if overCnt == 0: + uploadOK = True + tdLog.info(f"All data files({len(rets)}) size bellow {self.maxFileSize}, check upload to s3 ok.") + break + + tdLog.info(f"loop={loop} no upload {overCnt} data files wait 3s retry ...") + if loop == 3: + sc.dnodeStop(1) + time.sleep(2) + sc.dnodeStart(1) + loop += 1 + # migrate + self.migrateDbS3() + + # check can pass + if overCnt > 0: + tdLog.exit(f"s3 have {overCnt} files over size.") + + + def doAction(self): + tdLog.info(f"do action.") + + self.flushDb(show=True) + #self.compactDb(show=True) + + # sleep 70s + self.migrateDbS3() + + # check upload to s3 + self.checkUploadToS3() + + def checkStreamCorrect(self): + sql = f"select count(*) from {self.db}.stm1" + count = 0 + for i in range(120): + tdSql.query(sql) + count = tdSql.getData(0, 0) + if count == 100000 or count == 100001: + return True + time.sleep(1) + + tdLog.exit(f"stream count is not expect . 
expect = 100000 or 100001 real={count} . sql={sql}") + + + def checkCreateDb(self, keepLocal, chunkSize, compact): + # keyword + kw1 = kw2 = kw3 = "" + if keepLocal is not None: + kw1 = f"s3_keeplocal {keepLocal}" + if chunkSize is not None: + kw2 = f"s3_chunksize {chunkSize}" + if compact is not None: + kw3 = f"s3_compact {compact}" + + sql = f" create database db1 vgroups 1 duration 1h {kw1} {kw2} {kw3}" + tdSql.execute(sql, show=True) + #sql = f"select name,s3_keeplocal,s3_chunksize,s3_compact from information_schema.ins_databases where name='db1';" + sql = f"select * from information_schema.ins_databases where name='db1';" + tdSql.query(sql) + # 29 30 31 -> chunksize keeplocal compact + if chunkSize is not None: + tdSql.checkData(0, 29, chunkSize) + if keepLocal is not None: + keepLocalm = keepLocal * 24 * 60 + tdSql.checkData(0, 30, f"{keepLocalm}m") + if compact is not None: + tdSql.checkData(0, 31, compact) + sql = "drop database db1" + tdSql.execute(sql) + + def checkExcept(self): + # errors + sqls = [ + f"create database db2 s3_keeplocal -1", + f"create database db2 s3_keeplocal 0", + f"create database db2 s3_keeplocal 365001", + f"create database db2 s3_chunksize -1", + f"create database db2 s3_chunksize 0", + f"create database db2 s3_chunksize 900000000", + f"create database db2 s3_compact -1", + f"create database db2 s3_compact 100", + f"create database db2 duration 1d s3_keeplocal 1d" + ] + tdSql.errors(sqls) + + + def checkBasic(self): + # create db + keeps = [1, 256, 1024, 365000, None] + chunks = [131072, 600000, 820000, 1048576, None] + comps = [0, 1, None] + + for keep in keeps: + for chunk in chunks: + for comp in comps: + self.checkCreateDb(keep, chunk, comp) + + + # --checks3 + idx = 1 + taosd = sc.taosdFile(idx) + cfg = sc.dnodeCfgPath(idx) + cmd = f"{taosd} -c {cfg} --checks3" + + eos.exe(cmd) + #output, error = eos.run(cmd) + #print(lines) + + ''' + tips = [ + "put object s3test.txt: success", + "listing bucket ci-bucket: success", + "get object s3test.txt: success", + "delete object s3test.txt: success" + ] + pos = 0 + for tip in tips: + pos = output.find(tip, pos) + #if pos == -1: + # tdLog.exit(f"checks3 failed not found {tip}. 
cmd={cmd} output={output}") + ''' + + # except + self.checkExcept() + + # + def preDb(self, vgroups): + cnt = int(time.time())%2 + 1 + for i in range(cnt): + vg = eutil.cpuRand(9) + 1 + sql = f"create database predb vgroups {vg}" + tdSql.execute(sql, show=True) + sql = "drop database predb" + tdSql.execute(sql, show=True) + + # history + def insertHistory(self): + tdLog.info(f"insert history data.") + # taosBenchmark run + json = etool.curFile(__file__, "s3Basic1.json") + etool.benchMark(json=json) + + # come from s3_basic.json + self.insert_rows += self.insert_rows/4 + self.timestamp_step = 50 + + # delete + def checkDelete(self): + # del 1000 rows + start = 1600000000000 + drows = 200 + for i in range(1, drows, 2): + sql = f"from {self.db}.{self.stb} where ts = {start + i*500}" + tdSql.execute("delete " + sql, show=True) + tdSql.query("select * " + sql) + tdSql.checkRows(0) + + # delete all 500 step + self.flushDb() + self.compactDb() + self.insert_rows -= drows/2 + sql = f"select count(*) from {self.db}.{self.stb}" + tdSql.checkAgg(sql, self.insert_rows * self.childtable_count) + + # delete 10W rows from 100000 + drows = 100000 + sdel = start + 100000 * self.timestamp_step + edel = start + 100000 * self.timestamp_step + drows * self.timestamp_step + sql = f"from {self.db}.{self.stb} where ts >= {sdel} and ts < {edel}" + tdSql.execute("delete " + sql, show=True) + tdSql.query("select * " + sql) + tdSql.checkRows(0) + + self.insert_rows -= drows + sql = f"select count(*) from {self.db}.{self.stb}" + tdSql.checkAgg(sql, self.insert_rows * self.childtable_count) + + + # run + def run(self): + tdLog.debug(f"start to excute {__file__}") + self.sname = "stream1" + if eos.isArm64Cpu(): + tdLog.success(f"{__file__} arm64 ignore executed") + else: + + self.preDb(10) + + # insert data + self.insertData() + + # creat stream + self.createStream(self.sname) + + # check insert data correct + #self.checkInsertCorrect() + + # save + self.snapshotAgg() + + # do action + self.doAction() + + # check save agg result correct + self.checkAggCorrect() + + # check insert correct again + self.checkInsertCorrect() + + + # check stream correct and drop stream + #self.checkStreamCorrect() + + # drop stream + self.dropStream(self.sname) + + # insert history disorder data + self.insertHistory() + + # checkBasic + self.checkBasic() + + #self.checkInsertCorrect() + self.snapshotAgg() + self.doAction() + self.checkAggCorrect() + self.checkInsertCorrect(difCnt=self.childtable_count*1499999) + self.checkDelete() + self.doAction() + + # drop database and free s3 file + self.dropDb() + + + tdLog.success(f"{__file__} successfully executed") + + + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) diff --git a/tests/army/storage/blob/s3Basic.json b/tests/army/storage/blob/s3Basic.json new file mode 100644 index 0000000000..ee341b2096 --- /dev/null +++ b/tests/army/storage/blob/s3Basic.json @@ -0,0 +1,66 @@ +{ + "filetype": "insert", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "connection_pool_size": 8, + "num_of_records_per_req": 4000, + "prepared_rand": 500, + "thread_count": 4, + "create_table_thread_count": 1, + "confirm_parameter_prompt": "no", + "databases": [ + { + "dbinfo": { + "name": "db", + "drop": "yes", + "vgroups": 2, + "replica": 1, + "duration":"10d", + "s3_keeplocal":"30d", + "s3_chunksize":"131072", + "tsdb_pagesize":"1", + "s3_compact":"1", + "wal_retention_size":"1", + "wal_retention_period":"1", + 
"flush_each_batch":"no", + "keep": "3650d" + }, + "super_tables": [ + { + "name": "stb", + "child_table_exists": "no", + "childtable_count": 6, + "insert_rows": 2000000, + "childtable_prefix": "d", + "insert_mode": "taosc", + "timestamp_step": 100, + "start_timestamp": 1600000000000, + "columns": [ + { "type": "bool", "name": "bc"}, + { "type": "float", "name": "fc" }, + { "type": "double", "name": "dc"}, + { "type": "tinyint", "name": "ti"}, + { "type": "smallint", "name": "si" }, + { "type": "int", "name": "ic" ,"max": 1,"min": 1}, + { "type": "bigint", "name": "bi" }, + { "type": "utinyint", "name": "uti"}, + { "type": "usmallint", "name": "usi"}, + { "type": "uint", "name": "ui" }, + { "type": "ubigint", "name": "ubi"}, + { "type": "binary", "name": "bin", "len": 50}, + { "type": "nchar", "name": "nch", "len": 100} + ], + "tags": [ + {"type": "tinyint", "name": "groupid","max": 10,"min": 1}, + {"name": "location","type": "binary", "len": 16, "values": + ["San Francisco", "Los Angles", "San Diego", "San Jose", "Palo Alto", "Campbell", "Mountain View","Sunnyvale", "Santa Clara", "Cupertino"] + } + ] + } + ] + } + ] +} diff --git a/tests/army/storage/blob/s3Basic1.json b/tests/army/storage/blob/s3Basic1.json new file mode 100644 index 0000000000..02be308443 --- /dev/null +++ b/tests/army/storage/blob/s3Basic1.json @@ -0,0 +1,66 @@ +{ + "filetype": "insert", + "cfgdir": "/etc/taos", + "host": "127.0.0.1", + "port": 6030, + "user": "root", + "password": "taosdata", + "connection_pool_size": 8, + "num_of_records_per_req": 5000, + "prepared_rand": 500, + "thread_count": 4, + "create_table_thread_count": 1, + "confirm_parameter_prompt": "no", + "databases": [ + { + "dbinfo": { + "name": "db", + "drop": "no", + "vgroups": 2, + "replica": 1, + "duration":"10d", + "s3_keeplocal":"30d", + "s3_chunksize":"131072", + "tsdb_pagesize":"1", + "s3_compact":"1", + "wal_retention_size":"1", + "wal_retention_period":"1", + "flush_each_batch":"no", + "keep": "3650d" + }, + "super_tables": [ + { + "name": "stb", + "child_table_exists": "yes", + "childtable_count": 6, + "insert_rows": 1000000, + "childtable_prefix": "d", + "insert_mode": "taosc", + "timestamp_step": 50, + "start_timestamp": 1600000000000, + "columns": [ + { "type": "bool", "name": "bc"}, + { "type": "float", "name": "fc" }, + { "type": "double", "name": "dc"}, + { "type": "tinyint", "name": "ti"}, + { "type": "smallint", "name": "si" }, + { "type": "int", "name": "ic" ,"max": 1,"min": 1}, + { "type": "bigint", "name": "bi" }, + { "type": "utinyint", "name": "uti"}, + { "type": "usmallint", "name": "usi"}, + { "type": "uint", "name": "ui" }, + { "type": "ubigint", "name": "ubi"}, + { "type": "binary", "name": "bin", "len": 50}, + { "type": "nchar", "name": "nch", "len": 100} + ], + "tags": [ + {"type": "tinyint", "name": "groupid","max": 10,"min": 1}, + {"name": "location","type": "binary", "len": 16, "values": + ["San Francisco", "Los Angles", "San Diego", "San Jose", "Palo Alto", "Campbell", "Mountain View","Sunnyvale", "Santa Clara", "Cupertino"] + } + ] + } + ] + } + ] +} From 18cd7fd01cc8459a0e372048132cc247e40dc391 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Mon, 14 Oct 2024 15:34:31 +0800 Subject: [PATCH 017/102] az/get object block: use random 1~3 seconds for retries --- source/libs/azure/src/az.cpp | 50 ++++++++++++++++++++++++------------ 1 file changed, 33 insertions(+), 17 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 453740224d..0af669c56a 100644 --- a/source/libs/azure/src/az.cpp 
+++ b/source/libs/azure/src/az.cpp @@ -268,18 +268,19 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int TAOS_RETURN(code); } -int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { +int32_t azGetObjectBlockImpl(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { int32_t code = TSDB_CODE_SUCCESS; std::string accountName = tsS3AccessKeyId[0]; std::string accountKey = tsS3AccessKeySecret[0]; std::string accountURL = tsS3Hostname[0]; - accountURL = "https://" + accountURL; + uint8_t *buf = NULL; try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + accountURL = "https://" + accountURL; BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); std::string containerName = tsS3BucketName; @@ -287,26 +288,15 @@ int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, TDBlockBlobClient blobClient(containerClient.GetBlobClient(object_name)); - uint8_t *buf = (uint8_t *)taosMemoryCalloc(1, size); - if (!buf) { - return terrno; - } - Blobs::DownloadBlobToOptions options; - // options.TransferOptions.Concurrency = concurrency; - // if (offset.HasValue() || length.HasValue()) { options.Range = Azure::Core::Http::HttpRange(); options.Range.Value().Offset = offset; options.Range.Value().Length = size; - //} - /* - if (initialChunkSize.HasValue()) { - options.TransferOptions.InitialChunkSize = initialChunkSize.Value(); + + buf = (uint8_t *)taosMemoryCalloc(1, size); + if (!buf) { + return terrno; } - if (chunkSize.HasValue()) { - options.TransferOptions.ChunkSize = chunkSize.Value(); - } - */ auto res = blobClient.DownloadTo(buf, size, options); if (check && res.Value.ContentRange.Length.Value() != size) { @@ -321,12 +311,38 @@ int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, e.ReasonPhrase.c_str()); code = TAOS_SYSTEM_ERROR(EIO); uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + + if (buf) { + taosMemoryFree(buf); + } + *ppBlock = NULL; + TAOS_RETURN(code); } TAOS_RETURN(code); } +int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { + int32_t code = TSDB_CODE_SUCCESS; + + // May use an exponential backoff policy for retries with 503 + int retryCount = 0; + static int maxRetryCount = 5; + static int minRetryInterval = 1000; // ms + static int maxRetryInterval = 3000; // ms + +_retry: + code = azGetObjectBlockImpl(object_name, offset, size, check, ppBlock); + if (TSDB_CODE_SUCCESS != code && retryCount++ < maxRetryCount) { + taosMsleep(taosRand() % (maxRetryInterval - minRetryInterval + 1) + minRetryInterval); + uInfo("%s: 0x%x(%s) and retry get object", __func__, code, tstrerror(code)); + goto _retry; + } + + TAOS_RETURN(code); +} + void azDeleteObjectsByPrefix(const char *prefix) { const std::string delimiter = "/"; std::string accountName = tsS3AccessKeyId[0]; From 88d730f7eda456f7be8c7803edcd66f8514045f8 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 15 Oct 2024 12:47:10 +0800 Subject: [PATCH 018/102] az: clean up avro --- source/libs/azure/inc/td_avro_parser.h | 191 +++++++ source/libs/azure/src/td_avro_parser.cpp | 531 ++++++++++++++++++ .../libs/azure/src/td_block_blob_client.cpp | 2 +- 3 files changed, 723 insertions(+), 1 deletion(-) create mode 100644 
source/libs/azure/inc/td_avro_parser.h create mode 100644 source/libs/azure/src/td_avro_parser.cpp diff --git a/source/libs/azure/inc/td_avro_parser.h b/source/libs/azure/inc/td_avro_parser.h new file mode 100644 index 0000000000..dae5a65dc7 --- /dev/null +++ b/source/libs/azure/inc/td_avro_parser.h @@ -0,0 +1,191 @@ +#pragma once + +#include "azure/storage/blobs/blob_options.hpp" + +#include + +#include +#include +#include + +namespace Azure { +namespace Storage { +namespace Blobs { +namespace _detail { +enum class AvroDatumType { + String, + Bytes, + Int, + Long, + Float, + Double, + Bool, + Null, + Record, + Enum, + Array, + Map, + Union, + Fixed, +}; + +class AvroStreamReader final { + public: + // position of a vector that lives through vector resizing + struct ReaderPos final { + const std::vector* BufferPtr = nullptr; + size_t Offset = 0; + }; + explicit AvroStreamReader(Core::IO::BodyStream& stream) : m_stream(&stream), m_pos{&m_streambuffer, 0} {} + AvroStreamReader(const AvroStreamReader&) = delete; + AvroStreamReader& operator=(const AvroStreamReader&) = delete; + + int64_t ParseInt(const Core::Context& context); + void Advance(size_t n, const Core::Context& context); + // Read at least n bytes from m_stream and append data to m_streambuffer. Return number of bytes + // available in m_streambuffer; + size_t Preload(size_t n, const Core::Context& context); + size_t TryPreload(size_t n, const Core::Context& context); + // discards data that's before m_pos + void Discard(); + + private: + size_t AvailableBytes() const { return m_streambuffer.size() - m_pos.Offset; } + + private: + Core::IO::BodyStream* m_stream; + std::vector m_streambuffer; + ReaderPos m_pos; + + friend class AvroDatum; +}; + +class AvroSchema final { + public: + static const AvroSchema StringSchema; + static const AvroSchema BytesSchema; + static const AvroSchema IntSchema; + static const AvroSchema LongSchema; + static const AvroSchema FloatSchema; + static const AvroSchema DoubleSchema; + static const AvroSchema BoolSchema; + static const AvroSchema NullSchema; + static AvroSchema RecordSchema(std::string name, const std::vector>& fieldsSchema); + static AvroSchema ArraySchema(AvroSchema elementSchema); + static AvroSchema MapSchema(AvroSchema elementSchema); + static AvroSchema UnionSchema(std::vector schemas); + static AvroSchema FixedSchema(std::string name, int64_t size); + + const std::string& Name() const { return m_name; } + AvroDatumType Type() const { return m_type; } + const std::vector& FieldNames() const { return m_status->m_keys; } + AvroSchema ItemSchema() const { return m_status->m_schemas[0]; } + const std::vector& FieldSchemas() const { return m_status->m_schemas; } + size_t Size() const { return static_cast(m_status->m_size); } + + private: + explicit AvroSchema(AvroDatumType type) : m_type(type) {} + + private: + AvroDatumType m_type; + std::string m_name; + + struct SharedStatus { + std::vector m_keys; + std::vector m_schemas; + int64_t m_size = 0; + }; + std::shared_ptr m_status; +}; + +class AvroDatum final { + public: + AvroDatum() : m_schema(AvroSchema::NullSchema) {} + explicit AvroDatum(AvroSchema schema) : m_schema(std::move(schema)) {} + + void Fill(AvroStreamReader& reader, const Core::Context& context); + void Fill(AvroStreamReader::ReaderPos& data); + + const AvroSchema& Schema() const { return m_schema; } + + template + T Value() const; + struct StringView { + const uint8_t* Data = nullptr; + size_t Length = 0; + }; + + private: + AvroSchema m_schema; + 
AvroStreamReader::ReaderPos m_data; +}; + +using AvroMap = std::map; + +class AvroRecord final { + public: + bool HasField(const std::string& key) const { return FindField(key) != m_keys->size(); } + const AvroDatum& Field(const std::string& key) const { return m_values.at(FindField(key)); } + AvroDatum& Field(const std::string& key) { return m_values.at(FindField(key)); } + const AvroDatum& FieldAt(size_t i) const { return m_values.at(i); } + AvroDatum& FieldAt(size_t i) { return m_values.at(i); } + + private: + size_t FindField(const std::string& key) const { + auto i = find(m_keys->begin(), m_keys->end(), key); + return i - m_keys->begin(); + } + const std::vector* m_keys = nullptr; + std::vector m_values; + + friend class AvroDatum; +}; + +class AvroObjectContainerReader final { + public: + explicit AvroObjectContainerReader(Core::IO::BodyStream& stream); + + bool End() const { return m_eof; } + // Calling Next() will invalidates the previous AvroDatum returned by this function and all + // AvroDatums propagated from there. + AvroDatum Next(const Core::Context& context) { return NextImpl(m_objectSchema.get(), context); } + + private: + AvroDatum NextImpl(const AvroSchema* schema, const Core::Context& context); + + private: + std::unique_ptr m_reader; + std::unique_ptr m_objectSchema; + std::string m_syncMarker; + int64_t m_remainingObjectInCurrentBlock = 0; + bool m_eof = false; +}; + +class AvroStreamParser final : public Core::IO::BodyStream { + public: + explicit AvroStreamParser(std::unique_ptr inner, + std::function progressCallback, + std::function errorCallback) + : m_inner(std::move(inner)), + m_parser(*m_inner), + m_progressCallback(std::move(progressCallback)), + m_errorCallback(std::move(errorCallback)) {} + + int64_t Length() const override { return -1; } + void Rewind() override { this->m_inner->Rewind(); } + + private: + size_t OnRead(uint8_t* buffer, size_t count, const Azure::Core::Context& context) override; + + private: + std::unique_ptr m_inner; + AvroObjectContainerReader m_parser; + std::function m_progressCallback; + std::function m_errorCallback; + AvroDatum::StringView m_parserBuffer; +}; + +} // namespace _detail +} // namespace Blobs +} // namespace Storage +} // namespace Azure diff --git a/source/libs/azure/src/td_avro_parser.cpp b/source/libs/azure/src/td_avro_parser.cpp new file mode 100644 index 0000000000..485980e007 --- /dev/null +++ b/source/libs/azure/src/td_avro_parser.cpp @@ -0,0 +1,531 @@ +#if defined(USE_S3) +#include "avro_parser.hpp" + +#include +#include + +#include +#include + +namespace Azure { +namespace Storage { +namespace Blobs { +namespace _detail { + +namespace { +int64_t parseInt(AvroStreamReader::ReaderPos& data) { + uint64_t r = 0; + int nb = 0; + while (true) { + uint8_t c = (*data.BufferPtr)[data.Offset++]; + r = r | ((static_cast(c) & 0x7f) << (nb * 7)); + if (c & 0x80) { + ++nb; + continue; + } + break; + } + return static_cast(r >> 1) ^ -static_cast(r & 0x01); +} + +AvroSchema ParseSchemaFromJsonString(const std::string& jsonSchema) { + const static std::map BuiltinNameSchemaMap = { + {"string", AvroSchema::StringSchema}, {"bytes", AvroSchema::BytesSchema}, {"int", AvroSchema::IntSchema}, + {"long", AvroSchema::LongSchema}, {"float", AvroSchema::FloatSchema}, {"double", AvroSchema::DoubleSchema}, + {"boolean", AvroSchema::BoolSchema}, {"null", AvroSchema::NullSchema}, {"string", AvroSchema::StringSchema}, + }; + std::map nameSchemaMap = BuiltinNameSchemaMap; + + std::function parseSchemaFromJsonObject; + 
parseSchemaFromJsonObject = [&](const Core::Json::_internal::json& obj) -> AvroSchema { + if (obj.is_string()) { + auto typeName = obj.get(); + return nameSchemaMap.find(typeName)->second; + } else if (obj.is_array()) { + std::vector unionSchemas; + for (const auto& s : obj) { + unionSchemas.push_back(parseSchemaFromJsonObject(s)); + } + return AvroSchema::UnionSchema(std::move(unionSchemas)); + } else if (obj.is_object()) { + if (obj.count("namespace") != 0) { + throw std::runtime_error("Namespace isn't supported yet in Avro schema."); + } + if (obj.count("aliases") != 0) { + throw std::runtime_error("Alias isn't supported yet in Avro schema."); + } + auto typeName = obj["type"].get(); + auto i = nameSchemaMap.find(typeName); + if (i != nameSchemaMap.end()) { + return i->second; + } + if (typeName == "record") { + std::vector> fieldsSchema; + for (const auto& field : obj["fields"]) { + fieldsSchema.push_back( + std::make_pair(field["name"].get(), parseSchemaFromJsonObject(field["type"]))); + } + + const std::string recordName = obj["name"].get(); + auto recordSchema = AvroSchema::RecordSchema(recordName, std::move(fieldsSchema)); + nameSchemaMap.insert(std::make_pair(recordName, recordSchema)); + return recordSchema; + } else if (typeName == "enum") { + throw std::runtime_error("Enum type isn't supported yet in Avro schema."); + } else if (typeName == "array") { + return AvroSchema::ArraySchema(parseSchemaFromJsonObject(obj["items"])); + } else if (typeName == "map") { + return AvroSchema::MapSchema(parseSchemaFromJsonObject(obj["items"])); + } else if (typeName == "fixed") { + const std::string fixedName = obj["name"].get(); + auto fixedSchema = AvroSchema::FixedSchema(fixedName, obj["size"].get()); + nameSchemaMap.insert(std::make_pair(fixedName, fixedSchema)); + return fixedSchema; + } else { + throw std::runtime_error("Unrecognized type " + typeName + " in Avro schema."); + } + } + AZURE_UNREACHABLE_CODE(); + }; + + auto jsonRoot = Core::Json::_internal::json::parse(jsonSchema.begin(), jsonSchema.end()); + return parseSchemaFromJsonObject(jsonRoot); +} +} // namespace + +int64_t AvroStreamReader::ParseInt(const Core::Context& context) { + uint64_t r = 0; + int nb = 0; + while (true) { + Preload(1, context); + uint8_t c = m_streambuffer[m_pos.Offset++]; + + r = r | ((static_cast(c) & 0x7f) << (nb * 7)); + if (c & 0x80) { + ++nb; + continue; + } + break; + } + return static_cast(r >> 1) ^ -static_cast(r & 0x01); +} + +void AvroStreamReader::Advance(size_t n, const Core::Context& context) { + Preload(n, context); + m_pos.Offset += n; +} + +size_t AvroStreamReader::Preload(size_t n, const Core::Context& context) { + size_t oldAvailable = AvailableBytes(); + while (true) { + size_t newAvailable = TryPreload(n, context); + if (newAvailable >= n) { + return newAvailable; + } + if (oldAvailable == newAvailable) { + throw std::runtime_error("Unexpected EOF of Avro stream."); + } + oldAvailable = newAvailable; + } + AZURE_UNREACHABLE_CODE(); +} + +size_t AvroStreamReader::TryPreload(size_t n, const Core::Context& context) { + size_t availableBytes = AvailableBytes(); + if (availableBytes >= n) { + return availableBytes; + } + const size_t MinRead = 4096; + size_t tryReadSize = (std::max)(n, MinRead); + size_t currSize = m_streambuffer.size(); + m_streambuffer.resize(m_streambuffer.size() + tryReadSize); + size_t actualReadSize = m_stream->Read(m_streambuffer.data() + currSize, tryReadSize, context); + m_streambuffer.resize(currSize + actualReadSize); + return AvailableBytes(); +} + +void 
AvroStreamReader::Discard() { + constexpr size_t MinimumReleaseMemory = 128 * 1024; + if (m_pos.Offset < MinimumReleaseMemory) { + return; + } + const size_t availableBytes = AvailableBytes(); + std::memmove(&m_streambuffer[0], &m_streambuffer[m_pos.Offset], availableBytes); + m_streambuffer.resize(availableBytes); + m_pos.Offset = 0; +} + +const AvroSchema AvroSchema::StringSchema(AvroDatumType::String); +const AvroSchema AvroSchema::BytesSchema(AvroDatumType::Bytes); +const AvroSchema AvroSchema::IntSchema(AvroDatumType::Int); +const AvroSchema AvroSchema::LongSchema(AvroDatumType::Long); +const AvroSchema AvroSchema::FloatSchema(AvroDatumType::Float); +const AvroSchema AvroSchema::DoubleSchema(AvroDatumType::Double); +const AvroSchema AvroSchema::BoolSchema(AvroDatumType::Bool); +const AvroSchema AvroSchema::NullSchema(AvroDatumType::Null); + +AvroSchema AvroSchema::RecordSchema(std::string name, + const std::vector>& fieldsSchema) { + AvroSchema recordSchema(AvroDatumType::Record); + recordSchema.m_name = std::move(name); + recordSchema.m_status = std::make_shared(); + for (auto& i : fieldsSchema) { + recordSchema.m_status->m_keys.push_back(i.first); + recordSchema.m_status->m_schemas.push_back(i.second); + } + return recordSchema; +} + +AvroSchema AvroSchema::ArraySchema(AvroSchema elementSchema) { + AvroSchema arraySchema(AvroDatumType::Array); + arraySchema.m_status = std::make_shared(); + arraySchema.m_status->m_schemas.push_back(std::move(elementSchema)); + return arraySchema; +} + +AvroSchema AvroSchema::MapSchema(AvroSchema elementSchema) { + AvroSchema mapSchema(AvroDatumType::Map); + mapSchema.m_status = std::make_shared(); + mapSchema.m_status->m_schemas.push_back(std::move(elementSchema)); + return mapSchema; +} + +AvroSchema AvroSchema::UnionSchema(std::vector schemas) { + AvroSchema unionSchema(AvroDatumType::Union); + unionSchema.m_status = std::make_shared(); + unionSchema.m_status->m_schemas = std::move(schemas); + return unionSchema; +} + +AvroSchema AvroSchema::FixedSchema(std::string name, int64_t size) { + AvroSchema fixedSchema(AvroDatumType::Fixed); + fixedSchema.m_name = std::move(name); + fixedSchema.m_status = std::make_shared(); + fixedSchema.m_status->m_size = size; + return fixedSchema; +} + +void AvroDatum::Fill(AvroStreamReader& reader, const Core::Context& context) { + m_data = reader.m_pos; + if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { + int64_t stringSize = reader.ParseInt(context); + reader.Advance(static_cast(stringSize), context); + } else if (m_schema.Type() == AvroDatumType::Int || m_schema.Type() == AvroDatumType::Long || + m_schema.Type() == AvroDatumType::Enum) { + reader.ParseInt(context); + } else if (m_schema.Type() == AvroDatumType::Float) { + reader.Advance(4, context); + } else if (m_schema.Type() == AvroDatumType::Double) { + reader.Advance(8, context); + } else if (m_schema.Type() == AvroDatumType::Bool) { + reader.Advance(1, context); + } else if (m_schema.Type() == AvroDatumType::Null) { + reader.Advance(0, context); + } else if (m_schema.Type() == AvroDatumType::Record) { + for (const auto& s : m_schema.FieldSchemas()) { + AvroDatum(s).Fill(reader, context); + } + } else if (m_schema.Type() == AvroDatumType::Array) { + while (true) { + int64_t numElementsInBlock = reader.ParseInt(context); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = reader.ParseInt(context); + reader.Advance(static_cast(blockSize), context); + } else { + for (auto 
i = 0; i < numElementsInBlock; ++i) { + AvroDatum(m_schema.ItemSchema()).Fill(reader, context); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Map) { + while (true) { + int64_t numElementsInBlock = reader.ParseInt(context); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = reader.ParseInt(context); + reader.Advance(static_cast(blockSize), context); + } else { + for (int64_t i = 0; i < numElementsInBlock; ++i) { + AvroDatum(AvroSchema::StringSchema).Fill(reader, context); + AvroDatum(m_schema.ItemSchema()).Fill(reader, context); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Union) { + int64_t i = reader.ParseInt(context); + AvroDatum(m_schema.FieldSchemas()[static_cast(i)]).Fill(reader, context); + } else if (m_schema.Type() == AvroDatumType::Fixed) { + reader.Advance(m_schema.Size(), context); + } else { + AZURE_UNREACHABLE_CODE(); + } +} + +void AvroDatum::Fill(AvroStreamReader::ReaderPos& data) { + m_data = data; + if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { + int64_t stringSize = parseInt(data); + data.Offset += static_cast(stringSize); + } else if (m_schema.Type() == AvroDatumType::Int || m_schema.Type() == AvroDatumType::Long || + m_schema.Type() == AvroDatumType::Enum) { + parseInt(data); + } else if (m_schema.Type() == AvroDatumType::Float) { + data.Offset += 4; + } else if (m_schema.Type() == AvroDatumType::Double) { + data.Offset += 8; + } else if (m_schema.Type() == AvroDatumType::Bool) { + data.Offset += 1; + } else if (m_schema.Type() == AvroDatumType::Null) { + data.Offset += 0; + } else if (m_schema.Type() == AvroDatumType::Record) { + for (const auto& s : m_schema.FieldSchemas()) { + AvroDatum(s).Fill(data); + } + } else if (m_schema.Type() == AvroDatumType::Array) { + while (true) { + int64_t numElementsInBlock = parseInt(data); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = parseInt(data); + data.Offset += static_cast(blockSize); + } else { + for (auto i = 0; i < numElementsInBlock; ++i) { + AvroDatum(m_schema.ItemSchema()).Fill(data); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Map) { + while (true) { + int64_t numElementsInBlock = parseInt(data); + if (numElementsInBlock == 0) { + break; + } else if (numElementsInBlock < 0) { + int64_t blockSize = parseInt(data); + data.Offset += static_cast(blockSize); + } else { + for (int64_t i = 0; i < numElementsInBlock; ++i) { + AvroDatum(AvroSchema::StringSchema).Fill(data); + AvroDatum(m_schema.ItemSchema()).Fill(data); + } + } + } + } else if (m_schema.Type() == AvroDatumType::Union) { + int64_t i = parseInt(data); + AvroDatum(m_schema.FieldSchemas()[static_cast(i)]).Fill(data); + } else if (m_schema.Type() == AvroDatumType::Fixed) { + data.Offset += m_schema.Size(); + } else { + AZURE_UNREACHABLE_CODE(); + } +} + +template <> +AvroDatum::StringView AvroDatum::Value() const { + auto data = m_data; + if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { + const int64_t length = parseInt(data); + const uint8_t* start = &(*data.BufferPtr)[data.Offset]; + StringView ret{start, static_cast(length)}; + data.Offset += static_cast(length); + return ret; + } + if (m_schema.Type() == AvroDatumType::Fixed) { + const size_t fixedSize = m_schema.Size(); + const uint8_t* start = &(*data.BufferPtr)[data.Offset]; + StringView ret{start, fixedSize}; + data.Offset += fixedSize; + return ret; + } + 
AZURE_UNREACHABLE_CODE(); +} + +template <> +std::string AvroDatum::Value() const { + auto stringView = Value(); + return std::string(stringView.Data, stringView.Data + stringView.Length); +} + +template <> +std::vector AvroDatum::Value() const { + auto stringView = Value(); + return std::vector(stringView.Data, stringView.Data + stringView.Length); +} + +template <> +int64_t AvroDatum::Value() const { + auto data = m_data; + return parseInt(data); +} + +template <> +int32_t AvroDatum::Value() const { + return static_cast(Value()); +} + +template <> +bool AvroDatum::Value() const { + return Value(); +} + +template <> +std::nullptr_t AvroDatum::Value() const { + return nullptr; +} + +template <> +AvroRecord AvroDatum::Value() const { + auto data = m_data; + + AvroRecord r; + r.m_keys = &m_schema.FieldNames(); + for (const auto& schema : m_schema.FieldSchemas()) { + auto datum = AvroDatum(schema); + datum.Fill(data); + r.m_values.push_back(std::move(datum)); + } + + return r; +} + +template <> +AvroMap AvroDatum::Value() const { + auto data = m_data; + + AvroMap m; + while (true) { + int64_t numElementsInBlock = parseInt(data); + if (numElementsInBlock == 0) { + break; + } + if (numElementsInBlock < 0) { + numElementsInBlock = -numElementsInBlock; + parseInt(data); + } + for (int64_t i = 0; i < numElementsInBlock; ++i) { + auto keyDatum = AvroDatum(AvroSchema::StringSchema); + keyDatum.Fill(data); + auto valueDatum = AvroDatum(m_schema.ItemSchema()); + valueDatum.Fill(data); + m[keyDatum.Value()] = valueDatum; + } + } + return m; +} + +template <> +AvroDatum AvroDatum::Value() const { + auto data = m_data; + if (m_schema.Type() == AvroDatumType::Union) { + int64_t i = parseInt(data); + auto datum = AvroDatum(m_schema.FieldSchemas()[static_cast(i)]); + datum.Fill(data); + return datum; + } + AZURE_UNREACHABLE_CODE(); +} + +AvroObjectContainerReader::AvroObjectContainerReader(Core::IO::BodyStream& stream) + : m_reader(std::make_unique(stream)) {} + +AvroDatum AvroObjectContainerReader::NextImpl(const AvroSchema* schema, const Core::Context& context) { + AZURE_ASSERT_FALSE(m_eof); + static const auto SyncMarkerSchema = AvroSchema::FixedSchema("Sync", 16); + if (!schema) { + static AvroSchema FileHeaderSchema = []() { + std::vector> fieldsSchema; + fieldsSchema.push_back(std::make_pair("magic", AvroSchema::FixedSchema("Magic", 4))); + fieldsSchema.push_back(std::make_pair("meta", AvroSchema::MapSchema(AvroSchema::BytesSchema))); + fieldsSchema.push_back(std::make_pair("sync", SyncMarkerSchema)); + return AvroSchema::RecordSchema("org.apache.avro.file.Header", std::move(fieldsSchema)); + }(); + auto fileHeaderDatum = AvroDatum(FileHeaderSchema); + fileHeaderDatum.Fill(*m_reader, context); + auto fileHeader = fileHeaderDatum.Value(); + if (fileHeader.Field("magic").Value() != "Obj\01") { + throw std::runtime_error("Invalid Avro object container magic."); + } + AvroMap meta = fileHeader.Field("meta").Value(); + std::string objectSchemaJson = meta["avro.schema"].Value(); + std::string codec = "null"; + if (meta.count("avro.codec") != 0) { + codec = meta["avro.codec"].Value(); + } + if (codec != "null") { + throw std::runtime_error("Unsupported Avro codec: " + codec); + } + m_syncMarker = fileHeader.Field("sync").Value(); + m_objectSchema = std::make_unique(ParseSchemaFromJsonString(objectSchemaJson)); + schema = m_objectSchema.get(); + } + + if (m_remainingObjectInCurrentBlock == 0) { + m_reader->Discard(); + m_remainingObjectInCurrentBlock = m_reader->ParseInt(context); + int64_t ObjectsSize = 
m_reader->ParseInt(context);
+    m_reader->Preload(static_cast<size_t>(ObjectsSize), context);
+  }
+
+  auto objectDatum = AvroDatum(*m_objectSchema);
+  objectDatum.Fill(*m_reader, context);
+  if (--m_remainingObjectInCurrentBlock == 0) {
+    auto markerDatum = AvroDatum(SyncMarkerSchema);
+    markerDatum.Fill(*m_reader, context);
+    auto marker = markerDatum.Value<std::string>();
+    if (marker != m_syncMarker) {
+      throw std::runtime_error("Sync marker doesn't match.");
+    }
+    m_eof = m_reader->TryPreload(1, context) == 0;
+  }
+  return objectDatum;
+}
+
+size_t AvroStreamParser::OnRead(uint8_t* buffer, size_t count, Azure::Core::Context const& context) {
+  if (m_parserBuffer.Length != 0) {
+    size_t bytesToCopy = (std::min)(m_parserBuffer.Length, count);
+    std::memcpy(buffer, m_parserBuffer.Data, bytesToCopy);
+    m_parserBuffer.Data += bytesToCopy;
+    m_parserBuffer.Length -= bytesToCopy;
+    return bytesToCopy;
+  }
+  while (!m_parser.End()) {
+    auto datum = m_parser.Next(context);
+    if (datum.Schema().Type() == AvroDatumType::Union) {
+      datum = datum.Value<AvroDatum>();
+    }
+    if (datum.Schema().Type() != AvroDatumType::Record) {
+      continue;
+    }
+    if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.resultData") {
+      auto record = datum.Value<AvroRecord>();
+      auto dataDatum = record.Field("data");
+      m_parserBuffer = dataDatum.Value<AvroDatum::StringView>();
+      return OnRead(buffer, count, context);
+    }
+    if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.progress" && m_progressCallback) {
+      auto record = datum.Value<AvroRecord>();
+      auto bytesScanned = record.Field("bytesScanned").Value<int64_t>();
+      auto totalBytes = record.Field("totalBytes").Value<int64_t>();
+      m_progressCallback(bytesScanned, totalBytes);
+    }
+    if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.error" && m_errorCallback) {
+      auto record = datum.Value<AvroRecord>();
+      BlobQueryError e;
+      e.Name = record.Field("name").Value<std::string>();
+      e.Description = record.Field("description").Value<std::string>();
+      e.IsFatal = record.Field("fatal").Value<bool>();
+      e.Position = record.Field("position").Value<int64_t>();
+      m_errorCallback(std::move(e));
+    }
+  }
+  return 0;
+}
+}  // namespace _detail
+}  // namespace Blobs
+}  // namespace Storage
+}  // namespace Azure
+
+#endif
diff --git a/source/libs/azure/src/td_block_blob_client.cpp b/source/libs/azure/src/td_block_blob_client.cpp
index e75c6ae17f..b5a5c3c189 100644
--- a/source/libs/azure/src/td_block_blob_client.cpp
+++ b/source/libs/azure/src/td_block_blob_client.cpp
@@ -14,7 +14,7 @@
 #include 
 #endif
 
-#include "./avro_parser.hpp"
+#include "avro_parser.hpp"
 
 #include 
 #include 

From af6ff0dd6abefd537f6cb3c09a000da9ebcf647b Mon Sep 17 00:00:00 2001
From: Minglei Jin 
Date: Wed, 16 Oct 2024 14:46:14 +0800
Subject: [PATCH 019/102] tcs: separate stream related stuff into a standalone
 module

---
 source/libs/tcs/inc/tcsInt.h    | 59 +++++++++++++++++++++++++++++++++
 source/libs/tcs/src/tcs.c       | 46 ++-----------------------
 source/libs/tcs/src/tcsStream.c | 29 ++++++++++++++++
 3 files changed, 90 insertions(+), 44 deletions(-)
 create mode 100644 source/libs/tcs/inc/tcsInt.h
 create mode 100644 source/libs/tcs/src/tcsStream.c

diff --git a/source/libs/tcs/inc/tcsInt.h b/source/libs/tcs/inc/tcsInt.h
new file mode 100644
index 0000000000..b24a47aa98
--- /dev/null
+++ b/source/libs/tcs/inc/tcsInt.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2019 TAOS Data, Inc.
+ *
+ * This program is free software: you can use, redistribute, and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3
+ * or later ("AGPL"), as published by the Free Software Foundation.
+ * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#ifndef _TD_TCS_INT_H_ +#define _TD_TCS_INT_H_ + +#include "os.h" +#include "tarray.h" +#include "tdef.h" +#include "tlog.h" +#include "tmsg.h" + +#ifdef __cplusplus +extern "C" { +#endif + +extern int8_t tsS3Ablob; + +typedef enum { + TOS_PROTO_NIL, + TOS_PROTO_S3, + TOS_PROTO_ABLOB, +} STosProto; + +typedef struct { + int32_t (*Begin)(); + void (*End)(); + int32_t (*CheckCfg)(); + + int32_t (*PutObjectFromFileOffset)(const char* file, const char* object_name, int64_t offset, int64_t size); + int32_t (*GetObjectBlock)(const char* object_name, int64_t offset, int64_t size, bool check, uint8_t** ppBlock); + + void (*DeleteObjectsByPrefix)(const char* prefix); + + int32_t (*PutObjectFromFile2)(const char* file, const char* object, int8_t withcp); + int32_t (*GetObjectsByPrefix)(const char* prefix, const char* path); + int32_t (*DeleteObjects)(const char* object_name[], int nobject); + int32_t (*GetObjectToFile)(const char* object_name, const char* fileName); +} STcs; + +extern STcs tcs; + +#ifdef __cplusplus +} +#endif + +#endif // _TD_TCS_INT_H_ diff --git a/source/libs/tcs/src/tcs.c b/source/libs/tcs/src/tcs.c index 5facffa4ac..c0933f68d0 100644 --- a/source/libs/tcs/src/tcs.c +++ b/source/libs/tcs/src/tcs.c @@ -14,39 +14,15 @@ */ #include "tcs.h" - #include "os.h" #include "taoserror.h" +#include "tcsInt.h" #include "tglobal.h" #include "az.h" #include "cos.h" -extern int8_t tsS3Ablob; - -typedef enum { - TOS_PROTO_NIL, - TOS_PROTO_S3, - TOS_PROTO_ABLOB, -} STosProto; - -typedef struct { - int32_t (*Begin)(); - void (*End)(); - int32_t (*CheckCfg)(); - - int32_t (*PutObjectFromFileOffset)(const char* file, const char* object_name, int64_t offset, int64_t size); - int32_t (*GetObjectBlock)(const char* object_name, int64_t offset, int64_t size, bool check, uint8_t** ppBlock); - - void (*DeleteObjectsByPrefix)(const char* prefix); - - int32_t (*PutObjectFromFile2)(const char* file, const char* object, int8_t withcp); - int32_t (*GetObjectsByPrefix)(const char* prefix, const char* path); - int32_t (*DeleteObjects)(const char* object_name[], int nobject); - int32_t (*GetObjectToFile)(const char* object_name, const char* fileName); -} STcs; - -static STcs tcs; +STcs tcs; int32_t tcsInit() { int32_t code = 0; @@ -108,12 +84,6 @@ int32_t tcsCheckCfg() { TAOS_RETURN(code); } - code = s3Begin(); - if (code != 0) { - (void)fprintf(stderr, "failed to begin s3.\n"); - TAOS_RETURN(code); - } - code = tcs.CheckCfg(); if (code != 0) { (void)fprintf(stderr, "failed to check s3.\n"); @@ -134,15 +104,3 @@ int32_t tcsGetObjectBlock(const char* object_name, int64_t offset, int64_t size, } void tcsDeleteObjectsByPrefix(const char* prefix) { return tcs.DeleteObjectsByPrefix(prefix); } - -int32_t tcsPutObjectFromFile2(const char* file, const char* object, int8_t withcp) { - return tcs.PutObjectFromFile2(file, object, withcp); -} - -int32_t tcsGetObjectsByPrefix(const char* prefix, const char* path) { return tcs.GetObjectsByPrefix(prefix, path); } - -int32_t tcsDeleteObjects(const char* object_name[], int nobject) { return tcs.DeleteObjects(object_name, nobject); } - -int32_t tcsGetObjectToFile(const char* object_name, const char* fileName) { - return tcs.GetObjectToFile(object_name, 
fileName); -} diff --git a/source/libs/tcs/src/tcsStream.c b/source/libs/tcs/src/tcsStream.c new file mode 100644 index 0000000000..7cd4647ddd --- /dev/null +++ b/source/libs/tcs/src/tcsStream.c @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#include "tcs.h" +#include "tcsInt.h" + +int32_t tcsPutObjectFromFile2(const char* file, const char* object, int8_t withcp) { + return tcs.PutObjectFromFile2(file, object, withcp); +} + +int32_t tcsGetObjectsByPrefix(const char* prefix, const char* path) { return tcs.GetObjectsByPrefix(prefix, path); } + +int32_t tcsDeleteObjects(const char* object_name[], int nobject) { return tcs.DeleteObjects(object_name, nobject); } + +int32_t tcsGetObjectToFile(const char* object_name, const char* fileName) { + return tcs.GetObjectToFile(object_name, fileName); +} From b05784f524b446ffa282302185962c458838aff4 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 16 Oct 2024 16:44:08 +0800 Subject: [PATCH 020/102] tcs: fix mac stb link issue --- source/libs/azure/src/az.cpp | 4 +- source/libs/stream/CMakeLists.txt | 6 +-- source/libs/tcs/CMakeLists.txt | 4 +- source/libs/tcs/src/tcs.c | 7 ++-- source/libs/tcs/test/tcsTest.cpp | 61 +++++++++++++++++++++++++++++++ 5 files changed, 72 insertions(+), 10 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 0af669c56a..80b5fb883e 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -49,13 +49,13 @@ static void azDumpCfgByEp(int8_t epIndex) { "%-24s %s\n" "%-24s %s\n" "%-24s %s\n" - // "%-24s %s\n" + "%-24s %s\n" "%-24s %s\n" "%-24s %s\n", "hostName", tsS3Hostname[epIndex], "bucketName", tsS3BucketName, "protocol", "https only", - //"uristyle", (uriStyleG[epIndex] == S3UriStyleVirtualHost ? 
"virtualhost" : "path"), + "uristyle", "path only", "accessKey", tsS3AccessKeyId[epIndex], "accessKeySecret", tsS3AccessKeySecret[epIndex]); // clang-format on diff --git a/source/libs/stream/CMakeLists.txt b/source/libs/stream/CMakeLists.txt index f08b16f836..53e26469f0 100644 --- a/source/libs/stream/CMakeLists.txt +++ b/source/libs/stream/CMakeLists.txt @@ -13,7 +13,7 @@ if(${BUILD_WITH_ROCKSDB}) target_link_libraries( stream PUBLIC rocksdb tdb - PRIVATE os util transport qcom executor wal index + PRIVATE os util transport qcom executor wal index tcs ) target_include_directories( stream @@ -32,13 +32,13 @@ if(${BUILD_WITH_ROCKSDB}) target_link_libraries( stream PUBLIC rocksdb tdb - PRIVATE os util transport qcom executor wal index + PRIVATE os util transport qcom executor wal index tcs ) else() target_link_libraries( stream PUBLIC rocksdb tdb - PRIVATE os util transport qcom executor wal index + PRIVATE os util transport qcom executor wal index tcs ) target_include_directories( stream diff --git a/source/libs/tcs/CMakeLists.txt b/source/libs/tcs/CMakeLists.txt index 1c914a18b9..e0de823c7a 100644 --- a/source/libs/tcs/CMakeLists.txt +++ b/source/libs/tcs/CMakeLists.txt @@ -1,6 +1,6 @@ -aux_source_directory(src TOS_SRC) +aux_source_directory(src TCS_SRC) -add_library(tcs STATIC ${TOS_SRC}) +add_library(tcs STATIC ${TCS_SRC}) target_include_directories( tcs PUBLIC "${TD_SOURCE_DIR}/include/libs/tcs" diff --git a/source/libs/tcs/src/tcs.c b/source/libs/tcs/src/tcs.c index c0933f68d0..db02ca21fa 100644 --- a/source/libs/tcs/src/tcs.c +++ b/source/libs/tcs/src/tcs.c @@ -43,6 +43,7 @@ int32_t tcsInit() { tcs.GetObjectsByPrefix = s3GetObjectsByPrefix; tcs.DeleteObjects = s3DeleteObjects; tcs.GetObjectToFile = s3GetObjectToFile; + } else if (TOS_PROTO_ABLOB == proto) { tcs.Begin = azBegin; tcs.End = azEnd; @@ -74,19 +75,19 @@ int32_t tcsCheckCfg() { int32_t code = 0; if (!tsS3Enabled) { - (void)fprintf(stderr, "s3 not configured.\n"); + (void)fprintf(stderr, "tcs not configured.\n"); TAOS_RETURN(code); } code = tcsInit(); if (code != 0) { - (void)fprintf(stderr, "failed to initialize s3.\n"); + (void)fprintf(stderr, "failed to initialize tcs.\n"); TAOS_RETURN(code); } code = tcs.CheckCfg(); if (code != 0) { - (void)fprintf(stderr, "failed to check s3.\n"); + (void)fprintf(stderr, "failed to check tcs.\n"); TAOS_RETURN(code); } diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index 68b39bd710..e5593bfc18 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -2,6 +2,67 @@ #include #include #include + +#include "tcs.h" + +int32_t tcsInitEnv() { + int32_t code = 0; + + extern char tsS3Hostname[][TSDB_FQDN_LEN]; + extern char tsS3AccessKeyId[][TSDB_FQDN_LEN]; + extern char tsS3AccessKeySecret[][TSDB_FQDN_LEN]; + extern char tsS3BucketName[TSDB_FQDN_LEN]; + + /* TCS parameter format + tsS3Hostname[0] = "endpoint/.blob.core.windows.net"; + tsS3AccessKeyId[0] = ""; + tsS3AccessKeySecret[0] = ""; + tsS3BucketName = ""; + */ + tsS3Enabled = true; + + return code; +} + +// TEST(TcsTest, DISABLE_InterfaceTest) { +TEST(TcsTest, InterfaceTest) { + int code = 0; + + if (!tsS3Enabled) { + (void)fprintf(stderr, "tcs not configured.\n"); + + return; + } + + code = tcsInit(); + GTEST_ASSERT_EQ(code, 0); + + code = tcsCheckCfg(); + GTEST_ASSERT_EQ(code, 0); + /* + code = tcsPutObjectFromFileOffset(file, object_name, offset, size); + GTEST_ASSERT_EQ(code, 0); + code = tcsGetObjectBlock(object_name, offset, size, check, ppBlock); + 
GTEST_ASSERT_EQ(code, 0); + + tcsDeleteObjectsByPrefix(prefix); + // list object to check + + code = tcsPutObjectFromFile2(file, object, withcp); + GTEST_ASSERT_EQ(code, 0); + code = tcsGetObjectsByPrefix(prefix, path); + GTEST_ASSERT_EQ(code, 0); + code = tcsDeleteObjects(object_name, nobject); + GTEST_ASSERT_EQ(code, 0); + code = tcsGetObjectToFile(object_name, fileName); + GTEST_ASSERT_EQ(code, 0); + + // GTEST_ASSERT_NE(pEnv, nullptr); + */ + + tcsUninit(); +} + /* #include "walInt.h" const char* ranStr = "tvapq02tcp"; From d81fa80865acab41e92f865bdf7165e5ca2b5a60 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 16 Oct 2024 17:27:32 +0800 Subject: [PATCH 021/102] az: remove unused key newing --- .../mnode/impl/test/arbgroup/CMakeLists.txt | 2 +- source/libs/azure/src/az.cpp | 13 +--------- source/libs/stream/CMakeLists.txt | 13 +++++----- source/libs/stream/test/CMakeLists.txt | 2 +- source/libs/tcs/test/tcsTest.cpp | 24 +++++++++++++++---- 5 files changed, 28 insertions(+), 26 deletions(-) diff --git a/source/dnode/mnode/impl/test/arbgroup/CMakeLists.txt b/source/dnode/mnode/impl/test/arbgroup/CMakeLists.txt index 44ac305498..0da36e1f67 100644 --- a/source/dnode/mnode/impl/test/arbgroup/CMakeLists.txt +++ b/source/dnode/mnode/impl/test/arbgroup/CMakeLists.txt @@ -4,7 +4,7 @@ aux_source_directory(. MNODE_ARBGROUP_TEST_SRC) add_executable(arbgroupTest ${MNODE_ARBGROUP_TEST_SRC}) target_link_libraries( arbgroupTest - PRIVATE dnode nodes planner gtest qcom + PRIVATE dnode nodes planner gtest qcom tcs ) add_test( diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 80b5fb883e..b05f5be2ca 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -72,8 +72,6 @@ static int32_t azListBucket(char const *bucketname) { try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); - StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); - BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); std::string containerName = bucketname; @@ -216,8 +214,7 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); - std::string accountURL = tsS3Hostname[0]; - StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); + std::string accountURL = tsS3Hostname[0]; accountURL = "https://" + accountURL; BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); @@ -278,8 +275,6 @@ int32_t azGetObjectBlockImpl(const char *object_name, int64_t offset, int64_t si try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); - StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); - accountURL = "https://" + accountURL; BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); @@ -353,8 +348,6 @@ void azDeleteObjectsByPrefix(const char *prefix) { try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); - StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); - BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); std::string containerName = tsS3BucketName; @@ -414,8 +407,6 @@ int32_t azGetObjectToFile(const char *object_name, const char *fileName) { try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); - StorageSharedKeyCredential 
*pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); - BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); std::string containerName = tsS3BucketName; @@ -450,8 +441,6 @@ int32_t azGetObjectsByPrefix(const char *prefix, const char *path) { try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); - StorageSharedKeyCredential *pSharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey); - BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); std::string containerName = tsS3BucketName; diff --git a/source/libs/stream/CMakeLists.txt b/source/libs/stream/CMakeLists.txt index 53e26469f0..27f5c46004 100644 --- a/source/libs/stream/CMakeLists.txt +++ b/source/libs/stream/CMakeLists.txt @@ -12,8 +12,8 @@ if(${BUILD_WITH_ROCKSDB}) if (${BUILD_CONTRIB}) target_link_libraries( stream - PUBLIC rocksdb tdb - PRIVATE os util transport qcom executor wal index tcs + PUBLIC rocksdb tdb tcs + PRIVATE os util transport qcom executor wal index ) target_include_directories( stream @@ -31,14 +31,14 @@ if(${BUILD_WITH_ROCKSDB}) ) target_link_libraries( stream - PUBLIC rocksdb tdb - PRIVATE os util transport qcom executor wal index tcs + PUBLIC rocksdb tdb tcs + PRIVATE os util transport qcom executor wal index ) else() target_link_libraries( stream - PUBLIC rocksdb tdb - PRIVATE os util transport qcom executor wal index tcs + PUBLIC rocksdb tdb tcs + PRIVATE os util transport qcom executor wal index ) target_include_directories( stream @@ -59,4 +59,3 @@ endif(${BUILD_WITH_ROCKSDB}) if(${BUILD_TEST}) ADD_SUBDIRECTORY(test) endif(${BUILD_TEST}) - diff --git a/source/libs/stream/test/CMakeLists.txt b/source/libs/stream/test/CMakeLists.txt index c472207b27..f2c985964d 100644 --- a/source/libs/stream/test/CMakeLists.txt +++ b/source/libs/stream/test/CMakeLists.txt @@ -101,4 +101,4 @@ IF(NOT TD_DARWIN) NAME backendTest COMMAND backendTest ) -ENDIF () \ No newline at end of file +ENDIF () diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index e5593bfc18..ed2cb59858 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -4,6 +4,7 @@ #include #include "tcs.h" +#include "tcsInt.h" int32_t tcsInitEnv() { int32_t code = 0; @@ -19,7 +20,21 @@ int32_t tcsInitEnv() { tsS3AccessKeySecret[0] = ""; tsS3BucketName = ""; */ + + const char *hostname = "endpoint/.blob.core.windows.net"; + const char *accessKeyId = ""; + const char *accessKeySecret = ""; + const char *bucketName = ""; + + tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); + tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + + tstrncpy(tsTempDir, "/tmp/", PATH_MAX); + tsS3Enabled = true; + tsS3Ablob = true; return code; } @@ -28,11 +43,10 @@ int32_t tcsInitEnv() { TEST(TcsTest, InterfaceTest) { int code = 0; - if (!tsS3Enabled) { - (void)fprintf(stderr, "tcs not configured.\n"); - - return; - } + code = tcsInitEnv(); + GTEST_ASSERT_EQ(code, 0); + GTEST_ASSERT_EQ(tsS3Enabled, 1); + GTEST_ASSERT_EQ(tsS3Ablob, 1); code = tcsInit(); GTEST_ASSERT_EQ(code, 0); From 5e14f65fb9071239b0c40fba5a731fcb54c70ebb Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 16 Oct 2024 19:01:39 +0800 Subject: [PATCH 022/102] tcs/test: new blob test for checkcfg --- source/libs/tcs/test/CMakeLists.txt | 2 +- source/libs/tcs/test/tcsTest.cpp | 80 +++++++++++++++++++++++++---- 2 files 
changed, 70 insertions(+), 12 deletions(-) diff --git a/source/libs/tcs/test/CMakeLists.txt b/source/libs/tcs/test/CMakeLists.txt index 33fe75c589..f0a5fb97a6 100644 --- a/source/libs/tcs/test/CMakeLists.txt +++ b/source/libs/tcs/test/CMakeLists.txt @@ -9,7 +9,7 @@ target_include_directories(tcsTest target_link_libraries(tcsTest tcs - gtest_main + common gtest_main ) enable_testing() add_test( diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index ed2cb59858..e86c9e02b3 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -6,9 +6,11 @@ #include "tcs.h" #include "tcsInt.h" -int32_t tcsInitEnv() { +int32_t tcsInitEnv(int8_t isBlob) { int32_t code = 0; + extern int8_t tsS3EpNum; + extern char tsS3Hostname[][TSDB_FQDN_LEN]; extern char tsS3AccessKeyId[][TSDB_FQDN_LEN]; extern char tsS3AccessKeySecret[][TSDB_FQDN_LEN]; @@ -21,20 +23,38 @@ int32_t tcsInitEnv() { tsS3BucketName = ""; */ - const char *hostname = "endpoint/.blob.core.windows.net"; - const char *accessKeyId = ""; - const char *accessKeySecret = ""; - const char *bucketName = ""; + tsS3Ablob = isBlob; + if (isBlob) { + const char *hostname = "endpoint/.blob.core.windows.net"; + const char *accessKeyId = ""; + const char *accessKeySecret = ""; + const char *bucketName = ""; - tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); - tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); - tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); - tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); + tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + + } else { + const char *hostname = "endpoint/.blob.core.windows.net"; + const char *accessKeyId = ""; + const char *accessKeySecret = ""; + const char *bucketName = ""; + + tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); + tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + + // setup s3 env + tsS3EpNum = 1; + } tstrncpy(tsTempDir, "/tmp/", PATH_MAX); tsS3Enabled = true; - tsS3Ablob = true; + if (!tsS3Ablob) { + } return code; } @@ -43,7 +63,7 @@ int32_t tcsInitEnv() { TEST(TcsTest, InterfaceTest) { int code = 0; - code = tcsInitEnv(); + code = tcsInitEnv(true); GTEST_ASSERT_EQ(code, 0); GTEST_ASSERT_EQ(tsS3Enabled, 1); GTEST_ASSERT_EQ(tsS3Ablob, 1); @@ -77,6 +97,44 @@ TEST(TcsTest, InterfaceTest) { tcsUninit(); } +// TEST(TcsTest, DISABLE_InterfaceNonBlobTest) { +TEST(TcsTest, InterfaceNonBlobTest) { + int code = 0; + + code = tcsInitEnv(false); + GTEST_ASSERT_EQ(code, 0); + GTEST_ASSERT_EQ(tsS3Enabled, 1); + GTEST_ASSERT_EQ(tsS3Ablob, 0); + + code = tcsInit(); + GTEST_ASSERT_EQ(code, 0); + + code = tcsCheckCfg(); + GTEST_ASSERT_EQ(code, 0); + /* + code = tcsPutObjectFromFileOffset(file, object_name, offset, size); + GTEST_ASSERT_EQ(code, 0); + code = tcsGetObjectBlock(object_name, offset, size, check, ppBlock); + GTEST_ASSERT_EQ(code, 0); + + tcsDeleteObjectsByPrefix(prefix); + // list object to check + + code = tcsPutObjectFromFile2(file, object, withcp); + GTEST_ASSERT_EQ(code, 0); + code = tcsGetObjectsByPrefix(prefix, path); + GTEST_ASSERT_EQ(code, 0); + code = tcsDeleteObjects(object_name, nobject); + GTEST_ASSERT_EQ(code, 0); + code = 
tcsGetObjectToFile(object_name, fileName); + GTEST_ASSERT_EQ(code, 0); + + // GTEST_ASSERT_NE(pEnv, nullptr); + */ + + tcsUninit(); +} + /* #include "walInt.h" const char* ranStr = "tvapq02tcp"; From 445dfddc1f149d3c1a4854bd81d40ee5568ea5f1 Mon Sep 17 00:00:00 2001 From: sheyanjie-qq <249478495@qq.com> Date: Wed, 16 Oct 2024 19:23:21 +0800 Subject: [PATCH 023/102] java websocket use own protocal --- .../com/taos/example/SchemalessWsTest.java | 4 +- .../com/taos/example/WSConnectExample.java | 4 +- .../example/WSParameterBindingBasicDemo.java | 7 +- .../example/WSParameterBindingFullDemo.java | 21 +++--- .../07-develop/01-connect/_connect_rust.mdx | 2 +- docs/zh/07-develop/01-connect/index.md | 26 +++---- docs/zh/07-develop/02-sql.md | 16 ++-- docs/zh/07-develop/04-schemaless.md | 2 +- docs/zh/07-develop/05-stmt.md | 2 +- docs/zh/07-develop/07-tmq.md | 16 ++-- docs/zh/08-operation/18-dual.md | 17 ++--- .../01-components/03-taosadapter.md | 4 +- docs/zh/14-reference/05-connector/10-cpp.mdx | 20 ++--- docs/zh/14-reference/05-connector/14-java.mdx | 73 +++++++++++-------- docs/zh/14-reference/05-connector/26-rust.mdx | 18 ++--- .../14-reference/05-connector/30-python.mdx | 8 +- docs/zh/14-reference/05-connector/35-node.mdx | 6 +- .../14-reference/05-connector/40-csharp.mdx | 2 +- docs/zh/14-reference/05-connector/index.md | 2 +- 19 files changed, 131 insertions(+), 119 deletions(-) diff --git a/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java b/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java index 08f66c2227..0f35e38f57 100644 --- a/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java +++ b/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java @@ -17,8 +17,8 @@ public class SchemalessWsTest { private static final String jsonDemo = "{\"metric\": \"metric_json\",\"timestamp\": 1626846400,\"value\": 10.3, \"tags\": {\"groupid\": 2, \"location\": \"California.SanFrancisco\", \"id\": \"d1001\"}}"; public static void main(String[] args) throws SQLException { - final String url = "jdbc:TAOS-RS://" + host + ":6041?user=root&password=taosdata&batchfetch=true"; - try(Connection connection = DriverManager.getConnection(url)){ + final String url = "jdbc:TAOS-WS://" + host + ":6041?user=root&password=taosdata"; + try (Connection connection = DriverManager.getConnection(url)) { init(connection); AbstractConnection conn = connection.unwrap(AbstractConnection.class); diff --git a/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java b/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java index afe74ace83..052af71a83 100644 --- a/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java +++ b/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java @@ -12,9 +12,9 @@ public class WSConnectExample { public static void main(String[] args) throws Exception { // use // String jdbcUrl = - // "jdbc:TAOS-RS://localhost:6041/dbName?user=root&password=taosdata&batchfetch=true"; + // "jdbc:TAOS-WS://localhost:6041/dbName?user=root&password=taosdata"; // if you want to connect a specified database named "dbName". 
- String jdbcUrl = "jdbc:TAOS-RS://localhost:6041?user=root&password=taosdata&batchfetch=true"; + String jdbcUrl = "jdbc:TAOS-WS://localhost:6041?user=root&password=taosdata"; Properties connProps = new Properties(); connProps.setProperty(TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT, "true"); connProps.setProperty(TSDBDriver.PROPERTY_KEY_CHARSET, "UTF-8"); diff --git a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java index eab8df06b9..1353ebbddc 100644 --- a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java +++ b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java @@ -15,7 +15,7 @@ public class WSParameterBindingBasicDemo { public static void main(String[] args) throws SQLException { - String jdbcUrl = "jdbc:TAOS-RS://" + host + ":6041/?batchfetch=true"; + String jdbcUrl = "jdbc:TAOS-WS://" + host + ":6041"; try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) { init(conn); @@ -40,7 +40,7 @@ public class WSParameterBindingBasicDemo { pstmt.setFloat(4, random.nextFloat()); pstmt.addBatch(); } - int [] exeResult = pstmt.executeBatch(); + int[] exeResult = pstmt.executeBatch(); // you can check exeResult here System.out.println("Successfully inserted " + exeResult.length + " rows to power.meters."); } @@ -60,7 +60,8 @@ public class WSParameterBindingBasicDemo { try (Statement stmt = conn.createStatement()) { stmt.execute("CREATE DATABASE IF NOT EXISTS power"); stmt.execute("USE power"); - stmt.execute("CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId INT, location BINARY(24))"); + stmt.execute( + "CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId INT, location BINARY(24))"); } } } diff --git a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java index f23fb187f4..7eaccb3db2 100644 --- a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java +++ b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java @@ -40,7 +40,7 @@ public class WSParameterBindingFullDemo { public static void main(String[] args) throws SQLException { - String jdbcUrl = "jdbc:TAOS-RS://" + host + ":6041/?batchfetch=true"; + String jdbcUrl = "jdbc:TAOS-WS://" + host + ":6041/"; try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) { @@ -51,8 +51,10 @@ public class WSParameterBindingFullDemo { stmtAll(conn); } catch (SQLException ex) { - // handle any errors, please refer to the JDBC specifications for detailed exceptions info - System.out.println("Failed to insert data using stmt, ErrCode:" + ex.getErrorCode() + "; ErrMessage: " + ex.getMessage()); + // handle any errors, please refer to the JDBC specifications for detailed + // exceptions info + System.out.println("Failed to insert data using stmt, ErrCode:" + ex.getErrorCode() + "; ErrMessage: " + + ex.getMessage()); throw ex; } catch (Exception ex) { System.out.println("Failed to insert data using stmt, ErrMessage: " + ex.getMessage()); @@ -104,30 +106,29 @@ public class WSParameterBindingFullDemo { pstmt.setTagBoolean(3, true); pstmt.setTagString(4, "binary_value"); pstmt.setTagNString(5, "nchar_value"); - pstmt.setTagVarbinary(6, new byte[]{(byte) 
0x98, (byte) 0xf4, 0x6e}); - pstmt.setTagGeometry(7, new byte[]{ + pstmt.setTagVarbinary(6, new byte[] { (byte) 0x98, (byte) 0xf4, 0x6e }); + pstmt.setTagGeometry(7, new byte[] { 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x59, 0x40, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x59, 0x40}); + 0x00, 0x00, 0x00, 0x59, 0x40 }); long current = System.currentTimeMillis(); - pstmt.setTimestamp(1, new Timestamp(current)); pstmt.setInt(2, 1); pstmt.setDouble(3, 1.1); pstmt.setBoolean(4, true); pstmt.setString(5, "binary_value"); pstmt.setNString(6, "nchar_value"); - pstmt.setVarbinary(7, new byte[]{(byte) 0x98, (byte) 0xf4, 0x6e}); - pstmt.setGeometry(8, new byte[]{ + pstmt.setVarbinary(7, new byte[] { (byte) 0x98, (byte) 0xf4, 0x6e }); + pstmt.setGeometry(8, new byte[] { 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x59, 0x40, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x59, 0x40}); + 0x00, 0x00, 0x00, 0x59, 0x40 }); pstmt.addBatch(); pstmt.executeBatch(); System.out.println("Successfully inserted rows to example_all_type_stmt.ntb"); diff --git a/docs/zh/07-develop/01-connect/_connect_rust.mdx b/docs/zh/07-develop/01-connect/_connect_rust.mdx index 0e65e8f920..d88a3335ca 100644 --- a/docs/zh/07-develop/01-connect/_connect_rust.mdx +++ b/docs/zh/07-develop/01-connect/_connect_rust.mdx @@ -3,6 +3,6 @@ ``` :::note -对于 Rust 连接器, 连接方式的不同只体现在使用的特性不同。如果启用了 "ws" 特性,那么只有 Websocket 的实现会被编译进来。 +对于 Rust 连接器, 连接方式的不同只体现在使用的特性不同。如果启用了 "ws" 特性,那么只有 WebSocket 的实现会被编译进来。 ::: diff --git a/docs/zh/07-develop/01-connect/index.md b/docs/zh/07-develop/01-connect/index.md index 1dfb95d169..bd26bea46d 100644 --- a/docs/zh/07-develop/01-connect/index.md +++ b/docs/zh/07-develop/01-connect/index.md @@ -28,7 +28,7 @@ TDengine 提供了丰富的应用程序开发接口,为了便于用户快速 1. 通过客户端驱动程序 taosc 直接与服务端程序 taosd 建立连接,这种连接方式下文中简称 “原生连接”。 2. 通过 taosAdapter 组件提供的 REST API 建立与 taosd 的连接,这种连接方式下文中简称 “REST 连接” -3. 通过 taosAdapter 组件提供的 Websocket API 建立与 taosd 的连接,这种连接方式下文中简称 “Websocket 连接” +3. 通过 taosAdapter 组件提供的 WebSocket API 建立与 taosd 的连接,这种连接方式下文中简称 “WebSocket 连接” ![TDengine connection type](connection-type-zh.webp) @@ -38,9 +38,9 @@ TDengine 提供了丰富的应用程序开发接口,为了便于用户快速 关键不同点在于: 1. 使用 原生连接,需要保证客户端的驱动程序 taosc 和服务端的 TDengine 版本配套。 -2. 使用 REST 连接,用户无需安装客户端驱动程序 taosc,具有跨平台易用的优势,但是无法体验数据订阅和二进制数据类型等功能。另外与 原生连接 和 Websocket 连接相比,REST连接的性能最低。REST 接口是无状态的。在使用 REST 连接时,需要在 SQL 中指定表、超级表的数据库名称。 -3. 使用 Websocket 连接,用户也无需安装客户端驱动程序 taosc。 -4. 连接云服务实例,必须使用 REST 连接 或 Websocket 连接。 +2. 使用 REST 连接,用户无需安装客户端驱动程序 taosc,具有跨平台易用的优势,但是无法体验数据订阅和二进制数据类型等功能。另外与 原生连接 和 WebSocket 连接相比,REST连接的性能最低。REST 接口是无状态的。在使用 REST 连接时,需要在 SQL 中指定表、超级表的数据库名称。 +3. 使用 WebSocket 连接,用户也无需安装客户端驱动程序 taosc。 +4. 
连接云服务实例,必须使用 REST 连接 或 WebSocket 连接。 **推荐使用 WebSocket 连接** @@ -126,7 +126,7 @@ TDengine 提供了丰富的应用程序开发接口,为了便于用户快速 ```bash pip3 install taos-ws-py ``` - :::note 此安装包为 Websocket 连接器 + :::note 此安装包为 WebSocket 连接器 - 同时安装 `taospy` 和 `taos-ws-py` ```bash pip3 install taospy[ws] @@ -182,7 +182,7 @@ taos = { version = "*"} ``` :::info -Rust 连接器通过不同的特性区分不同的连接方式。默认同时支持原生连接和 Websocket 连接,如果仅需要建立 Websocket 连接,可设置 `ws` 特性: +Rust 连接器通过不同的特性区分不同的连接方式。默认同时支持原生连接和 WebSocket 连接,如果仅需要建立 WebSocket 连接,可设置 `ws` 特性: ```toml taos = { version = "*", default-features = false, features = ["ws"] } @@ -201,7 +201,7 @@ taos = { version = "*", default-features = false, features = ["ws"] } ``` npm install @tdengine/websocket ``` - :::note Node.js 目前只支持 Websocket 连接 + :::note Node.js 目前只支持 WebSocket 连接 - **安装验证** - 新建安装验证目录,例如:`~/tdengine-test`,下载 GitHub 上 [nodejsChecker.js 源代码](https://github.com/taosdata/TDengine/tree/main/docs/examples/node/websocketexample/nodejsChecker.js)到本地。 - 在命令行中执行以下命令。 @@ -271,12 +271,10 @@ dotnet add package TDengine.Connector Java 连接器建立连接的参数有 URL 和 Properties。 TDengine 的 JDBC URL 规范格式为: - `jdbc:[TAOS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` + `jdbc:[TAOS|TAOS-WS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` URL 和 Properties 的详细参数说明和如何使用详见 [url 规范](../../reference/connector/java/#url-规范) - **注**:REST 连接中增加 `batchfetch` 参数并设置为 true,将开启 WebSocket 连接。 - Python 连接器使用 `connect()` 方法来建立连接,下面是连接参数的具体说明: @@ -387,8 +385,8 @@ DSN 的详细说明和如何使用详见 [连接功能](../../reference/connecto - `reconnectIntervalMs`:重连间隔毫秒时间,默认为 2000。 -**Websocket 连接** -C/C++ 语言连接器 Websocket 连接方式使用 `ws_connect()` 函数用于建立与 TDengine 数据库的连接。其参数为 DSN 描述字符串,其基本结构如下: +**WebSocket 连接** +C/C++ 语言连接器 WebSocket 连接方式使用 `ws_connect()` 函数用于建立与 TDengine 数据库的连接。其参数为 DSN 描述字符串,其基本结构如下: ```text [+]://[[:@]:][/][?=[&=]] @@ -417,8 +415,8 @@ C/C++ 语言连接器原生连接方式使用 `taos_connect()` 函数用于建 -### Websocket 连接 -下面是各语言连接器建立 Websocket 连接代码样例。演示了如何使用 Websocket 连接方式连接到 TDengine 数据库,并对连接设定一些参数。整个过程主要涉及到数据库连接的建立和异常处理。 +### WebSocket 连接 +下面是各语言连接器建立 WebSocket 连接代码样例。演示了如何使用 WebSocket 连接方式连接到 TDengine 数据库,并对连接设定一些参数。整个过程主要涉及到数据库连接的建立和异常处理。 diff --git a/docs/zh/07-develop/02-sql.md b/docs/zh/07-develop/02-sql.md index 5461c975dd..b4274045fc 100644 --- a/docs/zh/07-develop/02-sql.md +++ b/docs/zh/07-develop/02-sql.md @@ -33,7 +33,7 @@ REST API:直接调用 `taosadapter` 提供的 REST API 接口,进行数据 -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/create_db_ws.py}} ``` @@ -69,7 +69,7 @@ REST API:直接调用 `taosadapter` 提供的 REST API 接口,进行数据 -```c title="Websocket 连接" +```c title="WebSocket 连接" {{#include docs/examples/c-ws/create_db_demo.c:create_db_and_table}} ``` @@ -114,7 +114,7 @@ NOW 为系统内部函数,默认为客户端所在计算机当前时间。 NOW -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/insert_ws.py}} ``` @@ -151,7 +151,7 @@ NOW 为系统内部函数,默认为客户端所在计算机当前时间。 NOW -```c title="Websocket 连接" +```c title="WebSocket 连接" {{#include docs/examples/c-ws/insert_data_demo.c:insert_data}} ``` @@ -189,7 +189,7 @@ curl --location -uroot:taosdata 'http://127.0.0.1:6041/rest/sql' \ -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/query_ws.py}} ``` @@ -230,7 +230,7 @@ rust 连接器还支持使用 **serde** 进行反序列化行为结构体的结 -```c title="Websocket 
连接" +```c title="WebSocket 连接" {{#include docs/examples/c-ws/query_data_demo.c:query_data}} ``` @@ -273,7 +273,7 @@ reqId 可用于请求链路追踪,reqId 就像分布式系统中的 traceId -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/reqid_ws.py}} ``` @@ -310,7 +310,7 @@ reqId 可用于请求链路追踪,reqId 就像分布式系统中的 traceId -```c "Websocket 连接" +```c "WebSocket 连接" {{#include docs/examples/c-ws/with_reqid_demo.c:with_reqid}} ``` diff --git a/docs/zh/07-develop/04-schemaless.md b/docs/zh/07-develop/04-schemaless.md index a865b58b28..bf10b41736 100644 --- a/docs/zh/07-develop/04-schemaless.md +++ b/docs/zh/07-develop/04-schemaless.md @@ -191,7 +191,7 @@ st,t1=3,t2=4,t3=t3 c1=3i64,c6="passit" 1626006833640000000 ::: -### Websocket 连接 +### WebSocket 连接 diff --git a/docs/zh/07-develop/05-stmt.md b/docs/zh/07-develop/05-stmt.md index 624600ba4d..74b44ba8e6 100644 --- a/docs/zh/07-develop/05-stmt.md +++ b/docs/zh/07-develop/05-stmt.md @@ -23,7 +23,7 @@ import TabItem from "@theme/TabItem"; - 执行批量插入操作,将这些数据行插入到对应的子表中。 3. 最后打印实际插入表中的行数。 -## Websocket 连接 +## WebSocket 连接 ```java diff --git a/docs/zh/07-develop/07-tmq.md b/docs/zh/07-develop/07-tmq.md index c668203259..a91a764c67 100644 --- a/docs/zh/07-develop/07-tmq.md +++ b/docs/zh/07-develop/07-tmq.md @@ -94,7 +94,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 -- Websocket 连接: 因为使用 dsn,不需要 `td.connect.ip`,`td.connect.port`,`td.connect.user` 和 `td.connect.pass` 四个配置项,其余同通用配置项。 +- WebSocket 连接: 因为使用 dsn,不需要 `td.connect.ip`,`td.connect.port`,`td.connect.user` 和 `td.connect.pass` 四个配置项,其余同通用配置项。 - 原生连接: 同通用基础配置项。 @@ -103,8 +103,8 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 -### Websocket 连接 -介绍各语言连接器使用 Websocket 连接方式创建消费者。指定连接的服务器地址,设置自动提交,从最新消息开始消费,指定 `group.id` 和 `client.id` 等信息。有的语言的连接器还支持反序列化参数。 +### WebSocket 连接 +介绍各语言连接器使用 WebSocket 连接方式创建消费者。指定连接的服务器地址,设置自动提交,从最新消息开始消费,指定 `group.id` 和 `client.id` 等信息。有的语言的连接器还支持反序列化参数。 @@ -234,7 +234,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 订阅消费数据 消费者订阅主题后,可以开始接收并处理这些主题中的消息。订阅消费数据的示例代码如下: -### Websocket 连接 +### WebSocket 连接 @@ -403,7 +403,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 指定订阅的 Offset 消费者可以指定从特定 Offset 开始读取分区中的消息,这允许消费者重读消息或跳过已处理的消息。下面展示各语言连接器如何指定订阅的 Offset。 -### Websocket 连接 +### WebSocket 连接 @@ -549,7 +549,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 **注意**:手工提交消费进度前确保消息正常处理完成,否则处理出错的消息不会被再次消费。自动提交是在本次 `poll` 消息时可能会提交上次消息的消费进度,因此请确保消息处理完毕再进行下一次 `poll` 或消息获取。 -### Websocket 连接 +### WebSocket 连接 @@ -663,7 +663,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 取消订阅和关闭消费 消费者可以取消对主题的订阅,停止接收消息。当消费者不再需要时,应该关闭消费者实例,以释放资源和断开与 TDengine 服务器的连接。 -### Websocket 连接 +### WebSocket 连接 @@ -766,7 +766,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 完整示例 -### Websocket 连接 +### WebSocket 连接
diff --git a/docs/zh/08-operation/18-dual.md b/docs/zh/08-operation/18-dual.md index c7871a8e1e..caddb7ab3b 100644 --- a/docs/zh/08-operation/18-dual.md +++ b/docs/zh/08-operation/18-dual.md @@ -30,9 +30,8 @@ toc_max_heading_level: 4 目前只有 Java 连接器在 WebSocket 连接模式下支持双活,其配置示例如下 ```java -url = "jdbc:TAOS-RS://" + host + ":6041/?user=root&password=taosdata"; +url = "jdbc:TAOS-WS://" + host + ":6041/?user=root&password=taosdata"; Properties properties = new Properties(); -properties.setProperty(TSDBDriver.PROPERTY_KEY_BATCH_LOAD, "true"); properties.setProperty(TSDBDriver.PROPERTY_KEY_SLAVE_CLUSTER_HOST, "192.168.1.11"); properties.setProperty(TSDBDriver.PROPERTY_KEY_SLAVE_CLUSTER_PORT, "6041"); properties.setProperty(TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT, "true"); @@ -43,13 +42,13 @@ connection = DriverManager.getConnection(url, properties); 其中的配置属性及含义如下表 -| 属性名 | 含义 | -| ----------------- | ------------------ | -| PROPERTY_KEY_SLAVE_CLUSTER_HOST | 第二节点的主机名或者 ip,默认空 | -| PROPERTY_KEY_SLAVE_CLUSTER_PORT | 第二节点的端口号,默认空 | -| PROPERTY_KEY_ENABLE_AUTO_RECONNECT | 是否启用自动重连。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。双活场景下请设置为 true | -| PROPERTY_KEY_RECONNECT_INTERVAL_MS | 重连的时间间隔,单位毫秒:默认 2000 毫秒,也就是 2 秒;最小值为 0, 表示立即重试;最大值不做限制 | -| PROPERTY_KEY_RECONNECT_RETRY_COUNT | 每节点最多重试次数:默认值为 3;最小值为 0,表示不进行重试;最大值不做限制 | +| 属性名 | 含义 | +| ---------------------------------- | ----------------------------------------------------------------------------------------------------------------- | +| PROPERTY_KEY_SLAVE_CLUSTER_HOST | 第二节点的主机名或者 ip,默认空 | +| PROPERTY_KEY_SLAVE_CLUSTER_PORT | 第二节点的端口号,默认空 | +| PROPERTY_KEY_ENABLE_AUTO_RECONNECT | 是否启用自动重连。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。双活场景下请设置为 true | +| PROPERTY_KEY_RECONNECT_INTERVAL_MS | 重连的时间间隔,单位毫秒:默认 2000 毫秒,也就是 2 秒;最小值为 0, 表示立即重试;最大值不做限制 | +| PROPERTY_KEY_RECONNECT_RETRY_COUNT | 每节点最多重试次数:默认值为 3;最小值为 0,表示不进行重试;最大值不做限制 | ### 约束条件 diff --git a/docs/zh/14-reference/01-components/03-taosadapter.md b/docs/zh/14-reference/01-components/03-taosadapter.md index a8e8fc3418..7d69cab598 100644 --- a/docs/zh/14-reference/01-components/03-taosadapter.md +++ b/docs/zh/14-reference/01-components/03-taosadapter.md @@ -306,7 +306,7 @@ http 返回内容: ## taosAdapter 监控指标 -taosAdapter 采集 REST/Websocket 相关请求的监控指标。将监控指标上报给 taosKeeper,这些监控指标会被 taosKeeper 写入监控数据库,默认是 `log` 库,可以在 taoskeeper 配置文件中修改。以下是这些监控指标的详细介绍。 +taosAdapter 采集 REST/WebSocket 相关请求的监控指标。将监控指标上报给 taosKeeper,这些监控指标会被 taosKeeper 写入监控数据库,默认是 `log` 库,可以在 taoskeeper 配置文件中修改。以下是这些监控指标的详细介绍。 #### adapter\_requests 表 @@ -331,7 +331,7 @@ taosAdapter 采集 REST/Websocket 相关请求的监控指标。将监控指标 | query\_in\_process | INT UNSIGNED | | 正在处理查询请求数 | | write\_in\_process | INT UNSIGNED | | 正在处理写入请求数 | | endpoint | VARCHAR | | 请求端点 | -| req\_type | NCHAR | TAG | 请求类型:0 为 REST,1 为 Websocket | +| req\_type | NCHAR | TAG | 请求类型:0 为 REST,1 为 WebSocket | ## 结果返回条数限制 diff --git a/docs/zh/14-reference/05-connector/10-cpp.mdx b/docs/zh/14-reference/05-connector/10-cpp.mdx index c618601fb9..7164baad2a 100644 --- a/docs/zh/14-reference/05-connector/10-cpp.mdx +++ b/docs/zh/14-reference/05-connector/10-cpp.mdx @@ -5,14 +5,14 @@ toc_max_heading_level: 4 --- C/C++ 开发人员可以使用 TDengine 的客户端驱动,即 C/C++连接器 (以下都用 TDengine 客户端驱动表示),开发自己的应用来连接 TDengine 集群完成数据存储、查询以及其他功能。TDengine 客户端驱动的 API 类似于 MySQL 的 C API。应用程序使用时,需要包含 TDengine 头文件,里面列出了提供的 API 的函数原型;应用程序还要链接到所在平台上对应的动态库。 -TDengine 的客户端驱动提供了 taosws 和 taos 两个动态库,分别支持 Websocket 连接和原生连接。 Websocket 连接和原生连接的区别是 Websocket 连接方式不要求客户端和服务端版本完全匹配,而原生连接要求,在性能上 Websocket 连接方式也接近于原生连接,一般我们推荐使用 Websocket 
连接方式。 +TDengine 的客户端驱动提供了 taosws 和 taos 两个动态库,分别支持 WebSocket 连接和原生连接。 WebSocket 连接和原生连接的区别是 WebSocket 连接方式不要求客户端和服务端版本完全匹配,而原生连接要求,在性能上 WebSocket 连接方式也接近于原生连接,一般我们推荐使用 WebSocket 连接方式。 下面我们分开介绍两种连接方式的使用方法。 -## Websocket 连接方式 +## WebSocket 连接方式 -Websocket 连接方式需要使用 taosws.h 头文件和 taosws 动态库。 +WebSocket 连接方式需要使用 taosws.h 头文件和 taosws 动态库。 ```c #include @@ -44,7 +44,7 @@ TDengine 客户端驱动的动态库位于: ### 错误码 在 C 接口的设计中,错误码采用整数类型表示,每个错误码都对应一个特定的错误状态。如未特别说明,当 API 的返回值是整数时,_0_ 代表成功,其它是代表失败原因的错误码,当返回值是指针时, _NULL_ 表示失败。 -Websocket 连接方式单独的错误码在 `taosws.h` 中, +WebSocket 连接方式单独的错误码在 `taosws.h` 中, | 错误码 | 错误描述 | 可能的出错场景或者可能的原因 | 建议用户采取的措施 | @@ -82,7 +82,7 @@ WebSocket 连接方式错误码只保留了原生连接错误码的后两个字 #### DSN -C/C++ Websocket 连接器通过 DSN 连接描述字符串来表示连接信息。 +C/C++ WebSocket 连接器通过 DSN 连接描述字符串来表示连接信息。 DSN 描述字符串基本结构如下: ```text @@ -96,16 +96,16 @@ DSN 描述字符串基本结构如下: - **driver**: 必须指定驱动名以便连接器选择何种方式创建连接,支持如下驱动名: - **taos**: 默认驱动,支持 SQL 执行,参数绑定,无模式写入。 - **tmq**: 使用 TMQ 订阅数据。 -- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 Websocket 方式建立连接。 - - **http/ws**: 使用 Websocket 协议。 - - **https/wss**: 在 Websocket 连接方式下显示启用 SSL/TLS 协议。 +- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 WebSocket 方式建立连接。 + - **http/ws**: 使用 WebSocket 协议。 + - **https/wss**: 在 WebSocket 连接方式下显示启用 SSL/TLS 协议。 - **username/password**: 用于创建连接的用户名及密码。 -- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时 Websocket 连接默认为 `localhost:6041` 。 +- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时 WebSocket 连接默认为 `localhost:6041` 。 - **database**: 指定默认连接的数据库名,可选参数。 - **params**:其他可选参数。 -一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 Websocket(`ws`)方式通过 `6041` 端口连接服务器 `localhost`,并指定默认数据库为 `test`。 +一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 WebSocket(`ws`)方式通过 `6041` 端口连接服务器 `localhost`,并指定默认数据库为 `test`。 #### 基础 API diff --git a/docs/zh/14-reference/05-connector/14-java.mdx b/docs/zh/14-reference/05-connector/14-java.mdx index ba4cb38afd..0a167dd5ee 100644 --- a/docs/zh/14-reference/05-connector/14-java.mdx +++ b/docs/zh/14-reference/05-connector/14-java.mdx @@ -33,14 +33,15 @@ REST 连接支持所有能运行 Java 的平台。 | taos-jdbcdriver 版本 | 主要变化 | TDengine 版本 | | :------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------: | -| 3.3.3 | 1. 解决了 Websocket statement 关闭导致的内存泄漏 | - | -| 3.3.2 | 1. 优化 Websocket 连接下的参数绑定性能;2. 优化了对 mybatis 的支持 | - | -| 3.3.0 | 1. 优化 Websocket 连接下的数据传输性能;2. 支持跳过 SSL 验证,默认关闭 | 3.3.2.0 及更高版本 | +| 3.4.0 | 1. 使用 jackson 库替换 fastjson 库;2. WebSocket 采用独立协议标识;3. 优化后台拉取线程使用,避免用户误用导致超时。 | - | +| 3.3.3 | 1. 解决了 WebSocket statement 关闭导致的内存泄漏 | - | +| 3.3.2 | 1. 优化 WebSocket 连接下的参数绑定性能;2. 优化了对 mybatis 的支持 | - | +| 3.3.0 | 1. 优化 WebSocket 连接下的数据传输性能;2. 支持跳过 SSL 验证,默认关闭 | 3.3.2.0 及更高版本 | | 3.2.11 | 解决了 Native 连接关闭结果集 bug | - | -| 3.2.10 | 1. REST/WebSocket 连接支持传输中的数据压缩;2. Websocket 自动重连机制,默认关闭;3. Connection 类提供无模式写入的方法;4. 优化了原生连接的数据拉取性能;5. 修复了一些已知问题;6.元数据获取函数可以返回支持的函数列表。 | - | -| 3.2.9 | 解决了 Websocket prepareStatement 关闭 bug | - | -| 3.2.8 | 优化了自动提交, 解决了 websocket 手动提交 bug, 优化 Websocket prepareStatement 使用一个连接, 元数据支持视图 | - | -| 3.2.7 | 支持 VARBINARY 和 GEOMETRY 类型,增加 native 连接的时区设置支持。增加 websocket 自动重连功能。 | 3.2.0.0 及更高版本 | +| 3.2.10 | 1. REST/WebSocket 连接支持传输中的数据压缩;2. WebSocket 自动重连机制,默认关闭;3. Connection 类提供无模式写入的方法;4. 优化了原生连接的数据拉取性能;5. 
修复了一些已知问题;6.元数据获取函数可以返回支持的函数列表。 | - | +| 3.2.9 | 解决了 WebSocket prepareStatement 关闭 bug | - | +| 3.2.8 | 优化了自动提交, 解决了 WebSocket 手动提交 bug, 优化 WebSocket prepareStatement 使用一个连接, 元数据支持视图 | - | +| 3.2.7 | 支持 VARBINARY 和 GEOMETRY 类型,增加 native 连接的时区设置支持。增加 WebSocket 自动重连功能。 | 3.2.0.0 及更高版本 | | 3.2.5 | 数据订阅增加 committed()、assignment() 方法 | 3.1.0.3 及更高版本 | | 3.2.4 | 数据订阅在 WebSocket 连接下增加 enable.auto.commit 参数,以及 unsubscribe() 方法。 | - | | 3.2.3 | 修复 ResultSet 在一些情况数据解析失败 | - | @@ -195,17 +196,14 @@ WKB规范请参考[Well-Known Binary (WKB)](https://libgeos.org/specifications/w ## API 参考 ### JDBC 驱动 -taos-jdbcdriver 实现了 JDBC 标准的 Driver 接口,提供了两个实现类:RestfulDriver 和 TSDBDriver。 -Websocket 和 REST 连接使用驱动类 `com.taosdata.jdbc.rs.RestfulDriver`。原生连接使用驱动类 `com.taosdata.jdbc.TSDBDriver`。 - +taos-jdbcdriver 实现了 JDBC 标准的 Driver 接口,提供了 3 个实现类。 +- WebSocket 连接使用驱动类 `com.taosdata.jdbc.ws.WebSocketDriver`。 +- 原生连接使用驱动类 `com.taosdata.jdbc.TSDBDriver`。 +- REST 连接使用驱动类 `com.taosdata.jdbc.rs.RestfulDriver`。 #### URL 规范 TDengine 的 JDBC URL 规范格式为: -`jdbc:[TAOS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` - -对于建立连接,原生连接与 REST 连接有细微不同。 Websocket 和 REST 连接使用驱动类 `com.taosdata.jdbc.rs.RestfulDriver`。原生连接使用驱动类 `com.taosdata.jdbc.TSDBDriver`。 - -**注**:REST 连接中增加 `batchfetch` 参数并设置为 true,将开启 WebSocket 连接。 +`jdbc:[TAOS|TAOS-WS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` **原生连接** `jdbc:TAOS://taosdemo.com:6030/power?user=root&password=taosdata`,使用了 JDBC 原生连接的 TSDBDriver,建立了到 hostname 为 taosdemo.com,端口为 6030(TDengine 的默认端口),数据库名为 power 的连接。这个 URL @@ -234,23 +232,38 @@ TDengine 中,只要保证 firstEp 和 secondEp 中一个节点有效,就可 > **注意**:这里的配置文件指的是调用 JDBC Connector 的应用程序所在机器上的配置文件,Linux OS 上默认值 /etc/taos/taos.cfg ,Windows OS 上默认值 C://TDengine/cfg/taos.cfg。 +**WebSocket 连接** +使用 JDBC WebSocket 连接,不需要依赖客户端驱动。与 JDBC 原生连接相比,仅需要: -**Websocket 和 REST 连接** -使用 JDBC Websocket 或 REST 连接,不需要依赖客户端驱动。与 JDBC 原生连接相比,仅需要: +1. driverClass 指定为“com.taosdata.jdbc.ws.WebSocketDriver”; +2. jdbcUrl 以“jdbc:TAOS-WS://”开头; +3. 使用 6041 作为连接端口。 + +对于 WebSocket 连接,url 中的配置参数如下: +- user:登录 TDengine 用户名,默认值 'root'。 +- password:用户登录密码,默认值 'taosdata'。 +- charset: 当开启批量拉取数据时,指定解析字符串数据的字符集。 +- batchErrorIgnore:true:在执行 Statement 的 executeBatch 时,如果中间有一条 SQL 执行失败,继续执行下面的 SQL 了。false:不再执行失败 SQL 后的任何语句。默认值为:false。 +- httpConnectTimeout: 连接超时时间,单位 ms, 默认值为 60000。 +- messageWaitTimeout: 消息超时时间, 单位 ms, 默认值为 60000。 +- useSSL: 连接中是否使用 SSL。 + +**注意**:部分配置项(比如:locale、timezone)在 WebSocket 连接中不生效。 + +**REST 连接** +使用 JDBC REST 连接,不需要依赖客户端驱动。与 JDBC 原生连接相比,仅需要: 1. driverClass 指定为“com.taosdata.jdbc.rs.RestfulDriver”; 2. jdbcUrl 以“jdbc:TAOS-RS://”开头; 3. 
使用 6041 作为连接端口。 -对于 Websocket 和 REST 连接,url 中的配置参数如下: +对于 REST 连接,url 中的配置参数如下: - user:登录 TDengine 用户名,默认值 'root'。 - password:用户登录密码,默认值 'taosdata'。 -- batchfetch: true:在执行查询时批量拉取结果集;false:逐行拉取结果集。默认值为:false。逐行拉取结果集使用 HTTP 方式进行数据传输。JDBC REST 连接支持批量拉取数据功能。taos-jdbcdriver 与 TDengine 之间通过 WebSocket 连接进行数据传输。相较于 HTTP,WebSocket 可以使 JDBC REST 连接支持大数据量查询,并提升查询性能。 - charset: 当开启批量拉取数据时,指定解析字符串数据的字符集。 - batchErrorIgnore:true:在执行 Statement 的 executeBatch 时,如果中间有一条 SQL 执行失败,继续执行下面的 SQL 了。false:不再执行失败 SQL 后的任何语句。默认值为:false。 - httpConnectTimeout: 连接超时时间,单位 ms, 默认值为 60000。 -- httpSocketTimeout: socket 超时时间,单位 ms,默认值为 60000。仅在 batchfetch 设置为 false 时生效。 -- messageWaitTimeout: 消息超时时间, 单位 ms, 默认值为 60000。 仅在 batchfetch 设置为 true 时生效。 +- httpSocketTimeout: socket 超时时间,单位 ms,默认值为 60000。 - useSSL: 连接中是否使用 SSL。 - httpPoolSize: REST 并发请求大小,默认 20。 @@ -272,7 +285,7 @@ TDengine 中,只要保证 firstEp 和 secondEp 中一个节点有效,就可 properties 中的配置参数如下: - TSDBDriver.PROPERTY_KEY_USER:登录 TDengine 用户名,默认值 'root'。 - TSDBDriver.PROPERTY_KEY_PASSWORD:用户登录密码,默认值 'taosdata'。 -- TSDBDriver.PROPERTY_KEY_BATCH_LOAD: true:在执行查询时批量拉取结果集;false:逐行拉取结果集。默认值为:false。 +- TSDBDriver.PROPERTY_KEY_BATCH_LOAD: true:在执行查询时批量拉取结果集;false:逐行拉取结果集。默认值为:false。因历史原因使用 REST 连接时,若设置此参数为 true 会变成 WebSocket 连接。 - TSDBDriver.PROPERTY_KEY_BATCH_ERROR_IGNORE:true:在执行 Statement 的 executeBatch 时,如果中间有一条 SQL 执行失败,继续执行下面的 sq 了。false:不再执行失败 SQL 后的任何语句。默认值为:false。 - TSDBDriver.PROPERTY_KEY_CONFIG_DIR:仅在使用 JDBC 原生连接时生效。客户端配置文件目录路径,Linux OS 上默认值 `/etc/taos`,Windows OS 上默认值 `C:/TDengine/cfg`。 - TSDBDriver.PROPERTY_KEY_CHARSET:客户端使用的字符集,默认值为系统字符集。 @@ -280,16 +293,16 @@ properties 中的配置参数如下: - TSDBDriver.PROPERTY_KEY_TIME_ZONE:仅在使用 JDBC 原生连接时生效。 客户端使用的时区,默认值为系统当前时区。因为历史的原因,我们只支持POSIX标准的部分规范,如UTC-8(代表中国上上海), GMT-8,Asia/Shanghai 这几种形式。 - TSDBDriver.HTTP_CONNECT_TIMEOUT: 连接超时时间,单位 ms, 默认值为 60000。仅在 REST 连接时生效。 - TSDBDriver.HTTP_SOCKET_TIMEOUT: socket 超时时间,单位 ms,默认值为 60000。仅在 REST 连接且 batchfetch 设置为 false 时生效。 -- TSDBDriver.PROPERTY_KEY_MESSAGE_WAIT_TIMEOUT: 消息超时时间, 单位 ms, 默认值为 60000。 仅在 REST 连接且 batchfetch 设置为 true 时生效。 -- TSDBDriver.PROPERTY_KEY_USE_SSL: 连接中是否使用 SSL。仅在 REST 连接时生效。 +- TSDBDriver.PROPERTY_KEY_MESSAGE_WAIT_TIMEOUT: 消息超时时间, 单位 ms, 默认值为 60000。 仅 WebSocket 连接下有效。 +- TSDBDriver.PROPERTY_KEY_USE_SSL: 连接中是否使用 SSL。仅在 WebSocket/REST 连接时生效。 - TSDBDriver.HTTP_POOL_SIZE: REST 并发请求大小,默认 20。 -- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 REST/Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 -- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 REST/WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 > **注意**:启用自动重连仅对简单执行 SQL 语句以及 无模式写入、数据订阅有效。对于参数绑定无效。自动重连仅对连接建立时通过参数指定数据库有效,对后面的 `use db` 语句切换数据库无效。 - TSDBDriver.PROPERTY_KEY_RECONNECT_INTERVAL_MS: 自动重连重试间隔,单位毫秒,默认值 2000。仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 - TSDBDriver.PROPERTY_KEY_RECONNECT_RETRY_COUNT: 自动重连重试次数,默认值 3,仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 -- TSDBDriver.PROPERTY_KEY_DISABLE_SSL_CERT_VALIDATION: 关闭 SSL 证书验证 。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_DISABLE_SSL_CERT_VALIDATION: 关闭 SSL 证书验证 。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 此外对 JDBC 原生连接,通过指定 URL 和 Properties 还可以指定其他参数,比如日志级别、SQL 长度等。 @@ -1154,7 +1167,7 @@ JDBC 驱动支持标准的 ResultSet 接口,提供了用于读取结果集中 PreparedStatement 允许使用预编译的 SQL 语句,这可以提高性能并提供参数化查询的能力,从而增加安全性。 
JDBC 驱动提供了实现 PreparedStatement 接口的两个类: 1. 对应原生连接的 TSDBPreparedStatement -2. 对应 Websocket 连接的 TSWSPreparedStatement +2. 对应 WebSocket 连接的 TSWSPreparedStatement 因 JDBC 标准没有高性能绑定数据的接口,TSDBPreparedStatement 和 TSWSPreparedStatement 都新增了一些方法,用来扩展参数绑定能力。 > **注意**:由于 PreparedStatement 继承了 Statement 接口,因此对于这部分重复的接口不再赘述,请参考 Statement 接口中对应描述。 @@ -1347,8 +1360,8 @@ JDBC 标准不支持数据订阅,因此本章所有接口都是扩展接口。 - httpConnectTimeout: 创建连接超时参数,单位 ms,默认为 5000 ms。仅在 WebSocket 连接下有效。 - messageWaitTimeout: 数据传输超时参数,单位 ms,默认为 10000 ms。仅在 WebSocket 连接下有效。 - httpPoolSize: 同一个连接下最大并行请求数。仅在 WebSocket 连接下有效。 -- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 -- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 - TSDBDriver.PROPERTY_KEY_RECONNECT_INTERVAL_MS: 自动重连重试间隔,单位毫秒,默认值 2000。仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 - TSDBDriver.PROPERTY_KEY_RECONNECT_RETRY_COUNT: 自动重连重试次数,默认值 3,仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 diff --git a/docs/zh/14-reference/05-connector/26-rust.mdx b/docs/zh/14-reference/05-connector/26-rust.mdx index 1fcc1e3dcd..cfabed4c61 100644 --- a/docs/zh/14-reference/05-connector/26-rust.mdx +++ b/docs/zh/14-reference/05-connector/26-rust.mdx @@ -18,9 +18,9 @@ import RequestId from "./_request_id.mdx"; ## 连接方式 -`taos` 提供两种建立连接的方式。一般我们推荐使用 **Websocket 连接**。 +`taos` 提供两种建立连接的方式。一般我们推荐使用 **WebSocket 连接**。 - **原生连接**,它通过 TDengine 客户端驱动程序(taosc)连接 TDengine 运行实例。 -- **Websocket 连接**,它通过 taosAdapter 的 Websocket 接口连接 TDengine 运行实例。 +- **WebSocket 连接**,它通过 taosAdapter 的 WebSocket 接口连接 TDengine 运行实例。 你可以通过不同的 “特性(即 Cargo 关键字 `features`)” 来指定使用哪种连接器(默认同时支持)。 @@ -29,13 +29,13 @@ import RequestId from "./_request_id.mdx"; ## 支持的平台 原生连接支持的平台和 TDengine 客户端驱动支持的平台一致。 -Websocket 连接支持所有能运行 Rust 的平台。 +WebSocket 连接支持所有能运行 Rust 的平台。 ## 版本历史 | Rust 连接器版本 | TDengine 版本 | 主要功能 | | :----------------: | :--------------: | :--------------------------------------------------: | -| v0.12.3 | 3.3.0.0 or later | 优化了 Websocket 查询和插入性能,支持了 VARBINARY 和 GEOMETRY 类型 | +| v0.12.3 | 3.3.0.0 or later | 优化了 WebSocket 查询和插入性能,支持了 VARBINARY 和 GEOMETRY 类型 | | v0.12.0 | 3.2.3.0 or later | WS 支持压缩。 | | v0.11.0 | 3.2.0.0 | TMQ 功能优化。 | | v0.10.0 | 3.1.0.0 | WS endpoint 变更。 | @@ -115,15 +115,15 @@ DSN 描述字符串基本结构如下: - **driver**: 必须指定驱动名以便连接器选择何种方式创建连接,支持如下驱动名: - **taos**: 使用 TDengine 连接器驱动,默认是使用 taos 驱动。 - **tmq**: 使用 TMQ 订阅数据。 -- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 Websocket 方式建立连接。 - - **http/ws**: 使用 Websocket 创建连接。 - - **https/wss**: 在 Websocket 连接方式下显示启用 SSL/TLS 连接。 +- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 WebSocket 方式建立连接。 + - **http/ws**: 使用 WebSocket 创建连接。 + - **https/wss**: 在 WebSocket 连接方式下显示启用 SSL/TLS 连接。 - **username/password**: 用于创建连接的用户名及密码。 -- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时(`taos://`),原生连接默认为 `localhost:6030`,Websocket 连接默认为 `localhost:6041` 。 +- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时(`taos://`),原生连接默认为 `localhost:6030`,WebSocket 连接默认为 `localhost:6041` 。 - **database**: 指定默认连接的数据库名,可选参数。 - **params**:其他可选参数。 -一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 Websocket(`ws`)方式通过 `6041` 端口连接服务器 `localhost`,并指定默认数据库为 `test`。 +一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 WebSocket(`ws`)方式通过 `6041` 端口连接服务器 
`localhost`,并指定默认数据库为 `test`。 #### TaosBuilder TaosBuilder 结构体主要提供了根据 DSN 构建 Taos 对象的方法,还提供了检查连接,以及获取客户端版本号等功能。 diff --git a/docs/zh/14-reference/05-connector/30-python.mdx b/docs/zh/14-reference/05-connector/30-python.mdx index 8e08bfc103..8436c30249 100644 --- a/docs/zh/14-reference/05-connector/30-python.mdx +++ b/docs/zh/14-reference/05-connector/30-python.mdx @@ -14,10 +14,10 @@ import RequestId from "./_request_id.mdx"; Python 连接器的源码托管在 [GitHub](https://github.com/taosdata/taos-connector-python)。 ## 连接方式 -`taospy`主要提供三种形式的连接器。一般我们推荐使用 **Websocket 连接**。 +`taospy`主要提供三种形式的连接器。一般我们推荐使用 **WebSocket 连接**。 - **原生连接**,对应 `taospy` 包的 `taos` 模块。通过 TDengine 客户端驱动程序(taosc)原生连接 TDengine 实例,支持数据写入、查询、数据订阅、schemaless 接口和参数绑定接口等功能。 - **REST 连接**,对应 `taospy` 包的 `taosrest` 模块。通过 taosAdapter 提供的 HTTP 接口连接 TDengine 实例,不支持 schemaless 和数据订阅等特性。 -- **Websocket 连接**,对应 `taos-ws-py` 包,可以选装。通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。 +- **WebSocket 连接**,对应 `taos-ws-py` 包,可以选装。通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。 连接方式的详细介绍请参考:[连接方式](../../../develop/connect/#连接方式) @@ -48,9 +48,9 @@ Python 连接器的源码托管在 [GitHub](https://github.com/taosdata/taos-con |2.7.9|数据订阅支持获取消费进度和重置消费进度| |2.7.8|新增 `execute_many`| -|Python Websocket Connector 版本|主要变化| +|Python WebSocket Connector 版本|主要变化| |:----------------------------:|:-----:| -|0.3.2|优化 Websocket sql 查询和插入性能,修改 readme 和 文档,修复已知问题| +|0.3.2|优化 WebSocket sql 查询和插入性能,修改 readme 和 文档,修复已知问题| |0.2.9|已知问题修复| |0.2.5|1. 数据订阅支持获取消费进度和重置消费进度
2. 支持 schemaless
3. 支持 STMT| |0.2.4|数据订阅新增取消订阅方法| diff --git a/docs/zh/14-reference/05-connector/35-node.mdx b/docs/zh/14-reference/05-connector/35-node.mdx index bd2ca537e3..6ac34d2471 100644 --- a/docs/zh/14-reference/05-connector/35-node.mdx +++ b/docs/zh/14-reference/05-connector/35-node.mdx @@ -14,7 +14,7 @@ Node.js 连接器源码托管在 [GitHub](https://github.com/taosdata/taos-conne ## 连接方式 -Node.js 连接器目前仅支持 Websocket 连接器, 其通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例。 +Node.js 连接器目前仅支持 WebSocket 连接器, 其通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例。 连接方式的详细介绍请参考:[连接方式](../../../develop/connect/#连接方式) @@ -48,7 +48,7 @@ Node.js 连接器目前仅支持 Websocket 连接器, 其通过 taosAdapter | 107 | unknown sql type in tdengine | 请检查 TDengine 支持的 Data Type 类型。 | | 108 | connection has been closed | 连接已经关闭,请检查 Connection 是否关闭后再次使用,或是连接是否正常。 | | 109 | fetch block data parse fail | 获取到的查询数据,解析失败 | -| 110 | websocket connection has reached its maximum limit | Websocket 连接达到上限 | +| 110 | websocket connection has reached its maximum limit | WebSocket 连接达到上限 | - [TDengine Node.js Connector Error Code](https://github.com/taosdata/taos-connector-node/blob/main/nodejs/src/common/wsError.ts) - TDengine 其他功能模块的报错,请参考 [错误码](../../../reference/error-code) @@ -104,7 +104,7 @@ Node.js 连接器目前仅支持 Websocket 连接器, 其通过 taosAdapter ## API 参考 -Node.js 连接器(`@tdengine/websocket`), 其通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例。 +Node.js 连接器(`@tdengine/websocket`), 其通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例。 ### URL 规范 diff --git a/docs/zh/14-reference/05-connector/40-csharp.mdx b/docs/zh/14-reference/05-connector/40-csharp.mdx index 93f592fdd0..e4e778eeff 100644 --- a/docs/zh/14-reference/05-connector/40-csharp.mdx +++ b/docs/zh/14-reference/05-connector/40-csharp.mdx @@ -14,7 +14,7 @@ import RequestId from "./_request_id.mdx"; `TDengine.Connector` 提供两种形式的连接器 * **原生连接**,通过 TDengine 客户端驱动程序(taosc)原生连接 TDengine 实例,支持数据写入、查询、数据订阅、schemaless 接口和参数绑定接口等功能。 -* **Websocket 连接**,通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。(自 v3.0.1 起) +* **WebSocket 连接**,通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。(自 v3.0.1 起) 连接方式的详细介绍请参考:[连接方式](../../../develop/connect/#连接方式) diff --git a/docs/zh/14-reference/05-connector/index.md b/docs/zh/14-reference/05-connector/index.md index 04a2ef6c1f..bd2cff6a3d 100644 --- a/docs/zh/14-reference/05-connector/index.md +++ b/docs/zh/14-reference/05-connector/index.md @@ -62,7 +62,7 @@ TDengine 版本更新往往会增加新的功能特性,列表中的连接器 | **连接管理** | 支持 | 支持 | 支持 | | **执行 SQL** | 支持 | 支持 | 支持 | -### 使用 Websocket 接口 +### 使用 WebSocket 接口 | **功能特性** | **Java** | **Python** | **Go** | **C#** | **Node.js** | **Rust** | **C/C++** | | ------------------- | -------- | ---------- | ------ | ------ | ----------- | -------- | --------- | From c377bb3514bb80f2f6807e18fe7f3d75fadb7ca5 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 17 Oct 2024 08:44:20 +0800 Subject: [PATCH 024/102] tcs/test: ut for linux only --- source/libs/tcs/test/CMakeLists.txt | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/source/libs/tcs/test/CMakeLists.txt b/source/libs/tcs/test/CMakeLists.txt index f0a5fb97a6..1252736b33 100644 --- a/source/libs/tcs/test/CMakeLists.txt +++ b/source/libs/tcs/test/CMakeLists.txt @@ -1,3 +1,5 @@ +if (TD_LINUX) + aux_source_directory(. 
TCS_TEST_SRC) add_executable(tcsTest ${TCS_TEST_SRC}) @@ -9,10 +11,12 @@ target_include_directories(tcsTest target_link_libraries(tcsTest tcs - common gtest_main + gtest_main ) enable_testing() add_test( NAME tcs_test COMMAND tcsTest ) + +endif() From 1b4c2faf268d7fd5aa664640ff7b3d89b810ca3e Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 17 Oct 2024 09:43:18 +0800 Subject: [PATCH 025/102] tcs/stream: merge stream apis into tcs main module --- source/libs/tcs/inc/tcsInt.h | 2 -- source/libs/tcs/src/tcs.c | 14 ++++++++++++++ source/libs/tcs/src/tcsStream.c | 29 ----------------------------- 3 files changed, 14 insertions(+), 31 deletions(-) delete mode 100644 source/libs/tcs/src/tcsStream.c diff --git a/source/libs/tcs/inc/tcsInt.h b/source/libs/tcs/inc/tcsInt.h index b24a47aa98..a7e304b544 100644 --- a/source/libs/tcs/inc/tcsInt.h +++ b/source/libs/tcs/inc/tcsInt.h @@ -26,8 +26,6 @@ extern "C" { #endif -extern int8_t tsS3Ablob; - typedef enum { TOS_PROTO_NIL, TOS_PROTO_S3, diff --git a/source/libs/tcs/src/tcs.c b/source/libs/tcs/src/tcs.c index db02ca21fa..1716228d5c 100644 --- a/source/libs/tcs/src/tcs.c +++ b/source/libs/tcs/src/tcs.c @@ -24,6 +24,8 @@ STcs tcs; +extern int8_t tsS3Ablob; + int32_t tcsInit() { int32_t code = 0; @@ -105,3 +107,15 @@ int32_t tcsGetObjectBlock(const char* object_name, int64_t offset, int64_t size, } void tcsDeleteObjectsByPrefix(const char* prefix) { return tcs.DeleteObjectsByPrefix(prefix); } + +int32_t tcsPutObjectFromFile2(const char* file, const char* object, int8_t withcp) { + return tcs.PutObjectFromFile2(file, object, withcp); +} + +int32_t tcsGetObjectsByPrefix(const char* prefix, const char* path) { return tcs.GetObjectsByPrefix(prefix, path); } + +int32_t tcsDeleteObjects(const char* object_name[], int nobject) { return tcs.DeleteObjects(object_name, nobject); } + +int32_t tcsGetObjectToFile(const char* object_name, const char* fileName) { + return tcs.GetObjectToFile(object_name, fileName); +} diff --git a/source/libs/tcs/src/tcsStream.c b/source/libs/tcs/src/tcsStream.c deleted file mode 100644 index 7cd4647ddd..0000000000 --- a/source/libs/tcs/src/tcsStream.c +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2019 TAOS Data, Inc. - * - * This program is free software: you can use, redistribute, and/or modify - * it under the terms of the GNU Affero General Public License, version 3 - * or later ("AGPL"), as published by the Free Software Foundation. - * - * This program is distributed in the hope that it will be useful, but WITHOUT - * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or - * FITNESS FOR A PARTICULAR PURPOSE. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . 
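A minimal illustrative sketch of the dispatch pattern behind these wrappers (the demo* names are stand-ins, not the real STcs/az/cos symbols): tcsInit() fills a struct of function pointers once, driven by the tsS3Ablob setting, and every tcs* entry point simply forwards through that struct.

#include <stdint.h>

typedef struct DemoTcs {
  int32_t (*PutObjectFromFile2)(const char *file, const char *object, int8_t withcp);
  int32_t (*GetObjectToFile)(const char *object_name, const char *fileName);
} DemoTcs;

static DemoTcs demoTcs;  // stand-in for the global `tcs`

static int32_t demoAzPut(const char *file, const char *object, int8_t withcp) { return 0; }
static int32_t demoAzGet(const char *object_name, const char *fileName) { return 0; }

static void demoTcsInit(int8_t useAzureBlob) {
  if (useAzureBlob) {  // mirrors the tsS3Ablob switch in tcsInit()
    demoTcs.PutObjectFromFile2 = demoAzPut;
    demoTcs.GetObjectToFile = demoAzGet;
  }  // else: wire in the s3/cos implementations instead
}

// Same shape as the tcsPutObjectFromFile2() wrapper above: no logic, just forwarding.
static int32_t demoTcsPutObjectFromFile2(const char *file, const char *object, int8_t withcp) {
  return demoTcs.PutObjectFromFile2(file, object, withcp);
}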
- */ - -#include "tcs.h" -#include "tcsInt.h" - -int32_t tcsPutObjectFromFile2(const char* file, const char* object, int8_t withcp) { - return tcs.PutObjectFromFile2(file, object, withcp); -} - -int32_t tcsGetObjectsByPrefix(const char* prefix, const char* path) { return tcs.GetObjectsByPrefix(prefix, path); } - -int32_t tcsDeleteObjects(const char* object_name[], int nobject) { return tcs.DeleteObjects(object_name, nobject); } - -int32_t tcsGetObjectToFile(const char* object_name, const char* fileName) { - return tcs.GetObjectToFile(object_name, fileName); -} From b6cfa2c7f11f68134a204ffd2bb736f43eeffebc Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 17 Oct 2024 09:48:19 +0800 Subject: [PATCH 026/102] tcs/test: extern tsS3Ablob from ut --- source/libs/tcs/inc/tcsInt.h | 2 ++ source/libs/tcs/src/tcs.c | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/source/libs/tcs/inc/tcsInt.h b/source/libs/tcs/inc/tcsInt.h index a7e304b544..b24a47aa98 100644 --- a/source/libs/tcs/inc/tcsInt.h +++ b/source/libs/tcs/inc/tcsInt.h @@ -26,6 +26,8 @@ extern "C" { #endif +extern int8_t tsS3Ablob; + typedef enum { TOS_PROTO_NIL, TOS_PROTO_S3, diff --git a/source/libs/tcs/src/tcs.c b/source/libs/tcs/src/tcs.c index 1716228d5c..73cb64c34d 100644 --- a/source/libs/tcs/src/tcs.c +++ b/source/libs/tcs/src/tcs.c @@ -24,8 +24,6 @@ STcs tcs; -extern int8_t tsS3Ablob; - int32_t tcsInit() { int32_t code = 0; From 12318f3a6df2aa0c357fc1ed5a5db4fd5516e4f5 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 17 Oct 2024 14:40:26 +0800 Subject: [PATCH 027/102] tcs/test: disable test cases for ci ut run --- source/libs/tcs/src/tcs.c | 14 -------------- source/libs/tcs/src/tcsStream.c | 31 +++++++++++++++++++++++++++++++ source/libs/tcs/test/tcsTest.cpp | 8 ++++---- 3 files changed, 35 insertions(+), 18 deletions(-) create mode 100644 source/libs/tcs/src/tcsStream.c diff --git a/source/libs/tcs/src/tcs.c b/source/libs/tcs/src/tcs.c index 73cb64c34d..a668eac60f 100644 --- a/source/libs/tcs/src/tcs.c +++ b/source/libs/tcs/src/tcs.c @@ -22,8 +22,6 @@ #include "az.h" #include "cos.h" -STcs tcs; - int32_t tcsInit() { int32_t code = 0; @@ -105,15 +103,3 @@ int32_t tcsGetObjectBlock(const char* object_name, int64_t offset, int64_t size, } void tcsDeleteObjectsByPrefix(const char* prefix) { return tcs.DeleteObjectsByPrefix(prefix); } - -int32_t tcsPutObjectFromFile2(const char* file, const char* object, int8_t withcp) { - return tcs.PutObjectFromFile2(file, object, withcp); -} - -int32_t tcsGetObjectsByPrefix(const char* prefix, const char* path) { return tcs.GetObjectsByPrefix(prefix, path); } - -int32_t tcsDeleteObjects(const char* object_name[], int nobject) { return tcs.DeleteObjects(object_name, nobject); } - -int32_t tcsGetObjectToFile(const char* object_name, const char* fileName) { - return tcs.GetObjectToFile(object_name, fileName); -} diff --git a/source/libs/tcs/src/tcsStream.c b/source/libs/tcs/src/tcsStream.c new file mode 100644 index 0000000000..f73bb028ba --- /dev/null +++ b/source/libs/tcs/src/tcsStream.c @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. 
+ * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#include "tcs.h" +#include "tcsInt.h" + +STcs tcs; + +int32_t tcsPutObjectFromFile2(const char* file, const char* object, int8_t withcp) { + return tcs.PutObjectFromFile2(file, object, withcp); +} + +int32_t tcsGetObjectsByPrefix(const char* prefix, const char* path) { return tcs.GetObjectsByPrefix(prefix, path); } + +int32_t tcsDeleteObjects(const char* object_name[], int nobject) { return tcs.DeleteObjects(object_name, nobject); } + +int32_t tcsGetObjectToFile(const char* object_name, const char* fileName) { + return tcs.GetObjectToFile(object_name, fileName); +} diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index e86c9e02b3..0eb0b4d071 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -59,8 +59,8 @@ int32_t tcsInitEnv(int8_t isBlob) { return code; } -// TEST(TcsTest, DISABLE_InterfaceTest) { -TEST(TcsTest, InterfaceTest) { +TEST(TcsTest, DISABLE_InterfaceTest) { + // TEST(TcsTest, InterfaceTest) { int code = 0; code = tcsInitEnv(true); @@ -97,8 +97,8 @@ TEST(TcsTest, InterfaceTest) { tcsUninit(); } -// TEST(TcsTest, DISABLE_InterfaceNonBlobTest) { -TEST(TcsTest, InterfaceNonBlobTest) { +TEST(TcsTest, DISABLE_InterfaceNonBlobTest) { + // TEST(TcsTest, InterfaceNonBlobTest) { int code = 0; code = tcsInitEnv(false); From 69bc051b9a74dab38bb23f7947a5f8d1139f0921 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 17 Oct 2024 15:44:31 +0800 Subject: [PATCH 028/102] tcs/test: cleanup unused cases --- source/libs/tcs/test/tcsTest.cpp | 396 +------------------------------ 1 file changed, 2 insertions(+), 394 deletions(-) diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index 0eb0b4d071..33566f6400 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -59,7 +59,7 @@ int32_t tcsInitEnv(int8_t isBlob) { return code; } -TEST(TcsTest, DISABLE_InterfaceTest) { +TEST(TcsTest, DISABLED_InterfaceTest) { // TEST(TcsTest, InterfaceTest) { int code = 0; @@ -97,7 +97,7 @@ TEST(TcsTest, DISABLE_InterfaceTest) { tcsUninit(); } -TEST(TcsTest, DISABLE_InterfaceNonBlobTest) { +TEST(TcsTest, DISABLED_InterfaceNonBlobTest) { // TEST(TcsTest, InterfaceNonBlobTest) { int code = 0; @@ -134,395 +134,3 @@ TEST(TcsTest, DISABLE_InterfaceNonBlobTest) { tcsUninit(); } - -/* -#include "walInt.h" -const char* ranStr = "tvapq02tcp"; -const int ranStrLen = strlen(ranStr); -SWalSyncInfo syncMeta = {0}; -class WalCleanEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void SetUp() override { - taosRemoveDir(pathName); - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->rollPeriod = -1; - pCfg->segSize = -1; - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -class WalCleanDeleteEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void SetUp() override { - taosRemoveDir(pathName); - 
SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -class WalKeepEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void walResetEnv() { - TearDown(); - taosRemoveDir(pathName); - SetUp(); - } - void SetUp() override { - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->rollPeriod = -1; - pCfg->segSize = -1; - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -class WalRetentionEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - static void TearDownTestCase() { walCleanUp(); } - void walResetEnv() { - TearDown(); - taosRemoveDir(pathName); - SetUp(); - } - void SetUp() override { - SWalCfg cfg; - cfg.rollPeriod = -1; - cfg.segSize = -1; - cfg.retentionPeriod = -1; - cfg.retentionSize = 0; - cfg.rollPeriod = 0; - cfg.vgId = 0; - cfg.level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, &cfg); - ASSERT(pWal != NULL); - } - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; -TEST_F(WalCleanEnv, createNew) { - walRollFileInfo(pWal); - ASSERT(pWal->fileInfoSet != NULL); - ASSERT_EQ(pWal->fileInfoSet->size, 1); - SWalFileInfo* pInfo = (SWalFileInfo*)taosArrayGetLast(pWal->fileInfoSet); - ASSERT_EQ(pInfo->firstVer, 0); - ASSERT_EQ(pInfo->lastVer, -1); - ASSERT_EQ(pInfo->closeTs, -1); - ASSERT_EQ(pInfo->fileSize, 0); -} -TEST_F(WalCleanEnv, serialize) { - int code = walRollFileInfo(pWal); - ASSERT(code == 0); - ASSERT(pWal->fileInfoSet != NULL); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - char* ss = NULL; - code = walMetaSerialize(pWal, &ss); - ASSERT(code == 0); - printf("%s\n", ss); - taosMemoryFree(ss); - code = walSaveMeta(pWal); - ASSERT(code == 0); -} -TEST_F(WalCleanEnv, removeOldMeta) { - int code = walRollFileInfo(pWal); - ASSERT(code == 0); - ASSERT(pWal->fileInfoSet != NULL); - code = walSaveMeta(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walSaveMeta(pWal); - ASSERT(code == 0); -} -TEST_F(WalKeepEnv, readOldMeta) { - walResetEnv(); - int code; - syncMeta.isWeek = -1; - syncMeta.seqNum = UINT64_MAX; - syncMeta.term = UINT64_MAX; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); - ASSERT_EQ(pWal->vers.lastVer, i); - } - char* 
oldss = NULL; - code = walMetaSerialize(pWal, &oldss); - ASSERT(code == 0); - TearDown(); - SetUp(); - ASSERT_EQ(pWal->vers.firstVer, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - char* newss = NULL; - code = walMetaSerialize(pWal, &newss); - ASSERT(code == 0); - int len = strlen(oldss); - ASSERT_EQ(len, strlen(newss)); - for (int i = 0; i < len; i++) { - EXPECT_EQ(oldss[i], newss[i]); - } - taosMemoryFree(oldss); - taosMemoryFree(newss); -} -TEST_F(WalCleanEnv, write) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); - ASSERT_EQ(pWal->vers.lastVer, i); - } - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalCleanEnv, rollback) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - } - code = walRollback(pWal, 12); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - code = walRollback(pWal, 9); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 8); - code = walRollback(pWal, 5); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 4); - code = walRollback(pWal, 3); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 2); - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalCleanEnv, rollbackMultiFile) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - if (i == 5) { - walBeginSnapshot(pWal, i, 0); - walEndSnapshot(pWal); - } - } - code = walRollback(pWal, 12); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - code = walRollback(pWal, 9); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 8); - code = walRollback(pWal, 6); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 5); - code = walRollback(pWal, 5); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 5); - code = walAppendLog(pWal, 6, 6, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 6); - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalCleanDeleteEnv, roll) { - int code; - int i; - for (i = 0; i < 100; i++) { - code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walCommit(pWal, i); - ASSERT_EQ(pWal->vers.commitVer, i); - } - walBeginSnapshot(pWal, i - 1, 0); - ASSERT_EQ(pWal->vers.verInSnapshotting, i - 1); - walEndSnapshot(pWal); - ASSERT_EQ(pWal->vers.snapshotVer, i - 1); - ASSERT_EQ(pWal->vers.verInSnapshotting, -1); - code = walAppendLog(pWal, 5, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_NE(code, 0); - for (; i < 200; i++) { - code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - code = walCommit(pWal, i); - ASSERT_EQ(pWal->vers.commitVer, i); - } - code = walBeginSnapshot(pWal, i - 1, 0); - ASSERT_EQ(code, 0); - code = walEndSnapshot(pWal); - ASSERT_EQ(code, 0); -} -TEST_F(WalKeepEnv, readHandleRead) { - walResetEnv(); - int code; - SWalReader* pRead = walOpenReader(pWal, NULL, 0); - ASSERT(pRead != NULL); - int i; - for (i = 0; i < 100; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - 
ASSERT_EQ(code, 0); - } - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 100; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - walCloseReader(pRead); -} -TEST_F(WalRetentionEnv, repairMeta1) { - walResetEnv(); - int code; - int i; - for (i = 0; i < 100; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - TearDown(); - // getchar(); - char buf[100]; - sprintf(buf, "%s/meta-ver%d", pathName, 0); - taosRemoveFile(buf); - sprintf(buf, "%s/meta-ver%d", pathName, 1); - taosRemoveFile(buf); - SetUp(); - // getchar(); - ASSERT_EQ(pWal->vers.lastVer, 99); - SWalReader* pRead = walOpenReader(pWal, NULL, 0); - ASSERT(pRead != NULL); - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 100; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - for (i = 100; i < 200; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 200; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - walCloseReader(pRead); -} -*/ From eeab3a8a8b1641bdb379c397122c2017e7b76f71 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 18 Oct 2024 08:44:38 +0800 Subject: [PATCH 029/102] az/log: use az prefix log instead of u-prefixed ones --- include/util/tlog.h | 1 + source/common/src/tglobal.c | 22 +++++++++++------- source/libs/azure/inc/azInt.h | 42 +++++++++++++++++++++++++++++++++ source/libs/azure/src/az.cpp | 44 +++++++++++++++++------------------ source/util/src/tlog.c | 25 ++++++++++---------- 5 files changed, 92 insertions(+), 42 deletions(-) create mode 100644 source/libs/azure/inc/azInt.h diff --git a/include/util/tlog.h b/include/util/tlog.h index e80e94de32..b8d096b7ce 100644 --- a/include/util/tlog.h +++ b/include/util/tlog.h @@ -57,6 +57,7 @@ extern int32_t rpcDebugFlag; extern int32_t qDebugFlag; extern int32_t stDebugFlag; extern int32_t wDebugFlag; +extern int32_t azDebugFlag; extern 
int32_t sDebugFlag; extern int32_t tsdbDebugFlag; extern int32_t tqDebugFlag; diff --git a/source/common/src/tglobal.c b/source/common/src/tglobal.c index 8254082561..f9fd0ed0ba 100644 --- a/source/common/src/tglobal.c +++ b/source/common/src/tglobal.c @@ -542,6 +542,7 @@ static int32_t taosAddServerLogCfg(SConfig *pCfg) { TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "vDebugFlag", vDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "mDebugFlag", mDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "wDebugFlag", wDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER)); + TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "azDebugFlag", azDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "sDebugFlag", sDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "tsdbDebugFlag", tsdbDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER)); TAOS_CHECK_RETURN(cfgAddInt32(pCfg, "tqDebugFlag", tqDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER)); @@ -1044,6 +1045,9 @@ static int32_t taosSetServerLogCfg(SConfig *pCfg) { TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, "wDebugFlag"); wDebugFlag = pItem->i32; + TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, "azDebugFlag"); + azDebugFlag = pItem->i32; + TAOS_CHECK_GET_CFG_ITEM(pCfg, pItem, "sDebugFlag"); sDebugFlag = pItem->i32; @@ -1729,7 +1733,7 @@ int32_t taosReadDataFolder(const char *cfgDir, const char **envCmd, const char * TAOS_CHECK_GOTO(cfgAddDir(pCfg, "dataDir", tsDataDir, CFG_SCOPE_SERVER, CFG_DYN_NONE), NULL, _exit); TAOS_CHECK_GOTO(cfgAddInt32(pCfg, "debugFlag", dDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER), NULL, _exit); - TAOS_CHECK_GOTO(cfgAddInt32(pCfg, "dDebugFlag", dDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER) ,NULL, _exit); + TAOS_CHECK_GOTO(cfgAddInt32(pCfg, "dDebugFlag", dDebugFlag, 0, 255, CFG_SCOPE_SERVER, CFG_DYN_SERVER), NULL, _exit); if ((code = taosLoadCfg(pCfg, envCmd, cfgDir, envFile, apolloUrl)) != 0) { (void)printf("failed to load cfg since %s\n", tstrerror(code)); @@ -1956,13 +1960,14 @@ static int32_t taosCfgDynamicOptionsForServer(SConfig *pCfg, const char *name) { { // 'bool/int32_t/int64_t/float/double' variables with general modification function static OptionNameAndVar debugOptions[] = { - {"dDebugFlag", &dDebugFlag}, {"vDebugFlag", &vDebugFlag}, {"mDebugFlag", &mDebugFlag}, - {"wDebugFlag", &wDebugFlag}, {"sDebugFlag", &sDebugFlag}, {"tsdbDebugFlag", &tsdbDebugFlag}, - {"tqDebugFlag", &tqDebugFlag}, {"fsDebugFlag", &fsDebugFlag}, {"udfDebugFlag", &udfDebugFlag}, - {"smaDebugFlag", &smaDebugFlag}, {"idxDebugFlag", &idxDebugFlag}, {"tdbDebugFlag", &tdbDebugFlag}, - {"tmrDebugFlag", &tmrDebugFlag}, {"uDebugFlag", &uDebugFlag}, {"smaDebugFlag", &smaDebugFlag}, - {"rpcDebugFlag", &rpcDebugFlag}, {"qDebugFlag", &qDebugFlag}, {"metaDebugFlag", &metaDebugFlag}, - {"stDebugFlag", &stDebugFlag}, {"sndDebugFlag", &sndDebugFlag}, {"tqClientDebug", &tqClientDebug}, + {"dDebugFlag", &dDebugFlag}, {"vDebugFlag", &vDebugFlag}, {"mDebugFlag", &mDebugFlag}, + {"wDebugFlag", &wDebugFlag}, {"azDebugFlag", &azDebugFlag}, {"sDebugFlag", &sDebugFlag}, + {"tsdbDebugFlag", &tsdbDebugFlag}, {"tqDebugFlag", &tqDebugFlag}, {"fsDebugFlag", &fsDebugFlag}, + {"udfDebugFlag", &udfDebugFlag}, {"smaDebugFlag", &smaDebugFlag}, {"idxDebugFlag", &idxDebugFlag}, + {"tdbDebugFlag", &tdbDebugFlag}, {"tmrDebugFlag", &tmrDebugFlag}, {"uDebugFlag", &uDebugFlag}, + {"smaDebugFlag", &smaDebugFlag}, {"rpcDebugFlag", &rpcDebugFlag}, 
{"qDebugFlag", &qDebugFlag}, + {"metaDebugFlag", &metaDebugFlag}, {"stDebugFlag", &stDebugFlag}, {"sndDebugFlag", &sndDebugFlag}, + {"tqClientDebug", &tqClientDebug}, }; static OptionNameAndVar options[] = {{"audit", &tsEnableAudit}, @@ -2340,6 +2345,7 @@ static int32_t taosSetAllDebugFlag(SConfig *pCfg, int32_t flag) { taosCheckAndSetDebugFlag(&vDebugFlag, "vDebugFlag", flag, noNeedToSetVars); taosCheckAndSetDebugFlag(&mDebugFlag, "mDebugFlag", flag, noNeedToSetVars); taosCheckAndSetDebugFlag(&wDebugFlag, "wDebugFlag", flag, noNeedToSetVars); + taosCheckAndSetDebugFlag(&azDebugFlag, "azDebugFlag", flag, noNeedToSetVars); taosCheckAndSetDebugFlag(&sDebugFlag, "sDebugFlag", flag, noNeedToSetVars); taosCheckAndSetDebugFlag(&tsdbDebugFlag, "tsdbDebugFlag", flag, noNeedToSetVars); taosCheckAndSetDebugFlag(&tqDebugFlag, "tqDebugFlag", flag, noNeedToSetVars); diff --git a/source/libs/azure/inc/azInt.h b/source/libs/azure/inc/azInt.h new file mode 100644 index 0000000000..3538e925c7 --- /dev/null +++ b/source/libs/azure/inc/azInt.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +#ifndef _TD_AZ_INT_H_ +#define _TD_AZ_INT_H_ + +#include "os.h" +#include "tarray.h" +#include "tdef.h" +#include "tlog.h" +#include "tmsg.h" + +#ifdef __cplusplus +extern "C" { +#endif + +// clang-format off +#define azFatal(...) { if (azDebugFlag & DEBUG_FATAL) { taosPrintLog("AZR FATAL ", DEBUG_FATAL, 255, __VA_ARGS__); }} +#define azError(...) { if (azDebugFlag & DEBUG_ERROR) { taosPrintLog("AZR ERROR ", DEBUG_ERROR, 255, __VA_ARGS__); }} +#define azWarn(...) { if (azDebugFlag & DEBUG_WARN) { taosPrintLog("AZR WARN ", DEBUG_WARN, 255, __VA_ARGS__); }} +#define azInfo(...) { if (azDebugFlag & DEBUG_INFO) { taosPrintLog("AZR ", DEBUG_INFO, 255, __VA_ARGS__); }} +#define azDebug(...) { if (azDebugFlag & DEBUG_DEBUG) { taosPrintLog("AZR ", DEBUG_DEBUG, azDebugFlag, __VA_ARGS__); }} +#define azTrace(...) 
{ if (azDebugFlag & DEBUG_TRACE) { taosPrintLog("AZR ", DEBUG_TRACE, azDebugFlag, __VA_ARGS__); }} +// clang-format on + +#ifdef __cplusplus +} +#endif + +#endif // _TD_AZ_INT_H_ diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index b05f5be2ca..3da4b6e808 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -16,6 +16,7 @@ #define ALLOW_FORBID_FUNC #include "az.h" +#include "azInt.h" #include "os.h" #include "taoserror.h" @@ -88,9 +89,9 @@ static int32_t azListBucket(char const *bucketname) { } } } catch (const Azure::Core::RequestFailedException &e) { - uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), - e.ReasonPhrase.c_str()); - // uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); + azError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); + // azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); code = TAOS_SYSTEM_ERROR(EIO); TAOS_RETURN(code); @@ -105,7 +106,6 @@ int32_t azCheckCfg() { // for (; i < tsS3EpNum; i++) { (void)fprintf(stdout, "test s3 ep (%d/%d):\n", i + 1, tsS3EpNum); - // s3DumpCfgByEp(i); azDumpCfgByEp(0); // test put @@ -131,7 +131,7 @@ int32_t azCheckCfg() { TdFilePtr fp = taosOpenFile(path, TD_FILE_CREATE | TD_FILE_WRITE | TD_FILE_READ | TD_FILE_TRUNC); if (!fp) { (void)fprintf(stderr, "failed to open test file: %s.\n", path); - // uError("ERROR: %s Failed to open %s", __func__, path); + // azError("ERROR: %s Failed to open %s", __func__, path); TAOS_CHECK_GOTO(terrno, &lino, _next); } if (taosWriteFile(fp, testdata, strlen(testdata)) < 0) { @@ -258,7 +258,7 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int std::cout << e.what() << std::endl; */ code = TAOS_SYSTEM_ERROR(EIO); - uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); TAOS_RETURN(code); } @@ -296,16 +296,16 @@ int32_t azGetObjectBlockImpl(const char *object_name, int64_t offset, int64_t si auto res = blobClient.DownloadTo(buf, size, options); if (check && res.Value.ContentRange.Length.Value() != size) { code = TAOS_SYSTEM_ERROR(EIO); - uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); TAOS_RETURN(code); } *ppBlock = buf; } catch (const Azure::Core::RequestFailedException &e) { - uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), - e.ReasonPhrase.c_str()); + azError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); code = TAOS_SYSTEM_ERROR(EIO); - uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); if (buf) { taosMemoryFree(buf); @@ -368,9 +368,9 @@ void azDeleteObjectsByPrefix(const char *prefix) { blobClient.Delete(); } } catch (const Azure::Core::RequestFailedException &e) { - uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), - e.ReasonPhrase.c_str()); - // uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); + azError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); + // 
azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); } } @@ -379,19 +379,19 @@ int32_t azPutObjectFromFile2(const char *file, const char *object, int8_t withcp uint64_t contentLength = 0; if (taosStatFile(file, (int64_t *)&contentLength, NULL, NULL) < 0) { - uError("ERROR: %s Failed to stat file %s: ", __func__, file); + azError("ERROR: %s Failed to stat file %s: ", __func__, file); TAOS_RETURN(terrno); } code = azPutObjectFromFileOffset(file, object, 0, contentLength); if (code != 0) { - uError("ERROR: %s Failed to put file %s: ", __func__, file); + azError("ERROR: %s Failed to put file %s: ", __func__, file); TAOS_CHECK_GOTO(code, &lino, _exit); } _exit: if (code) { - uError("%s failed at line %d since %s", __func__, lino, tstrerror(code)); + azError("%s failed at line %d since %s", __func__, lino, tstrerror(code)); } return 0; @@ -417,14 +417,14 @@ int32_t azGetObjectToFile(const char *object_name, const char *fileName) { auto res = blobClient.DownloadTo(fileName); if (res.Value.ContentRange.Length.Value() <= 0) { code = TAOS_SYSTEM_ERROR(EIO); - uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); TAOS_RETURN(code); } } catch (const Azure::Core::RequestFailedException &e) { - uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), - e.ReasonPhrase.c_str()); + azError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); code = TAOS_SYSTEM_ERROR(EIO); - uError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); TAOS_RETURN(code); } @@ -470,8 +470,8 @@ int32_t azGetObjectsByPrefix(const char *prefix, const char *path) { } } } catch (const Azure::Core::RequestFailedException &e) { - uError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), - e.ReasonPhrase.c_str()); + azError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), + e.ReasonPhrase.c_str()); TAOS_RETURN(TSDB_CODE_FAILED); } diff --git a/source/util/src/tlog.c b/source/util/src/tlog.c index 45c8a2f6c2..3ca148a625 100644 --- a/source/util/src/tlog.c +++ b/source/util/src/tlog.c @@ -26,7 +26,7 @@ #define LOG_MAX_LINE_DUMP_SIZE (1024 * 1024) #define LOG_MAX_LINE_DUMP_BUFFER_SIZE (LOG_MAX_LINE_DUMP_SIZE + 128) -#define LOG_FILE_DAY_LEN 64 +#define LOG_FILE_DAY_LEN 64 #define LOG_DEFAULT_BUF_SIZE (20 * 1024 * 1024) // 20MB #define LOG_SLOW_BUF_SIZE (10 * 1024 * 1024) // 10MB @@ -113,6 +113,7 @@ int32_t rpcDebugFlag = 131; int32_t qDebugFlag = 131; int32_t stDebugFlag = 131; int32_t wDebugFlag = 131; +int32_t azDebugFlag = 131; int32_t sDebugFlag = 131; int32_t tsdbDebugFlag = 131; int32_t tdbDebugFlag = 131; @@ -151,7 +152,7 @@ static int32_t taosStartLog() { return 0; } -static void getDay(char* buf, int32_t bufSize){ +static void getDay(char *buf, int32_t bufSize) { time_t t = taosTime(NULL); struct tm tmInfo; if (taosLocalTime(&t, &tmInfo, buf, bufSize) != NULL) { @@ -172,7 +173,7 @@ static int64_t getTimestampToday() { return (int64_t)taosMktime(&tm); } -static void getFullPathName(char* fullName, const char* logName){ +static void getFullPathName(char *fullName, const char *logName) { if (strlen(tsLogDir) != 0) { char lastC = tsLogDir[strlen(tsLogDir) - 1]; if (lastC == '\\' || lastC == '/') { @@ -225,7 +226,7 @@ int32_t 
taosInitLog(const char *logName, int32_t maxFiles, bool tsc) { } TAOS_CHECK_RETURN(taosInitNormalLog(logName, maxFiles)); - if (tsc){ + if (tsc) { TAOS_CHECK_RETURN(taosInitSlowLog()); } TAOS_CHECK_RETURN(taosStartLog()); @@ -397,7 +398,7 @@ static int32_t taosOpenNewLogFile() { OldFileKeeper *oldFileKeeper = taosOpenNewFile(); if (!oldFileKeeper) { - TAOS_UNUSED(taosThreadMutexUnlock(&tsLogObj.logMutex)); + TAOS_UNUSED(taosThreadMutexUnlock(&tsLogObj.logMutex)); return terrno; } if (taosThreadCreate(&thread, &attr, taosThreadToCloseOldFile, oldFileKeeper) != 0) { @@ -433,7 +434,7 @@ static void taosOpenNewSlowLogFile() { char day[TD_TIME_STR_LEN] = {0}; getDay(day, sizeof(day)); TdFilePtr pFile = NULL; - char name[PATH_MAX + TD_TIME_STR_LEN] = {0}; + char name[PATH_MAX + TD_TIME_STR_LEN] = {0}; (void)snprintf(name, PATH_MAX + TD_TIME_STR_LEN, "%s.%s", tsLogObj.slowLogName, day); pFile = taosOpenFile(name, TD_FILE_CREATE | TD_FILE_WRITE | TD_FILE_APPEND); if (pFile == NULL) { @@ -455,7 +456,7 @@ void taosResetLog() { if (tsLogObj.logHandle) { int32_t code = taosOpenNewLogFile(); - if(code != 0){ + if (code != 0) { uError("failed to open new log file, reason:%s", tstrerror(code)); } uInfo("=================================="); @@ -508,12 +509,12 @@ static void decideLogFileName(const char *fn, int32_t maxFileNum) { } } -static void decideLogFileNameFlag(){ +static void decideLogFileNameFlag() { char name[PATH_MAX + 50] = "\0"; int32_t logstat0_mtime = 0; int32_t logstat1_mtime = 0; - bool log0Exist = false; - bool log1Exist = false; + bool log0Exist = false; + bool log1Exist = false; if (strlen(tsLogObj.logName) < PATH_MAX + 50 - 2) { strcpy(name, tsLogObj.logName); @@ -535,7 +536,7 @@ static void decideLogFileNameFlag(){ } } -static void processLogFileName(const char* logName , int32_t maxFileNum){ +static void processLogFileName(const char *logName, int32_t maxFileNum) { char fullName[PATH_MAX] = {0}; getFullPathName(fullName, logName); decideLogFileName(fullName, maxFileNum); @@ -872,7 +873,7 @@ static int32_t taosGetLogRemainSize(SLogBuff *pLogBuf, int32_t start, int32_t en return rSize >= 0 ? rSize : LOG_BUF_SIZE(pLogBuf) + rSize; } -static void taosWriteSlowLog(SLogBuff *pLogBuf){ +static void taosWriteSlowLog(SLogBuff *pLogBuf) { int32_t lock = atomic_val_compare_exchange_32(&pLogBuf->lock, 0, 1); if (lock == 1) return; taosWriteLog(pLogBuf); From 4cc65adc1fec57d4a61e07dd888f74f56b9d5ccd Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 18 Oct 2024 09:57:12 +0800 Subject: [PATCH 030/102] az/log: new debug flag azDebugFlag for az logging --- contrib/test/azure/CMakeLists.txt | 3 --- contrib/test/azure/main.cpp | 39 ++++++++++++------------------- tests/system-test/2-query/db.py | 2 +- 3 files changed, 16 insertions(+), 28 deletions(-) diff --git a/contrib/test/azure/CMakeLists.txt b/contrib/test/azure/CMakeLists.txt index b3db1dffce..35c87312d0 100644 --- a/contrib/test/azure/CMakeLists.txt +++ b/contrib/test/azure/CMakeLists.txt @@ -6,9 +6,6 @@ add_executable ( main.cpp ) -# Link to Azure SDK -#target_link_libraries(application _azure_sdk) - find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) find_library(XML2_LIBRARY xml2 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) #find_library(XML2_LIBRARY xml2) diff --git a/contrib/test/azure/main.cpp b/contrib/test/azure/main.cpp index 5d52801329..09eadecd69 100644 --- a/contrib/test/azure/main.cpp +++ b/contrib/test/azure/main.cpp @@ -1,6 +1,3 @@ -// Copyright (c) Microsoft Corporation. 
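With the flag registered in tglobal.c and defaulted to 131 in tlog.c, the azInt.h macros gate each level on one bit of azDebugFlag, so az verbosity can be raised on its own instead of riding on uDebugFlag. An illustrative call site (not part of these patches):

#include "azInt.h"  // azError()/azInfo()/azDebug() macros shown earlier

static int32_t azDemoStat(const char *object) {
  if (object == NULL) {
    azError("az: empty object name");  // only when the DEBUG_ERROR bit is set
    return -1;
  }
  azInfo("az: checking object %s", object);
  azDebug("az: object %s looks reachable", object);  // only when the DEBUG_DEBUG bit is set
  return 0;
}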
-// Licensed under the MIT License. - /** * @file * @brief Application that consumes the Azure SDK for C++. @@ -16,36 +13,30 @@ using namespace Azure::Storage::Blobs; -int main(int argc, char* argv[]) -{ +int main(int argc, char* argv[]) { (void)argc; (void)argv; /**************** Container SDK client ************************/ /**************** Create container ************************/ - try - { - auto containerClient = BlobContainerClient::CreateFromConnectionString( - std::getenv("STORAGE_CONNECTION_STRING"), "td-test"); - //containerClient.CreateIfNotExists(); + try { + auto containerClient = + BlobContainerClient::CreateFromConnectionString(std::getenv("STORAGE_CONNECTION_STRING"), "td-test"); + // containerClient.CreateIfNotExists(); /**************** Container SDK client ************************/ /**************** list blobs (one page) ******************/ - //auto response = containerClient.ListBlobsSinglePage(); - //auto response = containerClient.ListBlobs(); - //auto blobListPage = response.Value; - //auto blobListPage = response.Blobs; - for (auto page = containerClient.ListBlobs(/*options*/); page.HasPage(); page.MoveToNextPage()) - { - for (auto& blob : page.Blobs) - { - std::cout << blob.Name << std::endl; - } - } + // auto response = containerClient.ListBlobsSinglePage(); + // auto response = containerClient.ListBlobs(); + // auto blobListPage = response.Value; + // auto blobListPage = response.Blobs; + for (auto page = containerClient.ListBlobs(/*options*/); page.HasPage(); page.MoveToNextPage()) { + for (auto& blob : page.Blobs) { + std::cout << blob.Name << std::endl; + } + } - } - catch (const std::exception& ex) - { + } catch (const std::exception& ex) { std::cout << ex.what(); return 1; } diff --git a/tests/system-test/2-query/db.py b/tests/system-test/2-query/db.py index 588609e524..1964cea51f 100644 --- a/tests/system-test/2-query/db.py +++ b/tests/system-test/2-query/db.py @@ -57,7 +57,7 @@ class TDTestCase: tdSql.checkData(0, 2, 0) tdSql.query("show dnode 1 variables like '%debugFlag'") - tdSql.checkRows(23) + tdSql.checkRows(24) tdSql.query("show dnode 1 variables like '____debugFlag'") tdSql.checkRows(2) From 1d26d0fa071ddeca7a3bf39c8cf10ede58801652 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 18 Oct 2024 15:12:53 +0800 Subject: [PATCH 031/102] az/test: test cases for az apis --- contrib/test/azure/CMakeLists.txt | 18 +- contrib/test/azure/main.cpp | 18 ++ source/libs/azure/src/az.cpp | 18 -- source/libs/azure/test/azTest.cpp | 498 ++++-------------------------- 4 files changed, 87 insertions(+), 465 deletions(-) diff --git a/contrib/test/azure/CMakeLists.txt b/contrib/test/azure/CMakeLists.txt index 35c87312d0..68571dce46 100644 --- a/contrib/test/azure/CMakeLists.txt +++ b/contrib/test/azure/CMakeLists.txt @@ -14,13 +14,13 @@ find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos #find_library(CoreFoundation_Library CoreFoundation) #find_library(SystemConfiguration_Library SystemConfiguration) - target_link_libraries( - azure-test - PRIVATE _azure_sdk - PRIVATE ${CURL_LIBRARY} - PRIVATE ${XML2_LIBRARY} - PRIVATE ${SSL_LIBRARY} - PRIVATE ${CRYPTO_LIBRARY} - PRIVATE dl - PRIVATE pthread +target_link_libraries( + azure-test + PRIVATE _azure_sdk + PRIVATE ${CURL_LIBRARY} + PRIVATE ${XML2_LIBRARY} + PRIVATE ${SSL_LIBRARY} + PRIVATE ${CRYPTO_LIBRARY} + PRIVATE dl + PRIVATE pthread ) diff --git a/contrib/test/azure/main.cpp b/contrib/test/azure/main.cpp index 09eadecd69..badfef623b 100644 --- a/contrib/test/azure/main.cpp +++ 
b/contrib/test/azure/main.cpp @@ -22,6 +22,9 @@ int main(int argc, char* argv[]) { try { auto containerClient = BlobContainerClient::CreateFromConnectionString(std::getenv("STORAGE_CONNECTION_STRING"), "td-test"); + + // Create the container if it does not exist + // std::cout << "Creating container: " << containerName << std::endl; // containerClient.CreateIfNotExists(); /**************** Container SDK client ************************/ @@ -30,6 +33,21 @@ int main(int argc, char* argv[]) { // auto response = containerClient.ListBlobs(); // auto blobListPage = response.Value; // auto blobListPage = response.Blobs; + //(void)_azUploadFrom(blobClient, file, offset, size); + /* + auto blockBlobClient = BlockBlobClient(endpointUrl, sharedKeyCredential); + + // Create some data to upload into the blob. + std::vector data = {1, 2, 3, 4}; + Azure::Core::IO::MemoryBodyStream stream(data); + + Azure::Response response = blockBlobClient.Upload(stream); + + Models::UploadBlockBlobResult model = response.Value; + std::cout << "Last modified date of uploaded blob: " << model.LastModified.ToString() + << std::endl; + */ + for (auto page = containerClient.ListBlobs(/*options*/); page.HasPage(); page.MoveToNextPage()) { for (auto& blob : page.Blobs) { std::cout << blob.Name << std::endl; diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 3da4b6e808..a2411c883d 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -222,10 +222,6 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int std::string containerName = tsS3BucketName; auto containerClient = blobServiceClient.GetBlobContainerClient(containerName); - // Create the container if it does not exist - // std::cout << "Creating container: " << containerName << std::endl; - // containerClient.CreateIfNotExists(); - std::string blobName = "blob.txt"; uint8_t blobContent[] = "Hello Azure!"; // Create the block blob client @@ -237,20 +233,6 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int // std::cout << "Uploading blob: " << blobName << std::endl; // blobClient.UploadFrom(blobContent, sizeof(blobContent)); blobClient.UploadFrom(file, offset, size); - //(void)_azUploadFrom(blobClient, file, offset, size); - /* - auto blockBlobClient = BlockBlobClient(endpointUrl, sharedKeyCredential); - - // Create some data to upload into the blob. 
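The commented-out sample carried over here exercises the same Azure SDK surface used elsewhere in the series (BlobContainerClient, BlockBlobClient, UploadFrom). A small self-contained sketch of that upload path, with placeholder container/blob names and the same STORAGE_CONNECTION_STRING variable:

#include <azure/storage/blobs.hpp>

#include <cstdlib>
#include <iostream>
#include <vector>

int main() {
  using namespace Azure::Storage::Blobs;
  try {
    // Same construction as the test program; "td-test" and "demo-object" are placeholders.
    auto container =
        BlobContainerClient::CreateFromConnectionString(std::getenv("STORAGE_CONNECTION_STRING"), "td-test");
    auto blob = container.GetBlockBlobClient("demo-object");

    // UploadFrom(buffer, size) writes the whole buffer as one block blob.
    std::vector<uint8_t> payload = {'t', 'd', 'e', 'n', 'g', 'i', 'n', 'e'};
    blob.UploadFrom(payload.data(), payload.size());

    std::cout << "uploaded " << blob.GetProperties().Value.BlobSize << " bytes" << std::endl;
  } catch (const Azure::Core::RequestFailedException& e) {
    std::cerr << "upload failed: " << e.ReasonPhrase << std::endl;
    return 1;
  }
  return 0;
}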
- std::vector data = {1, 2, 3, 4}; - Azure::Core::IO::MemoryBodyStream stream(data); - - Azure::Response response = blockBlobClient.Upload(stream); - - Models::UploadBlockBlobResult model = response.Value; - std::cout << "Last modified date of uploaded blob: " << model.LastModified.ToString() - << std::endl; - */ } catch (const Azure::Core::RequestFailedException &e) { /* std::cout << "Status Code: " << static_cast(e.StatusCode) << ", Reason Phrase: " << e.ReasonPhrase diff --git a/source/libs/azure/test/azTest.cpp b/source/libs/azure/test/azTest.cpp index 9e963508f8..2780cddacc 100644 --- a/source/libs/azure/test/azTest.cpp +++ b/source/libs/azure/test/azTest.cpp @@ -2,456 +2,78 @@ #include #include #include -/* -#include "walInt.h" -const char* ranStr = "tvapq02tcp"; -const int ranStrLen = strlen(ranStr); -SWalSyncInfo syncMeta = {0}; +#include "az.h" -class WalCleanEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } +extern int8_t tsS3Enabled; - static void TearDownTestCase() { walCleanUp(); } +int32_t azInitEnv() { + int32_t code = 0; - void SetUp() override { - taosRemoveDir(pathName); - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->rollPeriod = -1; - pCfg->segSize = -1; - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } + extern int8_t tsS3EpNum; - void TearDown() override { - walClose(pWal); - pWal = NULL; - } + extern char tsS3Hostname[][TSDB_FQDN_LEN]; + extern char tsS3AccessKeyId[][TSDB_FQDN_LEN]; + extern char tsS3AccessKeySecret[][TSDB_FQDN_LEN]; + extern char tsS3BucketName[TSDB_FQDN_LEN]; - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; + /* TCS parameter format + tsS3Hostname[0] = "endpoint/.blob.core.windows.net"; + tsS3AccessKeyId[0] = ""; + tsS3AccessKeySecret[0] = ""; + tsS3BucketName = ""; + */ -class WalCleanDeleteEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } + const char *hostname = "endpoint/.blob.core.windows.net"; + const char *accessKeyId = ""; + const char *accessKeySecret = ""; + const char *bucketName = ""; - static void TearDownTestCase() { walCleanUp(); } + tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); + tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); - void SetUp() override { - taosRemoveDir(pathName); - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } + tstrncpy(tsTempDir, "/tmp/", PATH_MAX); - void TearDown() override { - walClose(pWal); - pWal = NULL; - } + tsS3Enabled = true; - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; - -class WalKeepEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - - static void TearDownTestCase() { walCleanUp(); } - - void walResetEnv() { - TearDown(); - taosRemoveDir(pathName); - SetUp(); - } - - void SetUp() override { - SWalCfg* pCfg = (SWalCfg*)taosMemoryMalloc(sizeof(SWalCfg)); - 
memset(pCfg, 0, sizeof(SWalCfg)); - pCfg->rollPeriod = -1; - pCfg->segSize = -1; - pCfg->retentionPeriod = 0; - pCfg->retentionSize = 0; - pCfg->level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, pCfg); - taosMemoryFree(pCfg); - ASSERT(pWal != NULL); - } - - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; - -class WalRetentionEnv : public ::testing::Test { - protected: - static void SetUpTestCase() { - int code = walInit(NULL); - ASSERT(code == 0); - } - - static void TearDownTestCase() { walCleanUp(); } - - void walResetEnv() { - TearDown(); - taosRemoveDir(pathName); - SetUp(); - } - - void SetUp() override { - SWalCfg cfg; - cfg.rollPeriod = -1; - cfg.segSize = -1; - cfg.retentionPeriod = -1; - cfg.retentionSize = 0; - cfg.rollPeriod = 0; - cfg.vgId = 0; - cfg.level = TAOS_WAL_FSYNC; - pWal = walOpen(pathName, &cfg); - ASSERT(pWal != NULL); - } - - void TearDown() override { - walClose(pWal); - pWal = NULL; - } - - SWal* pWal = NULL; - const char* pathName = TD_TMP_DIR_PATH "wal_test"; -}; - -TEST_F(WalCleanEnv, createNew) { - walRollFileInfo(pWal); - ASSERT(pWal->fileInfoSet != NULL); - ASSERT_EQ(pWal->fileInfoSet->size, 1); - SWalFileInfo* pInfo = (SWalFileInfo*)taosArrayGetLast(pWal->fileInfoSet); - ASSERT_EQ(pInfo->firstVer, 0); - ASSERT_EQ(pInfo->lastVer, -1); - ASSERT_EQ(pInfo->closeTs, -1); - ASSERT_EQ(pInfo->fileSize, 0); + return code; } -TEST_F(WalCleanEnv, serialize) { - int code = walRollFileInfo(pWal); - ASSERT(code == 0); - ASSERT(pWal->fileInfoSet != NULL); +// TEST(AzTest, DISABLED_InterfaceTest) { +TEST(AzTest, InterfaceTest) { + int code = 0; - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - char* ss = NULL; - code = walMetaSerialize(pWal, &ss); - ASSERT(code == 0); - printf("%s\n", ss); - taosMemoryFree(ss); - code = walSaveMeta(pWal); - ASSERT(code == 0); + code = azInitEnv(); + GTEST_ASSERT_EQ(code, 0); + GTEST_ASSERT_EQ(tsS3Enabled, 1); + + code = azBegin(); + GTEST_ASSERT_EQ(code, 0); + + code = azCheckCfg(); + GTEST_ASSERT_EQ(code, 0); + /* + code = azPutObjectFromFileOffset(file, object_name, offset, size); + GTEST_ASSERT_EQ(code, 0); + code = azGetObjectBlock(object_name, offset, size, check, ppBlock); + GTEST_ASSERT_EQ(code, 0); + + azDeleteObjectsByPrefix(prefix); + // list object to check + + code = azPutObjectFromFile2(file, object, withcp); + GTEST_ASSERT_EQ(code, 0); + code = azGetObjectsByPrefix(prefix, path); + GTEST_ASSERT_EQ(code, 0); + code = azDeleteObjects(object_name, nobject); + GTEST_ASSERT_EQ(code, 0); + code = azGetObjectToFile(object_name, fileName); + GTEST_ASSERT_EQ(code, 0); + + // GTEST_ASSERT_NE(pEnv, nullptr); + */ + + azEnd(); } - -TEST_F(WalCleanEnv, removeOldMeta) { - int code = walRollFileInfo(pWal); - ASSERT(code == 0); - ASSERT(pWal->fileInfoSet != NULL); - code = walSaveMeta(pWal); - ASSERT(code == 0); - code = walRollFileInfo(pWal); - ASSERT(code == 0); - code = walSaveMeta(pWal); - ASSERT(code == 0); -} - -TEST_F(WalKeepEnv, readOldMeta) { - walResetEnv(); - int code; - - syncMeta.isWeek = -1; - syncMeta.seqNum = UINT64_MAX; - syncMeta.term = UINT64_MAX; - - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - 
code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); - ASSERT_EQ(pWal->vers.lastVer, i); - } - char* oldss = NULL; - code = walMetaSerialize(pWal, &oldss); - ASSERT(code == 0); - - TearDown(); - SetUp(); - - ASSERT_EQ(pWal->vers.firstVer, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - - char* newss = NULL; - code = walMetaSerialize(pWal, &newss); - ASSERT(code == 0); - - int len = strlen(oldss); - ASSERT_EQ(len, strlen(newss)); - for (int i = 0; i < len; i++) { - EXPECT_EQ(oldss[i], newss[i]); - } - taosMemoryFree(oldss); - taosMemoryFree(newss); -} - -TEST_F(WalCleanEnv, write) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walAppendLog(pWal, i + 2, i, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, TSDB_CODE_WAL_INVALID_VER); - ASSERT_EQ(pWal->vers.lastVer, i); - } - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} - -TEST_F(WalCleanEnv, rollback) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - } - code = walRollback(pWal, 12); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - code = walRollback(pWal, 9); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 8); - code = walRollback(pWal, 5); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 4); - code = walRollback(pWal, 3); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 2); - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} - -TEST_F(WalCleanEnv, rollbackMultiFile) { - int code; - for (int i = 0; i < 10; i++) { - code = walAppendLog(pWal, i, i + 1, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - if (i == 5) { - walBeginSnapshot(pWal, i, 0); - walEndSnapshot(pWal); - } - } - code = walRollback(pWal, 12); - ASSERT_NE(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 9); - code = walRollback(pWal, 9); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 8); - code = walRollback(pWal, 6); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 5); - code = walRollback(pWal, 5); - ASSERT_NE(code, 0); - - ASSERT_EQ(pWal->vers.lastVer, 5); - - code = walAppendLog(pWal, 6, 6, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, 6); - - code = walSaveMeta(pWal); - ASSERT_EQ(code, 0); -} - -TEST_F(WalCleanDeleteEnv, roll) { - int code; - int i; - for (i = 0; i < 100; i++) { - code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - ASSERT_EQ(pWal->vers.lastVer, i); - code = walCommit(pWal, i); - ASSERT_EQ(pWal->vers.commitVer, i); - } - - walBeginSnapshot(pWal, i - 1, 0); - ASSERT_EQ(pWal->vers.verInSnapshotting, i - 1); - walEndSnapshot(pWal); - ASSERT_EQ(pWal->vers.snapshotVer, i - 1); - ASSERT_EQ(pWal->vers.verInSnapshotting, -1); - - code = walAppendLog(pWal, 5, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_NE(code, 0); - - for (; i < 200; i++) { - code = walAppendLog(pWal, i, 0, syncMeta, (void*)ranStr, ranStrLen); - ASSERT_EQ(code, 0); - code = walCommit(pWal, i); - ASSERT_EQ(pWal->vers.commitVer, i); - } - - code = walBeginSnapshot(pWal, i - 1, 0); - ASSERT_EQ(code, 0); - code = walEndSnapshot(pWal); - ASSERT_EQ(code, 0); -} - -TEST_F(WalKeepEnv, readHandleRead) { - walResetEnv(); - int code; - SWalReader* pRead = walOpenReader(pWal, NULL, 0); - ASSERT(pRead != 
NULL); - - int i; - for (i = 0; i < 100; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 100; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - walCloseReader(pRead); -} - -TEST_F(WalRetentionEnv, repairMeta1) { - walResetEnv(); - int code; - - int i; - for (i = 0; i < 100; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - - TearDown(); - - // getchar(); - char buf[100]; - sprintf(buf, "%s/meta-ver%d", pathName, 0); - taosRemoveFile(buf); - sprintf(buf, "%s/meta-ver%d", pathName, 1); - taosRemoveFile(buf); - SetUp(); - // getchar(); - - ASSERT_EQ(pWal->vers.lastVer, 99); - - SWalReader* pRead = walOpenReader(pWal, NULL, 0); - ASSERT(pRead != NULL); - - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 100; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - - for (i = 100; i < 200; i++) { - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, i); - int len = strlen(newStr); - code = walAppendLog(pWal, i, 0, syncMeta, newStr, len); - ASSERT_EQ(code, 0); - } - - for (int i = 0; i < 1000; i++) { - int ver = taosRand() % 200; - code = walReadVer(pRead, ver); - ASSERT_EQ(code, 0); - - // printf("rrbody: \n"); - // for(int i = 0; i < pRead->pHead->head.len; i++) { - // printf("%d ", pRead->pHead->head.body[i]); - //} - // printf("\n"); - - ASSERT_EQ(pRead->pHead->head.version, ver); - ASSERT_EQ(pRead->curVersion, ver + 1); - char newStr[100]; - sprintf(newStr, "%s-%d", ranStr, ver); - int len = strlen(newStr); - ASSERT_EQ(pRead->pHead->head.bodyLen, len); - for (int j = 0; j < len; j++) { - EXPECT_EQ(newStr[j], pRead->pHead->head.body[j]); - } - } - walCloseReader(pRead); -} -*/ From 5b750b350f03ce8c9f546dcead6ba8d205c6c266 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 18 Oct 2024 16:29:21 +0800 Subject: [PATCH 032/102] az/checkCfg: remove duplicate void fprintf --- contrib/test/azure/main.cpp | 15 +++++++ source/libs/azure/src/az.cpp | 81 +++++++++++++++++------------------- 2 files changed, 54 insertions(+), 42 deletions(-) diff --git a/contrib/test/azure/main.cpp b/contrib/test/azure/main.cpp index badfef623b..943546a5fb 100644 --- a/contrib/test/azure/main.cpp +++ b/contrib/test/azure/main.cpp @@ -1,3 +1,18 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. 
+ * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + /** * @file * @brief Application that consumes the Azure SDK for C++. diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index a2411c883d..7e4b711fb4 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -44,21 +44,28 @@ int32_t azBegin() { return TSDB_CODE_SUCCESS; } void azEnd() {} +static void checkPrint(const char *fmt, ...) { + va_list arg_ptr; + va_start(arg_ptr, fmt); + (void)vfprintf(stderr, fmt, arg_ptr); + va_end(arg_ptr); +} + static void azDumpCfgByEp(int8_t epIndex) { // clang-format off - (void)fprintf(stdout, - "%-24s %s\n" - "%-24s %s\n" - "%-24s %s\n" - "%-24s %s\n" - "%-24s %s\n" - "%-24s %s\n", - "hostName", tsS3Hostname[epIndex], - "bucketName", tsS3BucketName, - "protocol", "https only", - "uristyle", "path only", - "accessKey", tsS3AccessKeyId[epIndex], - "accessKeySecret", tsS3AccessKeySecret[epIndex]); + checkPrint( + "%-24s %s\n" + "%-24s %s\n" + "%-24s %s\n" + "%-24s %s\n" + "%-24s %s\n" + "%-24s %s\n", + "hostName", tsS3Hostname[epIndex], + "bucketName", tsS3BucketName, + "protocol", "https only", + "uristyle", "path only", + "accessKey", tsS3AccessKeyId[epIndex], + "accessKeySecret", tsS3AccessKeySecret[epIndex]); // clang-format on } @@ -81,17 +88,15 @@ static int32_t azListBucket(char const *bucketname) { Azure::Storage::Blobs::ListBlobsOptions options; options.Prefix = "s3"; - (void)fprintf(stderr, "objects:\n"); - // std::set listBlobs; + checkPrint("objects:\n"); for (auto pageResult = containerClient.ListBlobs(options); pageResult.HasPage(); pageResult.MoveToNextPage()) { for (const auto &blob : pageResult.Blobs) { - (void)fprintf(stderr, "%s\n", blob.Name.c_str()); + checkPrint("%s\n", blob.Name.c_str()); } } } catch (const Azure::Core::RequestFailedException &e) { azError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), e.ReasonPhrase.c_str()); - // azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); code = TAOS_SYSTEM_ERROR(EIO); TAOS_RETURN(code); @@ -102,10 +107,7 @@ static int32_t azListBucket(char const *bucketname) { int32_t azCheckCfg() { int32_t code = 0, lino = 0; - int8_t i = 0; - // for (; i < tsS3EpNum; i++) { - (void)fprintf(stdout, "test s3 ep (%d/%d):\n", i + 1, tsS3EpNum); azDumpCfgByEp(0); // test put @@ -130,53 +132,51 @@ int32_t azCheckCfg() { TdFilePtr fp = taosOpenFile(path, TD_FILE_CREATE | TD_FILE_WRITE | TD_FILE_READ | TD_FILE_TRUNC); if (!fp) { - (void)fprintf(stderr, "failed to open test file: %s.\n", path); - // azError("ERROR: %s Failed to open %s", __func__, path); + checkPrint("failed to open test file: %s.\n", path); TAOS_CHECK_GOTO(terrno, &lino, _next); } if (taosWriteFile(fp, testdata, strlen(testdata)) < 0) { - (void)fprintf(stderr, "failed to write test file: %s.\n", path); + checkPrint("failed to write test file: %s.\n", path); TAOS_CHECK_GOTO(terrno, &lino, _next); } if (taosFsyncFile(fp) < 0) { - (void)fprintf(stderr, "failed to fsync test file: 
%s.\n", path); + checkPrint("failed to fsync test file: %s.\n", path); TAOS_CHECK_GOTO(terrno, &lino, _next); } (void)taosCloseFile(&fp); - (void)fprintf(stderr, "\nstart to put object: %s, file: %s content: %s\n", objectname[0], path, testdata); + checkPrint("\nstart to put object: %s, file: %s content: %s\n", objectname[0], path, testdata); code = azPutObjectFromFileOffset(path, objectname[0], 0, 16); if (code != 0) { - (void)fprintf(stderr, "put object %s : failed.\n", objectname[0]); + checkPrint("put object %s : failed.\n", objectname[0]); TAOS_CHECK_GOTO(code, &lino, _next); } - (void)fprintf(stderr, "put object %s: success.\n\n", objectname[0]); + checkPrint("put object %s: success.\n\n", objectname[0]); // list buckets - (void)fprintf(stderr, "start to list bucket %s by prefix s3.\n", tsS3BucketName); - // code = s3ListBucketByEp(tsS3BucketName, i); + checkPrint("start to list bucket %s by prefix s3.\n", tsS3BucketName); code = azListBucket(tsS3BucketName); if (code != 0) { - (void)fprintf(stderr, "listing bucket %s : failed.\n", tsS3BucketName); + checkPrint("listing bucket %s : failed.\n", tsS3BucketName); TAOS_CHECK_GOTO(code, &lino, _next); } - (void)fprintf(stderr, "listing bucket %s: success.\n\n", tsS3BucketName); + checkPrint("listing bucket %s: success.\n\n", tsS3BucketName); // test range get - (void)fprintf(stderr, "start to range get object %s offset: %d len: %d.\n", objectname[0], c_offset, c_len); + checkPrint("start to range get object %s offset: %d len: %d.\n", objectname[0], c_offset, c_len); code = azGetObjectBlock(objectname[0], c_offset, c_len, true, &pBlock); if (code != 0) { - (void)fprintf(stderr, "get object %s : failed.\n", objectname[0]); + checkPrint("get object %s : failed.\n", objectname[0]); TAOS_CHECK_GOTO(code, &lino, _next); } (void)memcpy(buf, pBlock, c_len); taosMemoryFree(pBlock); - (void)fprintf(stderr, "object content: %s\n", buf); - (void)fprintf(stderr, "get object %s: success.\n\n", objectname[0]); + checkPrint("object content: %s\n", buf); + checkPrint("get object %s: success.\n\n", objectname[0]); // delete test object - (void)fprintf(stderr, "start to delete object: %s.\n", objectname[0]); + checkPrint("start to delete object: %s.\n", objectname[0]); // code = azDeleteObjectsByPrefix(objectname[0]); azDeleteObjectsByPrefix(objectname[0]); /* @@ -185,7 +185,7 @@ int32_t azCheckCfg() { TAOS_CHECK_GOTO(code, &lino, _next); } */ - (void)fprintf(stderr, "delete object %s: success.\n\n", objectname[0]); + checkPrint("delete object %s: success.\n\n", objectname[0]); _next: if (fp) { @@ -193,13 +193,10 @@ _next: } if (TSDB_CODE_SUCCESS != code) { - (void)fprintf(stderr, "s3 check failed, code: %d, line: %d, index: %d.\n", code, lino, i); + checkPrint("s3 check failed, code: %d, line: %d.\n", code, lino); } - (void)fprintf(stdout, "=================================================================\n"); - //} - - // azEnd(); + checkPrint("=================================================================\n"); TAOS_RETURN(code); } From af95725c763aaa096202d767c49e008bda9999b0 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 18 Oct 2024 16:33:27 +0800 Subject: [PATCH 033/102] az/test: ut for linux only --- source/libs/azure/test/CMakeLists.txt | 4 ++++ source/libs/azure/test/azTest.cpp | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/source/libs/azure/test/CMakeLists.txt b/source/libs/azure/test/CMakeLists.txt index 01570df730..ea91dbd2fc 100644 --- a/source/libs/azure/test/CMakeLists.txt +++ 
b/source/libs/azure/test/CMakeLists.txt @@ -1,3 +1,5 @@ +if (TD_LINUX) + aux_source_directory(. AZ_TEST_SRC) add_executable(azTest ${AZ_TEST_SRC}) @@ -16,3 +18,5 @@ add_test( NAME az_test COMMAND azTest ) + +endif(TD_LINUX) diff --git a/source/libs/azure/test/azTest.cpp b/source/libs/azure/test/azTest.cpp index 2780cddacc..8d428fbb69 100644 --- a/source/libs/azure/test/azTest.cpp +++ b/source/libs/azure/test/azTest.cpp @@ -41,8 +41,8 @@ int32_t azInitEnv() { return code; } -// TEST(AzTest, DISABLED_InterfaceTest) { -TEST(AzTest, InterfaceTest) { +TEST(AzTest, DISABLED_InterfaceTest) { + // TEST(AzTest, InterfaceTest) { int code = 0; code = azInitEnv(); From 5b036b394c70ecc36624295db637b76d0365689f Mon Sep 17 00:00:00 2001 From: sheyanjie-qq <249478495@qq.com> Date: Fri, 18 Oct 2024 17:28:58 +0800 Subject: [PATCH 034/102] update jdbc driver version --- docs/examples/JDBC/JDBCDemo/pom.xml | 2 +- docs/examples/JDBC/connectionPools/pom.xml | 2 +- docs/examples/JDBC/consumer-demo/pom.xml | 2 +- docs/examples/JDBC/taosdemo/pom.xml | 2 +- docs/examples/java/pom.xml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/examples/JDBC/JDBCDemo/pom.xml b/docs/examples/JDBC/JDBCDemo/pom.xml index 315b147cce..4b3e1ab675 100644 --- a/docs/examples/JDBC/JDBCDemo/pom.xml +++ b/docs/examples/JDBC/JDBCDemo/pom.xml @@ -19,7 +19,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 org.locationtech.jts diff --git a/docs/examples/JDBC/connectionPools/pom.xml b/docs/examples/JDBC/connectionPools/pom.xml index 1791bfe8bc..e3ef30d2f8 100644 --- a/docs/examples/JDBC/connectionPools/pom.xml +++ b/docs/examples/JDBC/connectionPools/pom.xml @@ -18,7 +18,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 diff --git a/docs/examples/JDBC/consumer-demo/pom.xml b/docs/examples/JDBC/consumer-demo/pom.xml index dcabfc1249..709f87d9c1 100644 --- a/docs/examples/JDBC/consumer-demo/pom.xml +++ b/docs/examples/JDBC/consumer-demo/pom.xml @@ -17,7 +17,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 com.google.guava diff --git a/docs/examples/JDBC/taosdemo/pom.xml b/docs/examples/JDBC/taosdemo/pom.xml index ffe159ea49..ab5912aa9e 100644 --- a/docs/examples/JDBC/taosdemo/pom.xml +++ b/docs/examples/JDBC/taosdemo/pom.xml @@ -67,7 +67,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 diff --git a/docs/examples/java/pom.xml b/docs/examples/java/pom.xml index f23b73177e..e1a9504249 100644 --- a/docs/examples/java/pom.xml +++ b/docs/examples/java/pom.xml @@ -22,7 +22,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 From 4268f9e65cd350ff80aa481fc96a1b428bc7e2c0 Mon Sep 17 00:00:00 2001 From: sheyanjie-qq <249478495@qq.com> Date: Wed, 16 Oct 2024 19:23:21 +0800 Subject: [PATCH 035/102] java websocket use own protocal --- .../com/taos/example/SchemalessWsTest.java | 4 +- .../com/taos/example/WSConnectExample.java | 4 +- .../example/WSParameterBindingBasicDemo.java | 7 +- .../example/WSParameterBindingFullDemo.java | 21 +++--- .../07-develop/01-connect/_connect_rust.mdx | 2 +- docs/zh/07-develop/01-connect/index.md | 26 +++---- docs/zh/07-develop/02-sql.md | 16 ++-- docs/zh/07-develop/04-schemaless.md | 2 +- docs/zh/07-develop/05-stmt.md | 2 +- docs/zh/07-develop/07-tmq.md | 16 ++-- docs/zh/08-operation/18-dual.md | 17 ++--- .../01-components/03-taosadapter.md | 4 +- docs/zh/14-reference/05-connector/10-cpp.mdx | 20 ++--- docs/zh/14-reference/05-connector/14-java.mdx | 73 +++++++++++-------- docs/zh/14-reference/05-connector/26-rust.mdx | 18 ++--- .../14-reference/05-connector/30-python.mdx | 8 +- 
docs/zh/14-reference/05-connector/35-node.mdx | 6 +- .../14-reference/05-connector/40-csharp.mdx | 2 +- docs/zh/14-reference/05-connector/index.md | 2 +- 19 files changed, 131 insertions(+), 119 deletions(-) diff --git a/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java b/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java index 08f66c2227..0f35e38f57 100644 --- a/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java +++ b/docs/examples/java/src/main/java/com/taos/example/SchemalessWsTest.java @@ -17,8 +17,8 @@ public class SchemalessWsTest { private static final String jsonDemo = "{\"metric\": \"metric_json\",\"timestamp\": 1626846400,\"value\": 10.3, \"tags\": {\"groupid\": 2, \"location\": \"California.SanFrancisco\", \"id\": \"d1001\"}}"; public static void main(String[] args) throws SQLException { - final String url = "jdbc:TAOS-RS://" + host + ":6041?user=root&password=taosdata&batchfetch=true"; - try(Connection connection = DriverManager.getConnection(url)){ + final String url = "jdbc:TAOS-WS://" + host + ":6041?user=root&password=taosdata"; + try (Connection connection = DriverManager.getConnection(url)) { init(connection); AbstractConnection conn = connection.unwrap(AbstractConnection.class); diff --git a/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java b/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java index afe74ace83..052af71a83 100644 --- a/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java +++ b/docs/examples/java/src/main/java/com/taos/example/WSConnectExample.java @@ -12,9 +12,9 @@ public class WSConnectExample { public static void main(String[] args) throws Exception { // use // String jdbcUrl = - // "jdbc:TAOS-RS://localhost:6041/dbName?user=root&password=taosdata&batchfetch=true"; + // "jdbc:TAOS-WS://localhost:6041/dbName?user=root&password=taosdata"; // if you want to connect a specified database named "dbName". 
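        // For illustration only (not part of the original demo): the same WebSocket URL may
        // also carry a database name and extra parameters in one string, for example
        // String jdbcUrl = "jdbc:TAOS-WS://localhost:6041/power?user=root&password=taosdata&httpConnectTimeout=60000";
        // The "power" database and the timeout value are hypothetical; any existing database
        // and any parameter listed in the URL specification of 14-java.mdx can be used instead.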
- String jdbcUrl = "jdbc:TAOS-RS://localhost:6041?user=root&password=taosdata&batchfetch=true"; + String jdbcUrl = "jdbc:TAOS-WS://localhost:6041?user=root&password=taosdata"; Properties connProps = new Properties(); connProps.setProperty(TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT, "true"); connProps.setProperty(TSDBDriver.PROPERTY_KEY_CHARSET, "UTF-8"); diff --git a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java index eab8df06b9..1353ebbddc 100644 --- a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java +++ b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingBasicDemo.java @@ -15,7 +15,7 @@ public class WSParameterBindingBasicDemo { public static void main(String[] args) throws SQLException { - String jdbcUrl = "jdbc:TAOS-RS://" + host + ":6041/?batchfetch=true"; + String jdbcUrl = "jdbc:TAOS-WS://" + host + ":6041"; try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) { init(conn); @@ -40,7 +40,7 @@ public class WSParameterBindingBasicDemo { pstmt.setFloat(4, random.nextFloat()); pstmt.addBatch(); } - int [] exeResult = pstmt.executeBatch(); + int[] exeResult = pstmt.executeBatch(); // you can check exeResult here System.out.println("Successfully inserted " + exeResult.length + " rows to power.meters."); } @@ -60,7 +60,8 @@ public class WSParameterBindingBasicDemo { try (Statement stmt = conn.createStatement()) { stmt.execute("CREATE DATABASE IF NOT EXISTS power"); stmt.execute("USE power"); - stmt.execute("CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId INT, location BINARY(24))"); + stmt.execute( + "CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId INT, location BINARY(24))"); } } } diff --git a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java index f23fb187f4..7eaccb3db2 100644 --- a/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java +++ b/docs/examples/java/src/main/java/com/taos/example/WSParameterBindingFullDemo.java @@ -40,7 +40,7 @@ public class WSParameterBindingFullDemo { public static void main(String[] args) throws SQLException { - String jdbcUrl = "jdbc:TAOS-RS://" + host + ":6041/?batchfetch=true"; + String jdbcUrl = "jdbc:TAOS-WS://" + host + ":6041/"; try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) { @@ -51,8 +51,10 @@ public class WSParameterBindingFullDemo { stmtAll(conn); } catch (SQLException ex) { - // handle any errors, please refer to the JDBC specifications for detailed exceptions info - System.out.println("Failed to insert data using stmt, ErrCode:" + ex.getErrorCode() + "; ErrMessage: " + ex.getMessage()); + // handle any errors, please refer to the JDBC specifications for detailed + // exceptions info + System.out.println("Failed to insert data using stmt, ErrCode:" + ex.getErrorCode() + "; ErrMessage: " + + ex.getMessage()); throw ex; } catch (Exception ex) { System.out.println("Failed to insert data using stmt, ErrMessage: " + ex.getMessage()); @@ -104,30 +106,29 @@ public class WSParameterBindingFullDemo { pstmt.setTagBoolean(3, true); pstmt.setTagString(4, "binary_value"); pstmt.setTagNString(5, "nchar_value"); - pstmt.setTagVarbinary(6, new byte[]{(byte) 
0x98, (byte) 0xf4, 0x6e}); - pstmt.setTagGeometry(7, new byte[]{ + pstmt.setTagVarbinary(6, new byte[] { (byte) 0x98, (byte) 0xf4, 0x6e }); + pstmt.setTagGeometry(7, new byte[] { 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x59, 0x40, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x59, 0x40}); + 0x00, 0x00, 0x00, 0x59, 0x40 }); long current = System.currentTimeMillis(); - pstmt.setTimestamp(1, new Timestamp(current)); pstmt.setInt(2, 1); pstmt.setDouble(3, 1.1); pstmt.setBoolean(4, true); pstmt.setString(5, "binary_value"); pstmt.setNString(6, "nchar_value"); - pstmt.setVarbinary(7, new byte[]{(byte) 0x98, (byte) 0xf4, 0x6e}); - pstmt.setGeometry(8, new byte[]{ + pstmt.setVarbinary(7, new byte[] { (byte) 0x98, (byte) 0xf4, 0x6e }); + pstmt.setGeometry(8, new byte[] { 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x59, 0x40, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x59, 0x40}); + 0x00, 0x00, 0x00, 0x59, 0x40 }); pstmt.addBatch(); pstmt.executeBatch(); System.out.println("Successfully inserted rows to example_all_type_stmt.ntb"); diff --git a/docs/zh/07-develop/01-connect/_connect_rust.mdx b/docs/zh/07-develop/01-connect/_connect_rust.mdx index 0e65e8f920..d88a3335ca 100644 --- a/docs/zh/07-develop/01-connect/_connect_rust.mdx +++ b/docs/zh/07-develop/01-connect/_connect_rust.mdx @@ -3,6 +3,6 @@ ``` :::note -对于 Rust 连接器, 连接方式的不同只体现在使用的特性不同。如果启用了 "ws" 特性,那么只有 Websocket 的实现会被编译进来。 +对于 Rust 连接器, 连接方式的不同只体现在使用的特性不同。如果启用了 "ws" 特性,那么只有 WebSocket 的实现会被编译进来。 ::: diff --git a/docs/zh/07-develop/01-connect/index.md b/docs/zh/07-develop/01-connect/index.md index 1dfb95d169..bd26bea46d 100644 --- a/docs/zh/07-develop/01-connect/index.md +++ b/docs/zh/07-develop/01-connect/index.md @@ -28,7 +28,7 @@ TDengine 提供了丰富的应用程序开发接口,为了便于用户快速 1. 通过客户端驱动程序 taosc 直接与服务端程序 taosd 建立连接,这种连接方式下文中简称 “原生连接”。 2. 通过 taosAdapter 组件提供的 REST API 建立与 taosd 的连接,这种连接方式下文中简称 “REST 连接” -3. 通过 taosAdapter 组件提供的 Websocket API 建立与 taosd 的连接,这种连接方式下文中简称 “Websocket 连接” +3. 通过 taosAdapter 组件提供的 WebSocket API 建立与 taosd 的连接,这种连接方式下文中简称 “WebSocket 连接” ![TDengine connection type](connection-type-zh.webp) @@ -38,9 +38,9 @@ TDengine 提供了丰富的应用程序开发接口,为了便于用户快速 关键不同点在于: 1. 使用 原生连接,需要保证客户端的驱动程序 taosc 和服务端的 TDengine 版本配套。 -2. 使用 REST 连接,用户无需安装客户端驱动程序 taosc,具有跨平台易用的优势,但是无法体验数据订阅和二进制数据类型等功能。另外与 原生连接 和 Websocket 连接相比,REST连接的性能最低。REST 接口是无状态的。在使用 REST 连接时,需要在 SQL 中指定表、超级表的数据库名称。 -3. 使用 Websocket 连接,用户也无需安装客户端驱动程序 taosc。 -4. 连接云服务实例,必须使用 REST 连接 或 Websocket 连接。 +2. 使用 REST 连接,用户无需安装客户端驱动程序 taosc,具有跨平台易用的优势,但是无法体验数据订阅和二进制数据类型等功能。另外与 原生连接 和 WebSocket 连接相比,REST连接的性能最低。REST 接口是无状态的。在使用 REST 连接时,需要在 SQL 中指定表、超级表的数据库名称。 +3. 使用 WebSocket 连接,用户也无需安装客户端驱动程序 taosc。 +4. 
连接云服务实例,必须使用 REST 连接 或 WebSocket 连接。 **推荐使用 WebSocket 连接** @@ -126,7 +126,7 @@ TDengine 提供了丰富的应用程序开发接口,为了便于用户快速 ```bash pip3 install taos-ws-py ``` - :::note 此安装包为 Websocket 连接器 + :::note 此安装包为 WebSocket 连接器 - 同时安装 `taospy` 和 `taos-ws-py` ```bash pip3 install taospy[ws] @@ -182,7 +182,7 @@ taos = { version = "*"} ``` :::info -Rust 连接器通过不同的特性区分不同的连接方式。默认同时支持原生连接和 Websocket 连接,如果仅需要建立 Websocket 连接,可设置 `ws` 特性: +Rust 连接器通过不同的特性区分不同的连接方式。默认同时支持原生连接和 WebSocket 连接,如果仅需要建立 WebSocket 连接,可设置 `ws` 特性: ```toml taos = { version = "*", default-features = false, features = ["ws"] } @@ -201,7 +201,7 @@ taos = { version = "*", default-features = false, features = ["ws"] } ``` npm install @tdengine/websocket ``` - :::note Node.js 目前只支持 Websocket 连接 + :::note Node.js 目前只支持 WebSocket 连接 - **安装验证** - 新建安装验证目录,例如:`~/tdengine-test`,下载 GitHub 上 [nodejsChecker.js 源代码](https://github.com/taosdata/TDengine/tree/main/docs/examples/node/websocketexample/nodejsChecker.js)到本地。 - 在命令行中执行以下命令。 @@ -271,12 +271,10 @@ dotnet add package TDengine.Connector Java 连接器建立连接的参数有 URL 和 Properties。 TDengine 的 JDBC URL 规范格式为: - `jdbc:[TAOS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` + `jdbc:[TAOS|TAOS-WS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` URL 和 Properties 的详细参数说明和如何使用详见 [url 规范](../../reference/connector/java/#url-规范) - **注**:REST 连接中增加 `batchfetch` 参数并设置为 true,将开启 WebSocket 连接。 - Python 连接器使用 `connect()` 方法来建立连接,下面是连接参数的具体说明: @@ -387,8 +385,8 @@ DSN 的详细说明和如何使用详见 [连接功能](../../reference/connecto - `reconnectIntervalMs`:重连间隔毫秒时间,默认为 2000。 -**Websocket 连接** -C/C++ 语言连接器 Websocket 连接方式使用 `ws_connect()` 函数用于建立与 TDengine 数据库的连接。其参数为 DSN 描述字符串,其基本结构如下: +**WebSocket 连接** +C/C++ 语言连接器 WebSocket 连接方式使用 `ws_connect()` 函数用于建立与 TDengine 数据库的连接。其参数为 DSN 描述字符串,其基本结构如下: ```text [+]://[[:@]:][/][?=[&=]] @@ -417,8 +415,8 @@ C/C++ 语言连接器原生连接方式使用 `taos_connect()` 函数用于建 -### Websocket 连接 -下面是各语言连接器建立 Websocket 连接代码样例。演示了如何使用 Websocket 连接方式连接到 TDengine 数据库,并对连接设定一些参数。整个过程主要涉及到数据库连接的建立和异常处理。 +### WebSocket 连接 +下面是各语言连接器建立 WebSocket 连接代码样例。演示了如何使用 WebSocket 连接方式连接到 TDengine 数据库,并对连接设定一些参数。整个过程主要涉及到数据库连接的建立和异常处理。 diff --git a/docs/zh/07-develop/02-sql.md b/docs/zh/07-develop/02-sql.md index 5461c975dd..b4274045fc 100644 --- a/docs/zh/07-develop/02-sql.md +++ b/docs/zh/07-develop/02-sql.md @@ -33,7 +33,7 @@ REST API:直接调用 `taosadapter` 提供的 REST API 接口,进行数据 -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/create_db_ws.py}} ``` @@ -69,7 +69,7 @@ REST API:直接调用 `taosadapter` 提供的 REST API 接口,进行数据 -```c title="Websocket 连接" +```c title="WebSocket 连接" {{#include docs/examples/c-ws/create_db_demo.c:create_db_and_table}} ``` @@ -114,7 +114,7 @@ NOW 为系统内部函数,默认为客户端所在计算机当前时间。 NOW -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/insert_ws.py}} ``` @@ -151,7 +151,7 @@ NOW 为系统内部函数,默认为客户端所在计算机当前时间。 NOW -```c title="Websocket 连接" +```c title="WebSocket 连接" {{#include docs/examples/c-ws/insert_data_demo.c:insert_data}} ``` @@ -189,7 +189,7 @@ curl --location -uroot:taosdata 'http://127.0.0.1:6041/rest/sql' \ -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/query_ws.py}} ``` @@ -230,7 +230,7 @@ rust 连接器还支持使用 **serde** 进行反序列化行为结构体的结 -```c title="Websocket 
连接" +```c title="WebSocket 连接" {{#include docs/examples/c-ws/query_data_demo.c:query_data}} ``` @@ -273,7 +273,7 @@ reqId 可用于请求链路追踪,reqId 就像分布式系统中的 traceId -```python title="Websocket 连接" +```python title="WebSocket 连接" {{#include docs/examples/python/reqid_ws.py}} ``` @@ -310,7 +310,7 @@ reqId 可用于请求链路追踪,reqId 就像分布式系统中的 traceId -```c "Websocket 连接" +```c "WebSocket 连接" {{#include docs/examples/c-ws/with_reqid_demo.c:with_reqid}} ``` diff --git a/docs/zh/07-develop/04-schemaless.md b/docs/zh/07-develop/04-schemaless.md index a865b58b28..bf10b41736 100644 --- a/docs/zh/07-develop/04-schemaless.md +++ b/docs/zh/07-develop/04-schemaless.md @@ -191,7 +191,7 @@ st,t1=3,t2=4,t3=t3 c1=3i64,c6="passit" 1626006833640000000 ::: -### Websocket 连接 +### WebSocket 连接 diff --git a/docs/zh/07-develop/05-stmt.md b/docs/zh/07-develop/05-stmt.md index 624600ba4d..74b44ba8e6 100644 --- a/docs/zh/07-develop/05-stmt.md +++ b/docs/zh/07-develop/05-stmt.md @@ -23,7 +23,7 @@ import TabItem from "@theme/TabItem"; - 执行批量插入操作,将这些数据行插入到对应的子表中。 3. 最后打印实际插入表中的行数。 -## Websocket 连接 +## WebSocket 连接 ```java diff --git a/docs/zh/07-develop/07-tmq.md b/docs/zh/07-develop/07-tmq.md index c668203259..a91a764c67 100644 --- a/docs/zh/07-develop/07-tmq.md +++ b/docs/zh/07-develop/07-tmq.md @@ -94,7 +94,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 -- Websocket 连接: 因为使用 dsn,不需要 `td.connect.ip`,`td.connect.port`,`td.connect.user` 和 `td.connect.pass` 四个配置项,其余同通用配置项。 +- WebSocket 连接: 因为使用 dsn,不需要 `td.connect.ip`,`td.connect.port`,`td.connect.user` 和 `td.connect.pass` 四个配置项,其余同通用配置项。 - 原生连接: 同通用基础配置项。 @@ -103,8 +103,8 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 -### Websocket 连接 -介绍各语言连接器使用 Websocket 连接方式创建消费者。指定连接的服务器地址,设置自动提交,从最新消息开始消费,指定 `group.id` 和 `client.id` 等信息。有的语言的连接器还支持反序列化参数。 +### WebSocket 连接 +介绍各语言连接器使用 WebSocket 连接方式创建消费者。指定连接的服务器地址,设置自动提交,从最新消息开始消费,指定 `group.id` 和 `client.id` 等信息。有的语言的连接器还支持反序列化参数。 @@ -234,7 +234,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 订阅消费数据 消费者订阅主题后,可以开始接收并处理这些主题中的消息。订阅消费数据的示例代码如下: -### Websocket 连接 +### WebSocket 连接 @@ -403,7 +403,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 指定订阅的 Offset 消费者可以指定从特定 Offset 开始读取分区中的消息,这允许消费者重读消息或跳过已处理的消息。下面展示各语言连接器如何指定订阅的 Offset。 -### Websocket 连接 +### WebSocket 连接 @@ -549,7 +549,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 **注意**:手工提交消费进度前确保消息正常处理完成,否则处理出错的消息不会被再次消费。自动提交是在本次 `poll` 消息时可能会提交上次消息的消费进度,因此请确保消息处理完毕再进行下一次 `poll` 或消息获取。 -### Websocket 连接 +### WebSocket 连接 @@ -663,7 +663,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 取消订阅和关闭消费 消费者可以取消对主题的订阅,停止接收消息。当消费者不再需要时,应该关闭消费者实例,以释放资源和断开与 TDengine 服务器的连接。 -### Websocket 连接 +### WebSocket 连接 @@ -766,7 +766,7 @@ Rust 连接器创建消费者的参数为 DSN, 可以设置的参数列表请 ## 完整示例 -### Websocket 连接 +### WebSocket 连接
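
For reference alongside the renamed headings above, a minimal sketch of a complete WebSocket subscription flow in Java. It uses only configuration keys that the subscription docs in this patch already mention (`group.id`, `client.id`, automatic commit, consume-from-latest); the remaining names (`td.connect.type`, `bootstrap.servers`, the `ReferenceDeserializer`-based value deserializer, the `topic_meters` topic and the `Meter` bean) are assumptions about the connector's TMQ API and the target topic, to be checked against the connector documentation rather than taken from this patch.

```java
import com.taosdata.jdbc.tmq.ConsumerRecord;
import com.taosdata.jdbc.tmq.ConsumerRecords;
import com.taosdata.jdbc.tmq.ReferenceDeserializer;
import com.taosdata.jdbc.tmq.TaosConsumer;

import java.sql.Timestamp;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class WsSubscribeSketch {
    // Hypothetical bean matching the columns of the subscribed topic.
    public static class Meter {
        private Timestamp ts;
        private float current;

        public Timestamp getTs() { return ts; }
        public void setTs(Timestamp ts) { this.ts = ts; }
        public float getCurrent() { return current; }
        public void setCurrent(float current) { this.current = current; }
    }

    // Maps each row of the topic onto the Meter bean.
    public static class MeterDeserializer extends ReferenceDeserializer<Meter> {
    }

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("td.connect.type", "ws");               // assumed key/value for the WebSocket transport
        props.setProperty("bootstrap.servers", "localhost:6041"); // taosAdapter address (assumption)
        props.setProperty("group.id", "group1");
        props.setProperty("client.id", "client1");
        props.setProperty("enable.auto.commit", "true");
        props.setProperty("auto.offset.reset", "latest");
        props.setProperty("value.deserializer", MeterDeserializer.class.getName());

        TaosConsumer<Meter> consumer = new TaosConsumer<>(props);
        consumer.subscribe(Collections.singletonList("topic_meters")); // hypothetical topic name
        for (int i = 0; i < 10; i++) {
            ConsumerRecords<Meter> records = consumer.poll(Duration.ofMillis(500));
            for (ConsumerRecord<Meter> record : records) {
                System.out.println("ts=" + record.value().getTs() + ", current=" + record.value().getCurrent());
            }
        }
        consumer.unsubscribe();
        consumer.close();
    }
}
```

In a real consumer the polling loop would run until shutdown, and commit handling would follow the automatic or manual commit behaviour described in the subscription docs above.
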
diff --git a/docs/zh/08-operation/18-dual.md b/docs/zh/08-operation/18-dual.md index c7871a8e1e..caddb7ab3b 100644 --- a/docs/zh/08-operation/18-dual.md +++ b/docs/zh/08-operation/18-dual.md @@ -30,9 +30,8 @@ toc_max_heading_level: 4 目前只有 Java 连接器在 WebSocket 连接模式下支持双活,其配置示例如下 ```java -url = "jdbc:TAOS-RS://" + host + ":6041/?user=root&password=taosdata"; +url = "jdbc:TAOS-WS://" + host + ":6041/?user=root&password=taosdata"; Properties properties = new Properties(); -properties.setProperty(TSDBDriver.PROPERTY_KEY_BATCH_LOAD, "true"); properties.setProperty(TSDBDriver.PROPERTY_KEY_SLAVE_CLUSTER_HOST, "192.168.1.11"); properties.setProperty(TSDBDriver.PROPERTY_KEY_SLAVE_CLUSTER_PORT, "6041"); properties.setProperty(TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT, "true"); @@ -43,13 +42,13 @@ connection = DriverManager.getConnection(url, properties); 其中的配置属性及含义如下表 -| 属性名 | 含义 | -| ----------------- | ------------------ | -| PROPERTY_KEY_SLAVE_CLUSTER_HOST | 第二节点的主机名或者 ip,默认空 | -| PROPERTY_KEY_SLAVE_CLUSTER_PORT | 第二节点的端口号,默认空 | -| PROPERTY_KEY_ENABLE_AUTO_RECONNECT | 是否启用自动重连。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。双活场景下请设置为 true | -| PROPERTY_KEY_RECONNECT_INTERVAL_MS | 重连的时间间隔,单位毫秒:默认 2000 毫秒,也就是 2 秒;最小值为 0, 表示立即重试;最大值不做限制 | -| PROPERTY_KEY_RECONNECT_RETRY_COUNT | 每节点最多重试次数:默认值为 3;最小值为 0,表示不进行重试;最大值不做限制 | +| 属性名 | 含义 | +| ---------------------------------- | ----------------------------------------------------------------------------------------------------------------- | +| PROPERTY_KEY_SLAVE_CLUSTER_HOST | 第二节点的主机名或者 ip,默认空 | +| PROPERTY_KEY_SLAVE_CLUSTER_PORT | 第二节点的端口号,默认空 | +| PROPERTY_KEY_ENABLE_AUTO_RECONNECT | 是否启用自动重连。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。双活场景下请设置为 true | +| PROPERTY_KEY_RECONNECT_INTERVAL_MS | 重连的时间间隔,单位毫秒:默认 2000 毫秒,也就是 2 秒;最小值为 0, 表示立即重试;最大值不做限制 | +| PROPERTY_KEY_RECONNECT_RETRY_COUNT | 每节点最多重试次数:默认值为 3;最小值为 0,表示不进行重试;最大值不做限制 | ### 约束条件 diff --git a/docs/zh/14-reference/01-components/03-taosadapter.md b/docs/zh/14-reference/01-components/03-taosadapter.md index a8e8fc3418..7d69cab598 100644 --- a/docs/zh/14-reference/01-components/03-taosadapter.md +++ b/docs/zh/14-reference/01-components/03-taosadapter.md @@ -306,7 +306,7 @@ http 返回内容: ## taosAdapter 监控指标 -taosAdapter 采集 REST/Websocket 相关请求的监控指标。将监控指标上报给 taosKeeper,这些监控指标会被 taosKeeper 写入监控数据库,默认是 `log` 库,可以在 taoskeeper 配置文件中修改。以下是这些监控指标的详细介绍。 +taosAdapter 采集 REST/WebSocket 相关请求的监控指标。将监控指标上报给 taosKeeper,这些监控指标会被 taosKeeper 写入监控数据库,默认是 `log` 库,可以在 taoskeeper 配置文件中修改。以下是这些监控指标的详细介绍。 #### adapter\_requests 表 @@ -331,7 +331,7 @@ taosAdapter 采集 REST/Websocket 相关请求的监控指标。将监控指标 | query\_in\_process | INT UNSIGNED | | 正在处理查询请求数 | | write\_in\_process | INT UNSIGNED | | 正在处理写入请求数 | | endpoint | VARCHAR | | 请求端点 | -| req\_type | NCHAR | TAG | 请求类型:0 为 REST,1 为 Websocket | +| req\_type | NCHAR | TAG | 请求类型:0 为 REST,1 为 WebSocket | ## 结果返回条数限制 diff --git a/docs/zh/14-reference/05-connector/10-cpp.mdx b/docs/zh/14-reference/05-connector/10-cpp.mdx index c618601fb9..7164baad2a 100644 --- a/docs/zh/14-reference/05-connector/10-cpp.mdx +++ b/docs/zh/14-reference/05-connector/10-cpp.mdx @@ -5,14 +5,14 @@ toc_max_heading_level: 4 --- C/C++ 开发人员可以使用 TDengine 的客户端驱动,即 C/C++连接器 (以下都用 TDengine 客户端驱动表示),开发自己的应用来连接 TDengine 集群完成数据存储、查询以及其他功能。TDengine 客户端驱动的 API 类似于 MySQL 的 C API。应用程序使用时,需要包含 TDengine 头文件,里面列出了提供的 API 的函数原型;应用程序还要链接到所在平台上对应的动态库。 -TDengine 的客户端驱动提供了 taosws 和 taos 两个动态库,分别支持 Websocket 连接和原生连接。 Websocket 连接和原生连接的区别是 Websocket 连接方式不要求客户端和服务端版本完全匹配,而原生连接要求,在性能上 Websocket 连接方式也接近于原生连接,一般我们推荐使用 Websocket 
连接方式。 +TDengine 的客户端驱动提供了 taosws 和 taos 两个动态库,分别支持 WebSocket 连接和原生连接。 WebSocket 连接和原生连接的区别是 WebSocket 连接方式不要求客户端和服务端版本完全匹配,而原生连接要求,在性能上 WebSocket 连接方式也接近于原生连接,一般我们推荐使用 WebSocket 连接方式。 下面我们分开介绍两种连接方式的使用方法。 -## Websocket 连接方式 +## WebSocket 连接方式 -Websocket 连接方式需要使用 taosws.h 头文件和 taosws 动态库。 +WebSocket 连接方式需要使用 taosws.h 头文件和 taosws 动态库。 ```c #include @@ -44,7 +44,7 @@ TDengine 客户端驱动的动态库位于: ### 错误码 在 C 接口的设计中,错误码采用整数类型表示,每个错误码都对应一个特定的错误状态。如未特别说明,当 API 的返回值是整数时,_0_ 代表成功,其它是代表失败原因的错误码,当返回值是指针时, _NULL_ 表示失败。 -Websocket 连接方式单独的错误码在 `taosws.h` 中, +WebSocket 连接方式单独的错误码在 `taosws.h` 中, | 错误码 | 错误描述 | 可能的出错场景或者可能的原因 | 建议用户采取的措施 | @@ -82,7 +82,7 @@ WebSocket 连接方式错误码只保留了原生连接错误码的后两个字 #### DSN -C/C++ Websocket 连接器通过 DSN 连接描述字符串来表示连接信息。 +C/C++ WebSocket 连接器通过 DSN 连接描述字符串来表示连接信息。 DSN 描述字符串基本结构如下: ```text @@ -96,16 +96,16 @@ DSN 描述字符串基本结构如下: - **driver**: 必须指定驱动名以便连接器选择何种方式创建连接,支持如下驱动名: - **taos**: 默认驱动,支持 SQL 执行,参数绑定,无模式写入。 - **tmq**: 使用 TMQ 订阅数据。 -- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 Websocket 方式建立连接。 - - **http/ws**: 使用 Websocket 协议。 - - **https/wss**: 在 Websocket 连接方式下显示启用 SSL/TLS 协议。 +- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 WebSocket 方式建立连接。 + - **http/ws**: 使用 WebSocket 协议。 + - **https/wss**: 在 WebSocket 连接方式下显示启用 SSL/TLS 协议。 - **username/password**: 用于创建连接的用户名及密码。 -- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时 Websocket 连接默认为 `localhost:6041` 。 +- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时 WebSocket 连接默认为 `localhost:6041` 。 - **database**: 指定默认连接的数据库名,可选参数。 - **params**:其他可选参数。 -一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 Websocket(`ws`)方式通过 `6041` 端口连接服务器 `localhost`,并指定默认数据库为 `test`。 +一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 WebSocket(`ws`)方式通过 `6041` 端口连接服务器 `localhost`,并指定默认数据库为 `test`。 #### 基础 API diff --git a/docs/zh/14-reference/05-connector/14-java.mdx b/docs/zh/14-reference/05-connector/14-java.mdx index ba4cb38afd..0a167dd5ee 100644 --- a/docs/zh/14-reference/05-connector/14-java.mdx +++ b/docs/zh/14-reference/05-connector/14-java.mdx @@ -33,14 +33,15 @@ REST 连接支持所有能运行 Java 的平台。 | taos-jdbcdriver 版本 | 主要变化 | TDengine 版本 | | :------------------: | :----------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------: | -| 3.3.3 | 1. 解决了 Websocket statement 关闭导致的内存泄漏 | - | -| 3.3.2 | 1. 优化 Websocket 连接下的参数绑定性能;2. 优化了对 mybatis 的支持 | - | -| 3.3.0 | 1. 优化 Websocket 连接下的数据传输性能;2. 支持跳过 SSL 验证,默认关闭 | 3.3.2.0 及更高版本 | +| 3.4.0 | 1. 使用 jackson 库替换 fastjson 库;2. WebSocket 采用独立协议标识;3. 优化后台拉取线程使用,避免用户误用导致超时。 | - | +| 3.3.3 | 1. 解决了 WebSocket statement 关闭导致的内存泄漏 | - | +| 3.3.2 | 1. 优化 WebSocket 连接下的参数绑定性能;2. 优化了对 mybatis 的支持 | - | +| 3.3.0 | 1. 优化 WebSocket 连接下的数据传输性能;2. 支持跳过 SSL 验证,默认关闭 | 3.3.2.0 及更高版本 | | 3.2.11 | 解决了 Native 连接关闭结果集 bug | - | -| 3.2.10 | 1. REST/WebSocket 连接支持传输中的数据压缩;2. Websocket 自动重连机制,默认关闭;3. Connection 类提供无模式写入的方法;4. 优化了原生连接的数据拉取性能;5. 修复了一些已知问题;6.元数据获取函数可以返回支持的函数列表。 | - | -| 3.2.9 | 解决了 Websocket prepareStatement 关闭 bug | - | -| 3.2.8 | 优化了自动提交, 解决了 websocket 手动提交 bug, 优化 Websocket prepareStatement 使用一个连接, 元数据支持视图 | - | -| 3.2.7 | 支持 VARBINARY 和 GEOMETRY 类型,增加 native 连接的时区设置支持。增加 websocket 自动重连功能。 | 3.2.0.0 及更高版本 | +| 3.2.10 | 1. REST/WebSocket 连接支持传输中的数据压缩;2. WebSocket 自动重连机制,默认关闭;3. Connection 类提供无模式写入的方法;4. 优化了原生连接的数据拉取性能;5. 
修复了一些已知问题;6.元数据获取函数可以返回支持的函数列表。 | - | +| 3.2.9 | 解决了 WebSocket prepareStatement 关闭 bug | - | +| 3.2.8 | 优化了自动提交, 解决了 WebSocket 手动提交 bug, 优化 WebSocket prepareStatement 使用一个连接, 元数据支持视图 | - | +| 3.2.7 | 支持 VARBINARY 和 GEOMETRY 类型,增加 native 连接的时区设置支持。增加 WebSocket 自动重连功能。 | 3.2.0.0 及更高版本 | | 3.2.5 | 数据订阅增加 committed()、assignment() 方法 | 3.1.0.3 及更高版本 | | 3.2.4 | 数据订阅在 WebSocket 连接下增加 enable.auto.commit 参数,以及 unsubscribe() 方法。 | - | | 3.2.3 | 修复 ResultSet 在一些情况数据解析失败 | - | @@ -195,17 +196,14 @@ WKB规范请参考[Well-Known Binary (WKB)](https://libgeos.org/specifications/w ## API 参考 ### JDBC 驱动 -taos-jdbcdriver 实现了 JDBC 标准的 Driver 接口,提供了两个实现类:RestfulDriver 和 TSDBDriver。 -Websocket 和 REST 连接使用驱动类 `com.taosdata.jdbc.rs.RestfulDriver`。原生连接使用驱动类 `com.taosdata.jdbc.TSDBDriver`。 - +taos-jdbcdriver 实现了 JDBC 标准的 Driver 接口,提供了 3 个实现类。 +- WebSocket 连接使用驱动类 `com.taosdata.jdbc.ws.WebSocketDriver`。 +- 原生连接使用驱动类 `com.taosdata.jdbc.TSDBDriver`。 +- REST 连接使用驱动类 `com.taosdata.jdbc.rs.RestfulDriver`。 #### URL 规范 TDengine 的 JDBC URL 规范格式为: -`jdbc:[TAOS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` - -对于建立连接,原生连接与 REST 连接有细微不同。 Websocket 和 REST 连接使用驱动类 `com.taosdata.jdbc.rs.RestfulDriver`。原生连接使用驱动类 `com.taosdata.jdbc.TSDBDriver`。 - -**注**:REST 连接中增加 `batchfetch` 参数并设置为 true,将开启 WebSocket 连接。 +`jdbc:[TAOS|TAOS-WS|TAOS-RS]://[host_name]:[port]/[database_name]?[user={user}|&password={password}|&charset={charset}|&cfgdir={config_dir}|&locale={locale}|&timezone={timezone}|&batchfetch={batchfetch}]` **原生连接** `jdbc:TAOS://taosdemo.com:6030/power?user=root&password=taosdata`,使用了 JDBC 原生连接的 TSDBDriver,建立了到 hostname 为 taosdemo.com,端口为 6030(TDengine 的默认端口),数据库名为 power 的连接。这个 URL @@ -234,23 +232,38 @@ TDengine 中,只要保证 firstEp 和 secondEp 中一个节点有效,就可 > **注意**:这里的配置文件指的是调用 JDBC Connector 的应用程序所在机器上的配置文件,Linux OS 上默认值 /etc/taos/taos.cfg ,Windows OS 上默认值 C://TDengine/cfg/taos.cfg。 +**WebSocket 连接** +使用 JDBC WebSocket 连接,不需要依赖客户端驱动。与 JDBC 原生连接相比,仅需要: -**Websocket 和 REST 连接** -使用 JDBC Websocket 或 REST 连接,不需要依赖客户端驱动。与 JDBC 原生连接相比,仅需要: +1. driverClass 指定为“com.taosdata.jdbc.ws.WebSocketDriver”; +2. jdbcUrl 以“jdbc:TAOS-WS://”开头; +3. 使用 6041 作为连接端口。 + +对于 WebSocket 连接,url 中的配置参数如下: +- user:登录 TDengine 用户名,默认值 'root'。 +- password:用户登录密码,默认值 'taosdata'。 +- charset: 当开启批量拉取数据时,指定解析字符串数据的字符集。 +- batchErrorIgnore:true:在执行 Statement 的 executeBatch 时,如果中间有一条 SQL 执行失败,继续执行下面的 SQL 了。false:不再执行失败 SQL 后的任何语句。默认值为:false。 +- httpConnectTimeout: 连接超时时间,单位 ms, 默认值为 60000。 +- messageWaitTimeout: 消息超时时间, 单位 ms, 默认值为 60000。 +- useSSL: 连接中是否使用 SSL。 + +**注意**:部分配置项(比如:locale、timezone)在 WebSocket 连接中不生效。 + +**REST 连接** +使用 JDBC REST 连接,不需要依赖客户端驱动。与 JDBC 原生连接相比,仅需要: 1. driverClass 指定为“com.taosdata.jdbc.rs.RestfulDriver”; 2. jdbcUrl 以“jdbc:TAOS-RS://”开头; 3. 
使用 6041 作为连接端口。 -对于 Websocket 和 REST 连接,url 中的配置参数如下: +对于 REST 连接,url 中的配置参数如下: - user:登录 TDengine 用户名,默认值 'root'。 - password:用户登录密码,默认值 'taosdata'。 -- batchfetch: true:在执行查询时批量拉取结果集;false:逐行拉取结果集。默认值为:false。逐行拉取结果集使用 HTTP 方式进行数据传输。JDBC REST 连接支持批量拉取数据功能。taos-jdbcdriver 与 TDengine 之间通过 WebSocket 连接进行数据传输。相较于 HTTP,WebSocket 可以使 JDBC REST 连接支持大数据量查询,并提升查询性能。 - charset: 当开启批量拉取数据时,指定解析字符串数据的字符集。 - batchErrorIgnore:true:在执行 Statement 的 executeBatch 时,如果中间有一条 SQL 执行失败,继续执行下面的 SQL 了。false:不再执行失败 SQL 后的任何语句。默认值为:false。 - httpConnectTimeout: 连接超时时间,单位 ms, 默认值为 60000。 -- httpSocketTimeout: socket 超时时间,单位 ms,默认值为 60000。仅在 batchfetch 设置为 false 时生效。 -- messageWaitTimeout: 消息超时时间, 单位 ms, 默认值为 60000。 仅在 batchfetch 设置为 true 时生效。 +- httpSocketTimeout: socket 超时时间,单位 ms,默认值为 60000。 - useSSL: 连接中是否使用 SSL。 - httpPoolSize: REST 并发请求大小,默认 20。 @@ -272,7 +285,7 @@ TDengine 中,只要保证 firstEp 和 secondEp 中一个节点有效,就可 properties 中的配置参数如下: - TSDBDriver.PROPERTY_KEY_USER:登录 TDengine 用户名,默认值 'root'。 - TSDBDriver.PROPERTY_KEY_PASSWORD:用户登录密码,默认值 'taosdata'。 -- TSDBDriver.PROPERTY_KEY_BATCH_LOAD: true:在执行查询时批量拉取结果集;false:逐行拉取结果集。默认值为:false。 +- TSDBDriver.PROPERTY_KEY_BATCH_LOAD: true:在执行查询时批量拉取结果集;false:逐行拉取结果集。默认值为:false。因历史原因使用 REST 连接时,若设置此参数为 true 会变成 WebSocket 连接。 - TSDBDriver.PROPERTY_KEY_BATCH_ERROR_IGNORE:true:在执行 Statement 的 executeBatch 时,如果中间有一条 SQL 执行失败,继续执行下面的 sq 了。false:不再执行失败 SQL 后的任何语句。默认值为:false。 - TSDBDriver.PROPERTY_KEY_CONFIG_DIR:仅在使用 JDBC 原生连接时生效。客户端配置文件目录路径,Linux OS 上默认值 `/etc/taos`,Windows OS 上默认值 `C:/TDengine/cfg`。 - TSDBDriver.PROPERTY_KEY_CHARSET:客户端使用的字符集,默认值为系统字符集。 @@ -280,16 +293,16 @@ properties 中的配置参数如下: - TSDBDriver.PROPERTY_KEY_TIME_ZONE:仅在使用 JDBC 原生连接时生效。 客户端使用的时区,默认值为系统当前时区。因为历史的原因,我们只支持POSIX标准的部分规范,如UTC-8(代表中国上上海), GMT-8,Asia/Shanghai 这几种形式。 - TSDBDriver.HTTP_CONNECT_TIMEOUT: 连接超时时间,单位 ms, 默认值为 60000。仅在 REST 连接时生效。 - TSDBDriver.HTTP_SOCKET_TIMEOUT: socket 超时时间,单位 ms,默认值为 60000。仅在 REST 连接且 batchfetch 设置为 false 时生效。 -- TSDBDriver.PROPERTY_KEY_MESSAGE_WAIT_TIMEOUT: 消息超时时间, 单位 ms, 默认值为 60000。 仅在 REST 连接且 batchfetch 设置为 true 时生效。 -- TSDBDriver.PROPERTY_KEY_USE_SSL: 连接中是否使用 SSL。仅在 REST 连接时生效。 +- TSDBDriver.PROPERTY_KEY_MESSAGE_WAIT_TIMEOUT: 消息超时时间, 单位 ms, 默认值为 60000。 仅 WebSocket 连接下有效。 +- TSDBDriver.PROPERTY_KEY_USE_SSL: 连接中是否使用 SSL。仅在 WebSocket/REST 连接时生效。 - TSDBDriver.HTTP_POOL_SIZE: REST 并发请求大小,默认 20。 -- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 REST/Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 -- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 REST/WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 > **注意**:启用自动重连仅对简单执行 SQL 语句以及 无模式写入、数据订阅有效。对于参数绑定无效。自动重连仅对连接建立时通过参数指定数据库有效,对后面的 `use db` 语句切换数据库无效。 - TSDBDriver.PROPERTY_KEY_RECONNECT_INTERVAL_MS: 自动重连重试间隔,单位毫秒,默认值 2000。仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 - TSDBDriver.PROPERTY_KEY_RECONNECT_RETRY_COUNT: 自动重连重试次数,默认值 3,仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 -- TSDBDriver.PROPERTY_KEY_DISABLE_SSL_CERT_VALIDATION: 关闭 SSL 证书验证 。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_DISABLE_SSL_CERT_VALIDATION: 关闭 SSL 证书验证 。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 此外对 JDBC 原生连接,通过指定 URL 和 Properties 还可以指定其他参数,比如日志级别、SQL 长度等。 @@ -1154,7 +1167,7 @@ JDBC 驱动支持标准的 ResultSet 接口,提供了用于读取结果集中 PreparedStatement 允许使用预编译的 SQL 语句,这可以提高性能并提供参数化查询的能力,从而增加安全性。 
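
To make the PreparedStatement discussion concrete, a minimal sketch of binding one row over a WebSocket connection using only the standard JDBC calls that the demo programs earlier in this patch already use; the host, the `power` database and the pre-existing `d1001` subtable are assumptions for illustration.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Timestamp;

public class WsBindSketch {
    public static void main(String[] args) throws Exception {
        // Assumes taosAdapter on localhost:6041 and that power.d1001 already exists.
        String url = "jdbc:TAOS-WS://localhost:6041?user=root&password=taosdata";
        try (Connection conn = DriverManager.getConnection(url);
             PreparedStatement pstmt = conn.prepareStatement(
                     "INSERT INTO power.d1001 (ts, current, voltage, phase) VALUES (?, ?, ?, ?)")) {
            // Bind one row; addBatch()/executeBatch() follow the same pattern as the demos above.
            pstmt.setTimestamp(1, new Timestamp(System.currentTimeMillis()));
            pstmt.setFloat(2, 10.2f);
            pstmt.setInt(3, 220);
            pstmt.setFloat(4, 0.23f);
            pstmt.addBatch();
            pstmt.executeBatch();
        }
    }
}
```
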
JDBC 驱动提供了实现 PreparedStatement 接口的两个类: 1. 对应原生连接的 TSDBPreparedStatement -2. 对应 Websocket 连接的 TSWSPreparedStatement +2. 对应 WebSocket 连接的 TSWSPreparedStatement 因 JDBC 标准没有高性能绑定数据的接口,TSDBPreparedStatement 和 TSWSPreparedStatement 都新增了一些方法,用来扩展参数绑定能力。 > **注意**:由于 PreparedStatement 继承了 Statement 接口,因此对于这部分重复的接口不再赘述,请参考 Statement 接口中对应描述。 @@ -1347,8 +1360,8 @@ JDBC 标准不支持数据订阅,因此本章所有接口都是扩展接口。 - httpConnectTimeout: 创建连接超时参数,单位 ms,默认为 5000 ms。仅在 WebSocket 连接下有效。 - messageWaitTimeout: 数据传输超时参数,单位 ms,默认为 10000 ms。仅在 WebSocket 连接下有效。 - httpPoolSize: 同一个连接下最大并行请求数。仅在 WebSocket 连接下有效。 -- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 -- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 Websocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_COMPRESSION: 传输过程是否启用压缩。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 +- TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT: 是否启用自动重连。仅在使用 WebSocket 连接时生效。true: 启用,false: 不启用。默认为 false。 - TSDBDriver.PROPERTY_KEY_RECONNECT_INTERVAL_MS: 自动重连重试间隔,单位毫秒,默认值 2000。仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 - TSDBDriver.PROPERTY_KEY_RECONNECT_RETRY_COUNT: 自动重连重试次数,默认值 3,仅在 PROPERTY_KEY_ENABLE_AUTO_RECONNECT 为 true 时生效。 diff --git a/docs/zh/14-reference/05-connector/26-rust.mdx b/docs/zh/14-reference/05-connector/26-rust.mdx index 1fcc1e3dcd..cfabed4c61 100644 --- a/docs/zh/14-reference/05-connector/26-rust.mdx +++ b/docs/zh/14-reference/05-connector/26-rust.mdx @@ -18,9 +18,9 @@ import RequestId from "./_request_id.mdx"; ## 连接方式 -`taos` 提供两种建立连接的方式。一般我们推荐使用 **Websocket 连接**。 +`taos` 提供两种建立连接的方式。一般我们推荐使用 **WebSocket 连接**。 - **原生连接**,它通过 TDengine 客户端驱动程序(taosc)连接 TDengine 运行实例。 -- **Websocket 连接**,它通过 taosAdapter 的 Websocket 接口连接 TDengine 运行实例。 +- **WebSocket 连接**,它通过 taosAdapter 的 WebSocket 接口连接 TDengine 运行实例。 你可以通过不同的 “特性(即 Cargo 关键字 `features`)” 来指定使用哪种连接器(默认同时支持)。 @@ -29,13 +29,13 @@ import RequestId from "./_request_id.mdx"; ## 支持的平台 原生连接支持的平台和 TDengine 客户端驱动支持的平台一致。 -Websocket 连接支持所有能运行 Rust 的平台。 +WebSocket 连接支持所有能运行 Rust 的平台。 ## 版本历史 | Rust 连接器版本 | TDengine 版本 | 主要功能 | | :----------------: | :--------------: | :--------------------------------------------------: | -| v0.12.3 | 3.3.0.0 or later | 优化了 Websocket 查询和插入性能,支持了 VARBINARY 和 GEOMETRY 类型 | +| v0.12.3 | 3.3.0.0 or later | 优化了 WebSocket 查询和插入性能,支持了 VARBINARY 和 GEOMETRY 类型 | | v0.12.0 | 3.2.3.0 or later | WS 支持压缩。 | | v0.11.0 | 3.2.0.0 | TMQ 功能优化。 | | v0.10.0 | 3.1.0.0 | WS endpoint 变更。 | @@ -115,15 +115,15 @@ DSN 描述字符串基本结构如下: - **driver**: 必须指定驱动名以便连接器选择何种方式创建连接,支持如下驱动名: - **taos**: 使用 TDengine 连接器驱动,默认是使用 taos 驱动。 - **tmq**: 使用 TMQ 订阅数据。 -- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 Websocket 方式建立连接。 - - **http/ws**: 使用 Websocket 创建连接。 - - **https/wss**: 在 Websocket 连接方式下显示启用 SSL/TLS 连接。 +- **protocol**: 显示指定以何种方式建立连接,例如:`taos+ws://localhost:6041` 指定以 WebSocket 方式建立连接。 + - **http/ws**: 使用 WebSocket 创建连接。 + - **https/wss**: 在 WebSocket 连接方式下显示启用 SSL/TLS 连接。 - **username/password**: 用于创建连接的用户名及密码。 -- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时(`taos://`),原生连接默认为 `localhost:6030`,Websocket 连接默认为 `localhost:6041` 。 +- **host/port**: 指定创建连接的服务器及端口,当不指定服务器地址及端口时(`taos://`),原生连接默认为 `localhost:6030`,WebSocket 连接默认为 `localhost:6041` 。 - **database**: 指定默认连接的数据库名,可选参数。 - **params**:其他可选参数。 -一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 Websocket(`ws`)方式通过 `6041` 端口连接服务器 `localhost`,并指定默认数据库为 `test`。 +一个完整的 DSN 描述字符串示例如下:`taos+ws://localhost:6041/test`, 表示使用 WebSocket(`ws`)方式通过 `6041` 端口连接服务器 
`localhost`,并指定默认数据库为 `test`。 #### TaosBuilder TaosBuilder 结构体主要提供了根据 DSN 构建 Taos 对象的方法,还提供了检查连接,以及获取客户端版本号等功能。 diff --git a/docs/zh/14-reference/05-connector/30-python.mdx b/docs/zh/14-reference/05-connector/30-python.mdx index 8e08bfc103..8436c30249 100644 --- a/docs/zh/14-reference/05-connector/30-python.mdx +++ b/docs/zh/14-reference/05-connector/30-python.mdx @@ -14,10 +14,10 @@ import RequestId from "./_request_id.mdx"; Python 连接器的源码托管在 [GitHub](https://github.com/taosdata/taos-connector-python)。 ## 连接方式 -`taospy`主要提供三种形式的连接器。一般我们推荐使用 **Websocket 连接**。 +`taospy`主要提供三种形式的连接器。一般我们推荐使用 **WebSocket 连接**。 - **原生连接**,对应 `taospy` 包的 `taos` 模块。通过 TDengine 客户端驱动程序(taosc)原生连接 TDengine 实例,支持数据写入、查询、数据订阅、schemaless 接口和参数绑定接口等功能。 - **REST 连接**,对应 `taospy` 包的 `taosrest` 模块。通过 taosAdapter 提供的 HTTP 接口连接 TDengine 实例,不支持 schemaless 和数据订阅等特性。 -- **Websocket 连接**,对应 `taos-ws-py` 包,可以选装。通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。 +- **WebSocket 连接**,对应 `taos-ws-py` 包,可以选装。通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。 连接方式的详细介绍请参考:[连接方式](../../../develop/connect/#连接方式) @@ -48,9 +48,9 @@ Python 连接器的源码托管在 [GitHub](https://github.com/taosdata/taos-con |2.7.9|数据订阅支持获取消费进度和重置消费进度| |2.7.8|新增 `execute_many`| -|Python Websocket Connector 版本|主要变化| +|Python WebSocket Connector 版本|主要变化| |:----------------------------:|:-----:| -|0.3.2|优化 Websocket sql 查询和插入性能,修改 readme 和 文档,修复已知问题| +|0.3.2|优化 WebSocket sql 查询和插入性能,修改 readme 和 文档,修复已知问题| |0.2.9|已知问题修复| |0.2.5|1. 数据订阅支持获取消费进度和重置消费进度
2. 支持 schemaless
3. 支持 STMT| |0.2.4|数据订阅新增取消订阅方法| diff --git a/docs/zh/14-reference/05-connector/35-node.mdx b/docs/zh/14-reference/05-connector/35-node.mdx index bd2ca537e3..6ac34d2471 100644 --- a/docs/zh/14-reference/05-connector/35-node.mdx +++ b/docs/zh/14-reference/05-connector/35-node.mdx @@ -14,7 +14,7 @@ Node.js 连接器源码托管在 [GitHub](https://github.com/taosdata/taos-conne ## 连接方式 -Node.js 连接器目前仅支持 Websocket 连接器, 其通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例。 +Node.js 连接器目前仅支持 WebSocket 连接器, 其通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例。 连接方式的详细介绍请参考:[连接方式](../../../develop/connect/#连接方式) @@ -48,7 +48,7 @@ Node.js 连接器目前仅支持 Websocket 连接器, 其通过 taosAdapter | 107 | unknown sql type in tdengine | 请检查 TDengine 支持的 Data Type 类型。 | | 108 | connection has been closed | 连接已经关闭,请检查 Connection 是否关闭后再次使用,或是连接是否正常。 | | 109 | fetch block data parse fail | 获取到的查询数据,解析失败 | -| 110 | websocket connection has reached its maximum limit | Websocket 连接达到上限 | +| 110 | websocket connection has reached its maximum limit | WebSocket 连接达到上限 | - [TDengine Node.js Connector Error Code](https://github.com/taosdata/taos-connector-node/blob/main/nodejs/src/common/wsError.ts) - TDengine 其他功能模块的报错,请参考 [错误码](../../../reference/error-code) @@ -104,7 +104,7 @@ Node.js 连接器目前仅支持 Websocket 连接器, 其通过 taosAdapter ## API 参考 -Node.js 连接器(`@tdengine/websocket`), 其通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例。 +Node.js 连接器(`@tdengine/websocket`), 其通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例。 ### URL 规范 diff --git a/docs/zh/14-reference/05-connector/40-csharp.mdx b/docs/zh/14-reference/05-connector/40-csharp.mdx index 93f592fdd0..e4e778eeff 100644 --- a/docs/zh/14-reference/05-connector/40-csharp.mdx +++ b/docs/zh/14-reference/05-connector/40-csharp.mdx @@ -14,7 +14,7 @@ import RequestId from "./_request_id.mdx"; `TDengine.Connector` 提供两种形式的连接器 * **原生连接**,通过 TDengine 客户端驱动程序(taosc)原生连接 TDengine 实例,支持数据写入、查询、数据订阅、schemaless 接口和参数绑定接口等功能。 -* **Websocket 连接**,通过 taosAdapter 提供的 Websocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。(自 v3.0.1 起) +* **WebSocket 连接**,通过 taosAdapter 提供的 WebSocket 接口连接 TDengine 实例,WebSocket 连接实现的功能集合和原生连接有少量不同。(自 v3.0.1 起) 连接方式的详细介绍请参考:[连接方式](../../../develop/connect/#连接方式) diff --git a/docs/zh/14-reference/05-connector/index.md b/docs/zh/14-reference/05-connector/index.md index 04a2ef6c1f..bd2cff6a3d 100644 --- a/docs/zh/14-reference/05-connector/index.md +++ b/docs/zh/14-reference/05-connector/index.md @@ -62,7 +62,7 @@ TDengine 版本更新往往会增加新的功能特性,列表中的连接器 | **连接管理** | 支持 | 支持 | 支持 | | **执行 SQL** | 支持 | 支持 | 支持 | -### 使用 Websocket 接口 +### 使用 WebSocket 接口 | **功能特性** | **Java** | **Python** | **Go** | **C#** | **Node.js** | **Rust** | **C/C++** | | ------------------- | -------- | ---------- | ------ | ------ | ----------- | -------- | --------- | From 343421f8060579dc7dc86db7d38d749b6e4746f9 Mon Sep 17 00:00:00 2001 From: sheyanjie-qq <249478495@qq.com> Date: Fri, 18 Oct 2024 17:28:58 +0800 Subject: [PATCH 036/102] update jdbc driver version --- docs/examples/JDBC/JDBCDemo/pom.xml | 2 +- docs/examples/JDBC/connectionPools/pom.xml | 2 +- docs/examples/JDBC/consumer-demo/pom.xml | 2 +- docs/examples/JDBC/taosdemo/pom.xml | 2 +- docs/examples/java/pom.xml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/examples/JDBC/JDBCDemo/pom.xml b/docs/examples/JDBC/JDBCDemo/pom.xml index 315b147cce..4b3e1ab675 100644 --- a/docs/examples/JDBC/JDBCDemo/pom.xml +++ b/docs/examples/JDBC/JDBCDemo/pom.xml @@ -19,7 +19,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 org.locationtech.jts diff --git 
a/docs/examples/JDBC/connectionPools/pom.xml b/docs/examples/JDBC/connectionPools/pom.xml index 1791bfe8bc..e3ef30d2f8 100644 --- a/docs/examples/JDBC/connectionPools/pom.xml +++ b/docs/examples/JDBC/connectionPools/pom.xml @@ -18,7 +18,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 diff --git a/docs/examples/JDBC/consumer-demo/pom.xml b/docs/examples/JDBC/consumer-demo/pom.xml index dcabfc1249..709f87d9c1 100644 --- a/docs/examples/JDBC/consumer-demo/pom.xml +++ b/docs/examples/JDBC/consumer-demo/pom.xml @@ -17,7 +17,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 com.google.guava diff --git a/docs/examples/JDBC/taosdemo/pom.xml b/docs/examples/JDBC/taosdemo/pom.xml index ffe159ea49..ab5912aa9e 100644 --- a/docs/examples/JDBC/taosdemo/pom.xml +++ b/docs/examples/JDBC/taosdemo/pom.xml @@ -67,7 +67,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 diff --git a/docs/examples/java/pom.xml b/docs/examples/java/pom.xml index f23b73177e..e1a9504249 100644 --- a/docs/examples/java/pom.xml +++ b/docs/examples/java/pom.xml @@ -22,7 +22,7 @@ com.taosdata.jdbc taos-jdbcdriver - 3.3.3 + 3.4.0 From 93e65a308ed8dc6024fbd33075b44c7341c9db34 Mon Sep 17 00:00:00 2001 From: Yu Chen <74105241+yu285@users.noreply.github.com> Date: Mon, 21 Oct 2024 09:57:53 +0800 Subject: [PATCH 037/102] docs:Update 02-insert.md --- docs/zh/05-basic/02-insert.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/zh/05-basic/02-insert.md b/docs/zh/05-basic/02-insert.md index 0c7ffd86a4..88d131e832 100644 --- a/docs/zh/05-basic/02-insert.md +++ b/docs/zh/05-basic/02-insert.md @@ -111,7 +111,7 @@ TDengine 还支持直接向超级表写入数据。需要注意的是,超级 ```sql insert into meters (tbname, ts, current, voltage, phase, location, group_id) -values( "d1001v, "2018-10-03 14:38:05", 10.2, 220, 0.23, "California.SanFrancisco", 2) +values( "d1001, "2018-10-03 14:38:05", 10.2, 220, 0.23, "California.SanFrancisco", 2) ``` ### 零代码写入 From 65ef0910f09d5ded998d828078b031645d2a558c Mon Sep 17 00:00:00 2001 From: Yu Chen <74105241+yu285@users.noreply.github.com> Date: Mon, 21 Oct 2024 10:24:10 +0800 Subject: [PATCH 038/102] Update 01-data-type.md --- docs/zh/14-reference/03-taos-sql/01-data-type.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/zh/14-reference/03-taos-sql/01-data-type.md b/docs/zh/14-reference/03-taos-sql/01-data-type.md index f33592366a..54106cc578 100644 --- a/docs/zh/14-reference/03-taos-sql/01-data-type.md +++ b/docs/zh/14-reference/03-taos-sql/01-data-type.md @@ -42,8 +42,8 @@ CREATE DATABASE db_name PRECISION 'ns'; | 14 | NCHAR | 自定义 | 记录包含多字节字符在内的字符串,如中文字符。每个 NCHAR 字符占用 4 字节的存储空间。字符串两端使用单引号引用,字符串内的单引号需用转义字符 `\'`。NCHAR 使用时须指定字符串大小,类型为 NCHAR(10) 的列表示此列的字符串最多存储 10 个 NCHAR 字符。如果用户字符串长度超出声明长度,将会报错。 | | 15 | JSON | | JSON 数据类型, 只有 Tag 可以是 JSON 格式 | | 16 | VARCHAR | 自定义 | BINARY 类型的别名 | -| 17 | GEOMETRY | 自定义 | 几何类型 -| 18 | VARBINARY | 自定义 | 可变长的二进制数据| +| 17 | GEOMETRY | 自定义 | 几何类型,3.1.0.0 版本开始支持 +| 18 | VARBINARY | 自定义 | 可变长的二进制数据, 3.1.1.0 版本开始支持| :::note From 74dc901dda20846c2465841d1a41fe47d268c62e Mon Sep 17 00:00:00 2001 From: Yu Chen <74105241+yu285@users.noreply.github.com> Date: Mon, 21 Oct 2024 11:12:19 +0800 Subject: [PATCH 039/102] Update 02-taosc.md --- docs/zh/14-reference/01-components/02-taosc.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/zh/14-reference/01-components/02-taosc.md b/docs/zh/14-reference/01-components/02-taosc.md index 32baac9a3b..5f22ebe8d5 100755 --- a/docs/zh/14-reference/01-components/02-taosc.md +++ 
b/docs/zh/14-reference/01-components/02-taosc.md @@ -10,7 +10,7 @@ TDengine 客户端驱动提供了应用编程所需要的全部 API,并且在 | 参数名称 | 参数含义 | |:-----------:|:----------------------------------------------------------:| -|firstEp | taos 启动时,主动连接的集群中首个 dnode 的 endpoint,缺省值:localhost:6030 | +|firstEp | taos 启动时,主动连接的集群中首个 dnode 的 endpoint,缺省值:${hostname}:6030,若无法获取 ${hostname},则赋值为 localhost | |secondEp | 启动时,如果 firstEp 连接不上,尝试连接集群中第二个 dnode 的 endpoint,没有缺省值 | |numOfRpcSessions | 一个客户端能创建的最大连接数,取值范围:10-50000000(单位为毫秒);缺省值:500000 | |telemetryReporting | 是否上传 telemetry,0: 不上传,1: 上传;缺省值:1 | From 35c200e6ecb06af506ec1d398487a135ad5db55e Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Mon, 21 Oct 2024 17:14:55 +0800 Subject: [PATCH 040/102] tcs/test: ut for s3 based tcs --- source/libs/azure/src/az.cpp | 2 +- source/libs/tcs/test/tcsTest.cpp | 242 ++++++++++++++++++++++++++----- 2 files changed, 209 insertions(+), 35 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 7e4b711fb4..1422705011 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -444,7 +444,7 @@ int32_t azGetObjectsByPrefix(const char *prefix, const char *path) { } else { (void)snprintf(fileName, PATH_MAX, "%s%s", path, tmp); } - if (!azGetObjectToFile(blobName.c_str(), fileName)) { + if (azGetObjectToFile(blobName.c_str(), fileName)) { TAOS_RETURN(TSDB_CODE_FAILED); } } diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index 33566f6400..d07513c644 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -9,8 +9,6 @@ int32_t tcsInitEnv(int8_t isBlob) { int32_t code = 0; - extern int8_t tsS3EpNum; - extern char tsS3Hostname[][TSDB_FQDN_LEN]; extern char tsS3AccessKeyId[][TSDB_FQDN_LEN]; extern char tsS3AccessKeySecret[][TSDB_FQDN_LEN]; @@ -36,10 +34,22 @@ int32_t tcsInitEnv(int8_t isBlob) { tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); } else { + /* const char *hostname = "endpoint/.blob.core.windows.net"; const char *accessKeyId = ""; const char *accessKeySecret = ""; const char *bucketName = ""; + */ + + // const char *hostname = "http://192.168.1.52:9000"; + // const char *accessKeyId = "zOgllR6bSnw2Ah3mCNel"; + // const char *accessKeySecret = "cdO7oXAu3Cqdb1rUdevFgJMi0LtRwCXdWKQx4bhX"; + // const char *bucketName = "test-bucket"; + const char *hostname = "192.168.1.52:9000"; + const char *accessKeyId = "fGPPyYjzytw05nw44ViA"; + const char *accessKeySecret = "vK1VcwxgSOykicx6hk8fL1x15uEtyDSFU3w4hTaZ"; + + const char *bucketName = "ci-bucket19"; tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); @@ -47,7 +57,11 @@ int32_t tcsInitEnv(int8_t isBlob) { tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); // setup s3 env + extern int8_t tsS3EpNum; + extern int8_t tsS3Https[TSDB_MAX_EP_NUM]; + tsS3EpNum = 1; + tsS3Https[0] = false; } tstrncpy(tsTempDir, "/tmp/", PATH_MAX); @@ -61,7 +75,9 @@ int32_t tcsInitEnv(int8_t isBlob) { TEST(TcsTest, DISABLED_InterfaceTest) { // TEST(TcsTest, InterfaceTest) { - int code = 0; + int code = 0; + bool check = false; + bool withcp = false; code = tcsInitEnv(true); GTEST_ASSERT_EQ(code, 0); @@ -73,33 +89,113 @@ TEST(TcsTest, DISABLED_InterfaceTest) { code = tcsCheckCfg(); GTEST_ASSERT_EQ(code, 0); - /* - code = tcsPutObjectFromFileOffset(file, object_name, offset, size); - GTEST_ASSERT_EQ(code, 0); - code = tcsGetObjectBlock(object_name, offset, size, check, ppBlock); + + const int size = 4096; + char data[size] = {0}; + for 
(int i = 0; i < size / 2; ++i) { + data[i * 2 + 1] = 1; + } + + const char object_name[] = "tcsut.bin"; + char path[PATH_MAX] = {0}; + char path_download[PATH_MAX] = {0}; + int ds_len = strlen(TD_DIRSEP); + int tmp_len = strlen(tsTempDir); + + (void)snprintf(path, PATH_MAX, "%s", tsTempDir); + if (strncmp(tsTempDir + tmp_len - ds_len, TD_DIRSEP, ds_len) != 0) { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", TD_DIRSEP); + (void)snprintf(path + tmp_len + ds_len, PATH_MAX - tmp_len - ds_len, "%s", object_name); + } else { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", object_name); + } + + tstrncpy(path_download, path, strlen(path) + 1); + tstrncpy(path_download + strlen(path), ".download", strlen(".download") + 1); + + TdFilePtr fp = taosOpenFile(path, TD_FILE_WRITE | TD_FILE_CREATE | TD_FILE_WRITE_THROUGH); + GTEST_ASSERT_NE(fp, nullptr); + + int n = taosWriteFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); GTEST_ASSERT_EQ(code, 0); - tcsDeleteObjectsByPrefix(prefix); + code = tcsPutObjectFromFileOffset(path, object_name, 0, size); + GTEST_ASSERT_EQ(code, 0); + + uint8_t *pBlock = NULL; + code = tcsGetObjectBlock(object_name, 0, size, check, &pBlock); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(pBlock[i * 2], 0); + GTEST_ASSERT_EQ(pBlock[i * 2 + 1], 1); + } + + taosMemoryFree(pBlock); + + code = tcsGetObjectToFile(object_name, path_download); + GTEST_ASSERT_EQ(code, 0); + + { + TdFilePtr fp = taosOpenFile(path, TD_FILE_READ); + GTEST_ASSERT_NE(fp, nullptr); + + (void)memset(data, 0, size); + + int64_t n = taosReadFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(data[i * 2], 0); + GTEST_ASSERT_EQ(data[i * 2 + 1], 1); + } + } + + tcsDeleteObjectsByPrefix(object_name); // list object to check - code = tcsPutObjectFromFile2(file, object, withcp); - GTEST_ASSERT_EQ(code, 0); - code = tcsGetObjectsByPrefix(prefix, path); - GTEST_ASSERT_EQ(code, 0); - code = tcsDeleteObjects(object_name, nobject); - GTEST_ASSERT_EQ(code, 0); - code = tcsGetObjectToFile(object_name, fileName); + code = tcsPutObjectFromFile2(path, object_name, withcp); GTEST_ASSERT_EQ(code, 0); - // GTEST_ASSERT_NE(pEnv, nullptr); - */ + code = tcsGetObjectsByPrefix(object_name, tsTempDir); + GTEST_ASSERT_EQ(code, 0); + + { + TdFilePtr fp = taosOpenFile(path, TD_FILE_READ); + GTEST_ASSERT_NE(fp, nullptr); + + (void)memset(data, 0, size); + + int64_t n = taosReadFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(data[i * 2], 0); + GTEST_ASSERT_EQ(data[i * 2 + 1], 1); + } + } + + const char *object_name_arr[] = {object_name}; + code = tcsDeleteObjects(object_name_arr, 1); + GTEST_ASSERT_EQ(code, 0); tcsUninit(); } -TEST(TcsTest, DISABLED_InterfaceNonBlobTest) { - // TEST(TcsTest, InterfaceNonBlobTest) { - int code = 0; +// TEST(TcsTest, DISABLED_InterfaceNonBlobTest) { +TEST(TcsTest, InterfaceNonBlobTest) { + int code = 0; + bool check = false; + bool withcp = false; code = tcsInitEnv(false); GTEST_ASSERT_EQ(code, 0); @@ -111,26 +207,104 @@ TEST(TcsTest, DISABLED_InterfaceNonBlobTest) { code = tcsCheckCfg(); GTEST_ASSERT_EQ(code, 0); - /* - code = tcsPutObjectFromFileOffset(file, object_name, offset, size); - GTEST_ASSERT_EQ(code, 0); - code = tcsGetObjectBlock(object_name, offset, size, check, ppBlock); + 
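The non-blob test that follows repeats the same upload / read-back sequence as the blob test above. A shared helper along these lines could fold that duplication; this is a sketch only, the name tcsRoundTripCheck is made up for illustration, and it assumes the tcs entry points and GoogleTest macros already used in this file:

    // Sketch of a shared round-trip check; not part of the patch itself.
    static void tcsRoundTripCheck(const char *path, const char *object_name, int size) {
      int32_t code = tcsPutObjectFromFileOffset(path, object_name, 0, size);
      GTEST_ASSERT_EQ(code, 0);

      uint8_t *pBlock = NULL;
      code = tcsGetObjectBlock(object_name, 0, size, false, &pBlock);
      GTEST_ASSERT_EQ(code, 0);

      // the tests write alternating 0x00/0x01 bytes, so verify that pattern
      for (int i = 0; i < size / 2; ++i) {
        GTEST_ASSERT_EQ(pBlock[i * 2], 0);
        GTEST_ASSERT_EQ(pBlock[i * 2 + 1], 1);
      }
      taosMemoryFree(pBlock);
    }

The inline form used in the patch keeps every return-code assertion at its call site, which is also a reasonable choice for a unit test.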
+ const int size = 4096; + char data[size] = {0}; + for (int i = 0; i < size / 2; ++i) { + data[i * 2 + 1] = 1; + } + + const char object_name[] = "tcsut.bin"; + char path[PATH_MAX] = {0}; + char path_download[PATH_MAX] = {0}; + int ds_len = strlen(TD_DIRSEP); + int tmp_len = strlen(tsTempDir); + + (void)snprintf(path, PATH_MAX, "%s", tsTempDir); + if (strncmp(tsTempDir + tmp_len - ds_len, TD_DIRSEP, ds_len) != 0) { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", TD_DIRSEP); + (void)snprintf(path + tmp_len + ds_len, PATH_MAX - tmp_len - ds_len, "%s", object_name); + } else { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", object_name); + } + + tstrncpy(path_download, path, strlen(path) + 1); + tstrncpy(path_download + strlen(path), ".download", strlen(".download") + 1); + + TdFilePtr fp = taosOpenFile(path, TD_FILE_WRITE | TD_FILE_CREATE | TD_FILE_WRITE_THROUGH); + GTEST_ASSERT_NE(fp, nullptr); + + int n = taosWriteFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); GTEST_ASSERT_EQ(code, 0); - tcsDeleteObjectsByPrefix(prefix); + code = tcsPutObjectFromFileOffset(path, object_name, 0, size); + GTEST_ASSERT_EQ(code, 0); + + uint8_t *pBlock = NULL; + code = tcsGetObjectBlock(object_name, 0, size, check, &pBlock); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(pBlock[i * 2], 0); + GTEST_ASSERT_EQ(pBlock[i * 2 + 1], 1); + } + + taosMemoryFree(pBlock); + + code = tcsGetObjectToFile(object_name, path_download); + GTEST_ASSERT_EQ(code, 0); + + { + TdFilePtr fp = taosOpenFile(path, TD_FILE_READ); + GTEST_ASSERT_NE(fp, nullptr); + + (void)memset(data, 0, size); + + int64_t n = taosReadFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(data[i * 2], 0); + GTEST_ASSERT_EQ(data[i * 2 + 1], 1); + } + } + + tcsDeleteObjectsByPrefix(object_name); // list object to check - code = tcsPutObjectFromFile2(file, object, withcp); - GTEST_ASSERT_EQ(code, 0); - code = tcsGetObjectsByPrefix(prefix, path); - GTEST_ASSERT_EQ(code, 0); - code = tcsDeleteObjects(object_name, nobject); - GTEST_ASSERT_EQ(code, 0); - code = tcsGetObjectToFile(object_name, fileName); + code = tcsPutObjectFromFile2(path, object_name, withcp); GTEST_ASSERT_EQ(code, 0); - // GTEST_ASSERT_NE(pEnv, nullptr); - */ + code = tcsGetObjectsByPrefix(object_name, tsTempDir); + GTEST_ASSERT_EQ(code, 0); + + { + TdFilePtr fp = taosOpenFile(path, TD_FILE_READ); + GTEST_ASSERT_NE(fp, nullptr); + + (void)memset(data, 0, size); + + int64_t n = taosReadFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(data[i * 2], 0); + GTEST_ASSERT_EQ(data[i * 2 + 1], 1); + } + } + + const char *object_name_arr[] = {object_name}; + code = tcsDeleteObjects(object_name_arr, 1); + GTEST_ASSERT_EQ(code, 0); tcsUninit(); } From e4b3f9135b09a445a2485120e35214c76d896c73 Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Tue, 22 Oct 2024 09:09:40 +0800 Subject: [PATCH 041/102] fix fill with pesudo column exprs --- source/libs/planner/src/planLogicCreater.c | 173 +++++++++++++------ tests/system-test/2-query/fill_with_group.py | 63 +++++++ 2 files changed, 187 insertions(+), 49 deletions(-) diff --git a/source/libs/planner/src/planLogicCreater.c b/source/libs/planner/src/planLogicCreater.c index 6ad30eccb8..0d4da5c6f6 100644 --- 
a/source/libs/planner/src/planLogicCreater.c +++ b/source/libs/planner/src/planLogicCreater.c @@ -1196,74 +1196,128 @@ static int32_t createWindowLogicNode(SLogicPlanContext* pCxt, SSelectStmt* pSele return TSDB_CODE_FAILED; } +typedef struct SPartFillExprsCtx { + bool hasFillCol; + bool hasPseudoWinCol; + bool hasGroupKeyCol; + SHashObj* pPseudoCols; + int32_t code; +} SPartFillExprsCtx; + static EDealRes needFillValueImpl(SNode* pNode, void* pContext) { + SPartFillExprsCtx *pCtx = pContext; if (QUERY_NODE_COLUMN == nodeType(pNode)) { SColumnNode* pCol = (SColumnNode*)pNode; - if (COLUMN_TYPE_WINDOW_START != pCol->colType && COLUMN_TYPE_WINDOW_END != pCol->colType && - COLUMN_TYPE_WINDOW_DURATION != pCol->colType && COLUMN_TYPE_GROUP_KEY != pCol->colType) { - *(bool*)pContext = true; + if (COLUMN_TYPE_WINDOW_START == pCol->colType || COLUMN_TYPE_WINDOW_END == pCol->colType || + COLUMN_TYPE_WINDOW_DURATION == pCol->colType) { + pCtx->hasPseudoWinCol = true; + pCtx->code = taosHashPut(pCtx->pPseudoCols, pCol->colName, TSDB_COL_NAME_LEN, &pNode, POINTER_BYTES); + } else if (COLUMN_TYPE_GROUP_KEY == pCol->colType || COLUMN_TYPE_TBNAME == pCol->colType || COLUMN_TYPE_TAG == pCol->colType) { + pCtx->hasGroupKeyCol = true; + pCtx->code = taosHashPut(pCtx->pPseudoCols, pCol->colName, TSDB_COL_NAME_LEN, &pNode, POINTER_BYTES); + } else { + pCtx->hasFillCol = true; return DEAL_RES_END; } } return DEAL_RES_CONTINUE; } -static bool needFillValue(SNode* pNode) { - bool hasFillCol = false; - nodesWalkExpr(pNode, needFillValueImpl, &hasFillCol); - return hasFillCol; +static void needFillValue(SNode* pNode, SPartFillExprsCtx* pCtx) { + nodesWalkExpr(pNode, needFillValueImpl, pCtx); } -static int32_t partFillExprs(SSelectStmt* pSelect, SNodeList** pFillExprs, SNodeList** pNotFillExprs) { - int32_t code = TSDB_CODE_SUCCESS; - SNode* pProject = NULL; - FOREACH(pProject, pSelect->pProjectionList) { - if (needFillValue(pProject)) { - SNode* pNew = NULL; - code = nodesCloneNode(pProject, &pNew); - if (TSDB_CODE_SUCCESS == code) { - code = nodesListMakeStrictAppend(pFillExprs, pNew); - } - } else if (QUERY_NODE_VALUE != nodeType(pProject)) { - SNode* pNew = NULL; - code = nodesCloneNode(pProject, &pNew); - if (TSDB_CODE_SUCCESS == code) { - code = nodesListMakeStrictAppend(pNotFillExprs, pNew); - } - } - if (TSDB_CODE_SUCCESS != code) { - NODES_DESTORY_LIST(*pFillExprs); - NODES_DESTORY_LIST(*pNotFillExprs); - break; - } +typedef struct SCollectFillExprsCtx { + SHashObj* pPseudoCols; + int32_t code; + SNodeList* pFillExprs; + SNodeList* pNotFillExprs; + bool skipFillCols; +} SCollectFillExprsCtx; + +static EDealRes collectFillExpr(SNode* pNode, void* pContext) { + SCollectFillExprsCtx* pCollectFillCtx = pContext; + SPartFillExprsCtx partFillCtx = {0}; + SNode* pNew = NULL; + partFillCtx.pPseudoCols = pCollectFillCtx->pPseudoCols; + needFillValue(pNode, &partFillCtx); + if (partFillCtx.code != TSDB_CODE_SUCCESS) { + pCollectFillCtx->code = partFillCtx.code; + return DEAL_RES_ERROR; } - if (!pSelect->isDistinct) { - SNode* pOrderExpr = NULL; - FOREACH(pOrderExpr, pSelect->pOrderByList) { - SNode* pExpr = ((SOrderByExprNode*)pOrderExpr)->pExpr; - if (needFillValue(pExpr)) { - SNode* pNew = NULL; - code = nodesCloneNode(pExpr, &pNew); - if (TSDB_CODE_SUCCESS == code) { - code = nodesListMakeStrictAppend(pFillExprs, pNew); - } - } else if (QUERY_NODE_VALUE != nodeType(pExpr)) { - SNode* pNew = NULL; - code = nodesCloneNode(pExpr, &pNew); - if (TSDB_CODE_SUCCESS == code) { - code = 
nodesListMakeStrictAppend(pNotFillExprs, pNew); - } + + if (partFillCtx.hasFillCol && !pCollectFillCtx->skipFillCols) { + if (nodeType(pNode) == QUERY_NODE_ORDER_BY_EXPR) { + pCollectFillCtx->code = nodesCloneNode(((SOrderByExprNode*)pNode)->pExpr, &pNew); + } else { + pCollectFillCtx->code = nodesCloneNode(pNode, &pNew); + } + if (pCollectFillCtx->code == TSDB_CODE_SUCCESS) { + pCollectFillCtx->code = nodesListMakeStrictAppend(&pCollectFillCtx->pFillExprs, pNew); + } + if (pCollectFillCtx->code != TSDB_CODE_SUCCESS) return DEAL_RES_ERROR; + return DEAL_RES_IGNORE_CHILD; + } + return DEAL_RES_CONTINUE; +} + +static int32_t collectFillExprs(SSelectStmt* pSelect, SNodeList** pFillExprs, SNodeList** pNotFillExprs) { + int32_t code = TSDB_CODE_SUCCESS; + SCollectFillExprsCtx collectFillCtx = {0}; + collectFillCtx.pPseudoCols = taosHashInit(4, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); + if (!collectFillCtx.pPseudoCols) return terrno; + + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + nodesWalkExprs(pSelect->pProjectionList, collectFillExpr, &collectFillCtx); + } + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + collectFillCtx.skipFillCols = true; + nodesWalkExpr(pSelect->pHaving, collectFillExpr, &collectFillCtx); + } + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + nodesWalkExprs(pSelect->pGroupByList, collectFillExpr, &collectFillCtx); + } + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + nodesWalkExprs(pSelect->pOrderByList, collectFillExpr, &collectFillCtx); + } + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + void* pIter = taosHashIterate(collectFillCtx.pPseudoCols, 0); + while (pIter) { + SNode* pNode = *(SNode**)pIter, *pNew = NULL; + collectFillCtx.code = nodesCloneNode(pNode, &pNew); + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + collectFillCtx.code = nodesListMakeStrictAppend(&collectFillCtx.pNotFillExprs, pNew); } - if (TSDB_CODE_SUCCESS != code) { - NODES_DESTORY_LIST(*pFillExprs); - NODES_DESTORY_LIST(*pNotFillExprs); + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + pIter = taosHashIterate(collectFillCtx.pPseudoCols, pIter); + } else { + taosHashCancelIterate(collectFillCtx.pPseudoCols, pIter); break; } } + if (collectFillCtx.code == TSDB_CODE_SUCCESS) { + TSWAP(*pFillExprs, collectFillCtx.pFillExprs); + TSWAP(*pNotFillExprs, collectFillCtx.pNotFillExprs); + } } + if (collectFillCtx.code != TSDB_CODE_SUCCESS) { + if (collectFillCtx.pFillExprs) nodesDestroyList(collectFillCtx.pFillExprs); + if (collectFillCtx.pNotFillExprs) nodesDestroyList(collectFillCtx.pNotFillExprs); + } + taosHashCleanup(collectFillCtx.pPseudoCols); return code; } +static bool nodeAlreadyContained(SNodeList* pList, SNode* pNode) { + SNode* pExpr = NULL; + FOREACH(pExpr, pList) { + if (nodesEqualNode(pExpr, pNode)) { + return true; + } + } + return false; +} + static int32_t createFillLogicNode(SLogicPlanContext* pCxt, SSelectStmt* pSelect, SLogicNode** pLogicNode) { if (NULL == pSelect->pWindow || QUERY_NODE_INTERVAL_WINDOW != nodeType(pSelect->pWindow) || NULL == ((SIntervalWindowNode*)pSelect->pWindow)->pFill) { @@ -1286,13 +1340,34 @@ static int32_t createFillLogicNode(SLogicPlanContext* pCxt, SSelectStmt* pSelect pFill->node.resultDataOrder = pFill->node.requireDataOrder; pFill->node.inputTsOrder = TSDB_ORDER_ASC; - code = partFillExprs(pSelect, &pFill->pFillExprs, &pFill->pNotFillExprs); + code = collectFillExprs(pSelect, &pFill->pFillExprs, &pFill->pNotFillExprs); if (TSDB_CODE_SUCCESS == code) { code = rewriteExprsForSelect(pFill->pFillExprs, pSelect, 
SQL_CLAUSE_FILL, NULL); } if (TSDB_CODE_SUCCESS == code) { code = rewriteExprsForSelect(pFill->pNotFillExprs, pSelect, SQL_CLAUSE_FILL, NULL); } + SNodeList* pWindowTargets = NULL; + if (TSDB_CODE_SUCCESS == code) { + SNode* pNode = NULL, *pNodeNew = NULL; + FOREACH(pNode, pCxt->pCurrRoot->pTargets) { + if (nodesEqualNode(pNode, pFillNode->pWStartTs)) continue; + if (nodeAlreadyContained(pFill->pFillExprs, pNode)) continue; + if (nodeAlreadyContained(pFill->pNotFillExprs, pNode)) continue; + pNodeNew = NULL; + code = nodesCloneNode(pNode, &pNodeNew); + if (TSDB_CODE_SUCCESS == code) { + code = nodesListMakeStrictAppend(&pWindowTargets, pNodeNew); + } + if (TSDB_CODE_SUCCESS != code) { + nodesDestroyList(pWindowTargets); + break; + } + } + } + if (TSDB_CODE_SUCCESS == code && LIST_LENGTH(pWindowTargets) > 0) { + code = nodesListMakeStrictAppendList(&pFill->pFillExprs, pWindowTargets); + } if (TSDB_CODE_SUCCESS == code) { code = createColumnByRewriteExprs(pFill->pFillExprs, &pFill->node.pTargets); } diff --git a/tests/system-test/2-query/fill_with_group.py b/tests/system-test/2-query/fill_with_group.py index 2139bbbfb3..b48143db15 100644 --- a/tests/system-test/2-query/fill_with_group.py +++ b/tests/system-test/2-query/fill_with_group.py @@ -237,11 +237,74 @@ class TDTestCase: tdSql.checkData(12, 1, None) tdSql.checkData(13, 1, None) + def test_fill_with_complex_expr(self): + sql = "SELECT _wstart, _wstart + 1d, count(*), now, 1+1 FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' INTERVAL(5m) FILL(NULL)" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(12) + for i in range(0, 12, 2): + tdSql.checkData(i, 2, 10) + for i in range(1, 12, 2): + tdSql.checkData(i, 2, None) + for i in range(0, 12): + firstCol = tdSql.getData(i, 0) + secondCol = tdSql.getData(i, 1) + tdLog.debug(f"firstCol: {firstCol}, secondCol: {secondCol}, secondCol - firstCol: {secondCol - firstCol}") + if secondCol - firstCol != timedelta(days=1): + tdLog.exit(f"query error: secondCol - firstCol: {secondCol - firstCol}") + nowCol = tdSql.getData(i, 3) + if nowCol is None: + tdLog.exit(f"query error: nowCol: {nowCol}") + constCol = tdSql.getData(i, 4) + if constCol != 2: + tdLog.exit(f"query error: constCol: {constCol}") + + sql = "SELECT _wstart + 1d, count(*), last(ts) + 1a, timediff(_wend, last(ts)) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' INTERVAL(5m) FILL(NULL)" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(12) + for i in range(0, 12, 2): + tdSql.checkData(i, 1, 10) + tdSql.checkData(i, 3, 300000) + for i in range(1, 12, 2): + tdSql.checkData(i, 1, None) + tdSql.checkData(i, 2, None) + tdSql.checkData(i, 3, None) + + sql = "SELECT count(*), tbname FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname INTERVAL(5m) FILL(NULL)" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(120) + + sql = "SELECT * from (SELECT count(*), timediff(_wend, last(ts)) + t1, tbname FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) LIMIT 1) order by tbname" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(10) + j = 0 + for i in range(0, 10): + tdSql.checkData(i, 1, 300000 + j) + j = j + 1 + if j == 5: + j = 0 + + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, tbname,t1 FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) ORDER BY 
timediff(last(ts), _wstart)" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(120) + + sql = "SELECT 1+1, count(*), timediff(_wend, last(ts)) + t1 FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) HAVING(timediff(last(ts), _wstart)+ t1 >= 1) ORDER BY timediff(last(ts), _wstart)" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(48) + + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) HAVING(timediff(last(ts), _wstart) + t1 >= 1) ORDER BY timediff(last(ts), _wstart), tbname" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(48) + + sql = "SELECT count(*) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) having(timediff(last(ts), _wstart) >= 0)" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(60) + def run(self): self.prepareTestEnv() self.test_partition_by_with_interval_fill_prev_new_group_fill_error() self.test_fill_with_order_by() self.test_fill_with_order_by2() + self.test_fill_with_complex_expr() def stop(self): tdSql.close() From aa14186f18b4f68a72c8a7e25d7173cfa65309d1 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 09:52:24 +0800 Subject: [PATCH 042/102] test/tcs, _azure_sdk: use env vars for blob ut --- contrib/test/azure/main.cpp | 89 ++++++++++++++++++++------------ source/libs/tcs/test/tcsTest.cpp | 46 +++++++++++++---- 2 files changed, 91 insertions(+), 44 deletions(-) diff --git a/contrib/test/azure/main.cpp b/contrib/test/azure/main.cpp index 943546a5fb..78ecc8b9f5 100644 --- a/contrib/test/azure/main.cpp +++ b/contrib/test/azure/main.cpp @@ -13,42 +13,67 @@ * along with this program. If not, see . */ -/** - * @file - * @brief Application that consumes the Azure SDK for C++. - * - * @remark Set environment variable `STORAGE_CONNECTION_STRING` before running the application. - * - */ - -#include - -#include #include +// Include the necessary SDK headers +#include +#include + +// Add appropriate using namespace directives +using namespace Azure::Storage; using namespace Azure::Storage::Blobs; -int main(int argc, char* argv[]) { - (void)argc; - (void)argv; +// Secrets should be stored & retrieved from secure locations such as Azure::KeyVault. For +// convenience and brevity of samples, the secrets are retrieved from environment variables. 
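One caveat with the helpers that follow: std::getenv() returns a null pointer when a variable is unset, and constructing a std::string from a null char pointer is undefined behavior, so the ablob_* lookups are safer behind a small null check. A minimal sketch (the wrapper name GetEnvOrEmpty is illustrative, not part of the SDK or of this patch):

    #include <cstdlib>
    #include <string>

    // Null-safe environment lookup; returns an empty string when the variable is unset.
    static std::string GetEnvOrEmpty(const char *name) {
      const char *value = std::getenv(name);
      return value ? std::string(value) : std::string();
    }

The tcsTest.cpp change in this same commit takes that precaution by checking each getenv() result for null before copying it.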
+ +std::string GetEndpointUrl() { + // return std::getenv("AZURE_STORAGE_ACCOUNT_URL"); + std::string accountId = getenv("ablob_account_id"); + if (accountId.empty()) { + return accountId; + } + + return accountId + ".blob.core.windows.net"; +} + +std::string GetAccountName() { + // return std::getenv("AZURE_STORAGE_ACCOUNT_NAME"); + return getenv("ablob_account_id"); +} + +std::string GetAccountKey() { + // return std::getenv("AZURE_STORAGE_ACCOUNT_KEY"); + + return getenv("ablob_account_secret"); +} + +int main() { + std::string endpointUrl = GetEndpointUrl(); + std::string accountName = GetAccountName(); + std::string accountKey = GetAccountKey(); - /**************** Container SDK client ************************/ - /**************** Create container ************************/ try { - auto containerClient = - BlobContainerClient::CreateFromConnectionString(std::getenv("STORAGE_CONNECTION_STRING"), "td-test"); + auto sharedKeyCredential = std::make_shared(accountName, accountKey); + + std::string accountURL = "https://fd2d01cd892f844eeaa2273.blob.core.windows.net"; + BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential); + + std::string containerName = "myblobcontainer"; + // auto containerClient = blobServiceClient.GetBlobContainerClient("myblobcontainer"); + auto containerClient = blobServiceClient.GetBlobContainerClient("td-test"); // Create the container if it does not exist - // std::cout << "Creating container: " << containerName << std::endl; + std::cout << "Creating container: " << containerName << std::endl; // containerClient.CreateIfNotExists(); - /**************** Container SDK client ************************/ - /**************** list blobs (one page) ******************/ - // auto response = containerClient.ListBlobsSinglePage(); - // auto response = containerClient.ListBlobs(); - // auto blobListPage = response.Value; - // auto blobListPage = response.Blobs; - //(void)_azUploadFrom(blobClient, file, offset, size); + std::string blobName = "blob.txt"; + uint8_t blobContent[] = "Hello Azure!"; + // Create the block blob client + BlockBlobClient blobClient = containerClient.GetBlockBlobClient(blobName); + + // Upload the blob + std::cout << "Uploading blob: " << blobName << std::endl; + blobClient.UploadFrom(blobContent, sizeof(blobContent)); /* auto blockBlobClient = BlockBlobClient(endpointUrl, sharedKeyCredential); @@ -62,15 +87,11 @@ int main(int argc, char* argv[]) { std::cout << "Last modified date of uploaded blob: " << model.LastModified.ToString() << std::endl; */ + } catch (const Azure::Core::RequestFailedException& e) { + std::cout << "Status Code: " << static_cast(e.StatusCode) << ", Reason Phrase: " << e.ReasonPhrase + << std::endl; + std::cout << e.what() << std::endl; - for (auto page = containerClient.ListBlobs(/*options*/); page.HasPage(); page.MoveToNextPage()) { - for (auto& blob : page.Blobs) { - std::cout << blob.Name << std::endl; - } - } - - } catch (const std::exception& ex) { - std::cout << ex.what(); return 1; } diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index d07513c644..5e17d09fc7 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -1,4 +1,5 @@ #include + #include #include #include @@ -15,7 +16,7 @@ int32_t tcsInitEnv(int8_t isBlob) { extern char tsS3BucketName[TSDB_FQDN_LEN]; /* TCS parameter format - tsS3Hostname[0] = "endpoint/.blob.core.windows.net"; + tsS3Hostname[0] = "/.blob.core.windows.net"; tsS3AccessKeyId[0] = ""; tsS3AccessKeySecret[0] = ""; 
tsS3BucketName = ""; @@ -23,16 +24,38 @@ int32_t tcsInitEnv(int8_t isBlob) { tsS3Ablob = isBlob; if (isBlob) { - const char *hostname = "endpoint/.blob.core.windows.net"; + const char *hostname = "/.blob.core.windows.net"; const char *accessKeyId = ""; const char *accessKeySecret = ""; const char *bucketName = ""; - tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); - tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); - tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); - tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + if (hostname[0] != '<') { + tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); + tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + } else { + const char *accountId = getenv("ablob_account_id"); + if (!accountId) { + return -1; + } + const char *accountSecret = getenv("ablob_account_secret"); + if (!accountSecret) { + return -1; + } + + const char *containerName = getenv("ablob_container"); + if (!containerName) { + return -1; + } + + TAOS_STRCPY(&tsS3Hostname[0][0], accountId); + TAOS_STRCAT(&tsS3Hostname[0][0], ".blob.core.windows.net"); + TAOS_STRCPY(&tsS3AccessKeyId[0][0], accountId); + TAOS_STRCPY(&tsS3AccessKeySecret[0][0], accountSecret); + TAOS_STRCPY(tsS3BucketName, containerName); + } } else { /* const char *hostname = "endpoint/.blob.core.windows.net"; @@ -67,19 +90,22 @@ int32_t tcsInitEnv(int8_t isBlob) { tstrncpy(tsTempDir, "/tmp/", PATH_MAX); tsS3Enabled = true; - if (!tsS3Ablob) { - } return code; } -TEST(TcsTest, DISABLED_InterfaceTest) { - // TEST(TcsTest, InterfaceTest) { +// TEST(TcsTest, DISABLED_InterfaceTest) { +TEST(TcsTest, InterfaceTest) { int code = 0; bool check = false; bool withcp = false; code = tcsInitEnv(true); + if (code) { + std::cout << "ablob env init failed with: " << code << std::endl; + return; + } + GTEST_ASSERT_EQ(code, 0); GTEST_ASSERT_EQ(tsS3Enabled, 1); GTEST_ASSERT_EQ(tsS3Ablob, 1); From f05ad0a553e83aac62678de3dfbe7567fbf91c9c Mon Sep 17 00:00:00 2001 From: dmchen Date: Tue, 22 Oct 2024 02:54:09 +0000 Subject: [PATCH 043/102] fix/TS-5532-set-seperate-thread-update-status --- source/dnode/mgmt/mgmt_dnode/src/dmWorker.c | 14 -------------- source/dnode/mgmt/node_mgmt/src/dmMgmt.c | 18 ++++++++++++++++++ 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/source/dnode/mgmt/mgmt_dnode/src/dmWorker.c b/source/dnode/mgmt/mgmt_dnode/src/dmWorker.c index 7fc9920816..7f802f3837 100644 --- a/source/dnode/mgmt/mgmt_dnode/src/dmWorker.c +++ b/source/dnode/mgmt/mgmt_dnode/src/dmWorker.c @@ -47,7 +47,6 @@ static void *dmStatusThreadFp(void *param) { return NULL; } -extern SMonVloadInfo tsVinfo; static void *dmStatusInfoThreadFp(void *param) { SDnodeMgmt *pMgmt = param; int64_t lastTime = taosGetTimestampMs(); @@ -73,19 +72,6 @@ static void *dmStatusInfoThreadFp(void *param) { } } } - dDebug("begin to lock status info when thread exit"); - if (taosThreadMutexLock(&pMgmt->pData->statusInfolock) != 0) { - dError("failed to lock status info lock"); - return NULL; - } - if (tsVinfo.pVloads != NULL) { - taosArrayDestroy(tsVinfo.pVloads); - tsVinfo.pVloads = NULL; - } - if (taosThreadMutexUnlock(&pMgmt->pData->statusInfolock) != 0) { - dError("failed to unlock status info lock"); - return NULL; - } return NULL; } diff --git a/source/dnode/mgmt/node_mgmt/src/dmMgmt.c b/source/dnode/mgmt/node_mgmt/src/dmMgmt.c index 5e4f7163e7..1d6bbfa098 100644 --- 
a/source/dnode/mgmt/node_mgmt/src/dmMgmt.c +++ b/source/dnode/mgmt/node_mgmt/src/dmMgmt.c @@ -219,6 +219,7 @@ int32_t dmInitVars(SDnode *pDnode) { return 0; } +extern SMonVloadInfo tsVinfo; void dmClearVars(SDnode *pDnode) { for (EDndNodeType ntype = DNODE; ntype < NODE_END; ++ntype) { SMgmtWrapper *pWrapper = &pDnode->wrappers[ntype]; @@ -254,6 +255,23 @@ void dmClearVars(SDnode *pDnode) { (void)taosThreadRwlockUnlock(&pData->lock); (void)taosThreadRwlockDestroy(&pData->lock); + + dDebug("begin to lock status info when thread exit"); + if (taosThreadMutexLock(&pData->statusInfolock) != 0) { + dError("failed to lock status info lock"); + return; + } + if (tsVinfo.pVloads != NULL) { + taosArrayDestroy(tsVinfo.pVloads); + tsVinfo.pVloads = NULL; + } + if (taosThreadMutexUnlock(&pData->statusInfolock) != 0) { + dError("failed to unlock status info lock"); + return; + } + taosThreadMutexDestroy(&pData->statusInfolock); + memset(&pData->statusInfolock, 0, sizeof(pData->statusInfolock)); + (void)taosThreadMutexDestroy(&pDnode->mutex); memset(&pDnode->mutex, 0, sizeof(pDnode->mutex)); } From bc247650c5fe42e29882c3f7fa96aef9ccd2e018 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 12:35:32 +0800 Subject: [PATCH 044/102] test/az: use env vars for az testing cases --- source/libs/azure/src/az.cpp | 9 +- source/libs/azure/test/azTest.cpp | 161 ++++++++++++++++++++++++++---- source/libs/tcs/test/tcsTest.cpp | 15 +++ 3 files changed, 157 insertions(+), 28 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 1422705011..b3b5c7704a 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -204,9 +204,9 @@ _next: int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size) { int32_t code = 0; - std::string endpointUrl = tsS3Hostname[0]; // GetEndpointUrl(); - std::string accountName = tsS3AccessKeyId[0]; // GetAccountName(); - std::string accountKey = tsS3AccessKeySecret[0]; // GetAccountKey(); + std::string endpointUrl = tsS3Hostname[0]; + std::string accountName = tsS3AccessKeyId[0]; + std::string accountKey = tsS3AccessKeySecret[0]; try { auto sharedKeyCredential = std::make_shared(accountName, accountKey); @@ -226,9 +226,6 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int // TDBlockBlobClient blobClient(containerClient.GetBlobClient(blobName)); TDBlockBlobClient blobClient(containerClient.GetBlobClient(object_name)); - // Upload the blob - // std::cout << "Uploading blob: " << blobName << std::endl; - // blobClient.UploadFrom(blobContent, sizeof(blobContent)); blobClient.UploadFrom(file, offset, size); } catch (const Azure::Core::RequestFailedException &e) { /* diff --git a/source/libs/azure/test/azTest.cpp b/source/libs/azure/test/azTest.cpp index 8d428fbb69..bb15bf11ec 100644 --- a/source/libs/azure/test/azTest.cpp +++ b/source/libs/azure/test/azTest.cpp @@ -1,3 +1,18 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. 
If not, see . + */ + #include #include #include @@ -24,15 +39,38 @@ int32_t azInitEnv() { tsS3BucketName = ""; */ - const char *hostname = "endpoint/.blob.core.windows.net"; + const char *hostname = "/.blob.core.windows.net"; const char *accessKeyId = ""; const char *accessKeySecret = ""; const char *bucketName = ""; - tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); - tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); - tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); - tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + if (hostname[0] != '<') { + tstrncpy(&tsS3Hostname[0][0], hostname, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeyId[0][0], accessKeyId, TSDB_FQDN_LEN); + tstrncpy(&tsS3AccessKeySecret[0][0], accessKeySecret, TSDB_FQDN_LEN); + tstrncpy(tsS3BucketName, bucketName, TSDB_FQDN_LEN); + } else { + const char *accountId = getenv("ablob_account_id"); + if (!accountId) { + return -1; + } + + const char *accountSecret = getenv("ablob_account_secret"); + if (!accountSecret) { + return -1; + } + + const char *containerName = getenv("ablob_container"); + if (!containerName) { + return -1; + } + + TAOS_STRCPY(&tsS3Hostname[0][0], accountId); + TAOS_STRCAT(&tsS3Hostname[0][0], ".blob.core.windows.net"); + TAOS_STRCPY(&tsS3AccessKeyId[0][0], accountId); + TAOS_STRCPY(&tsS3AccessKeySecret[0][0], accountSecret); + TAOS_STRCPY(tsS3BucketName, containerName); + } tstrncpy(tsTempDir, "/tmp/", PATH_MAX); @@ -41,9 +79,11 @@ int32_t azInitEnv() { return code; } -TEST(AzTest, DISABLED_InterfaceTest) { - // TEST(AzTest, InterfaceTest) { - int code = 0; +// TEST(AzTest, DISABLED_InterfaceTest) { +TEST(AzTest, InterfaceTest) { + int code = 0; + bool check = false; + bool withcp = false; code = azInitEnv(); GTEST_ASSERT_EQ(code, 0); @@ -54,26 +94,103 @@ TEST(AzTest, DISABLED_InterfaceTest) { code = azCheckCfg(); GTEST_ASSERT_EQ(code, 0); - /* - code = azPutObjectFromFileOffset(file, object_name, offset, size); - GTEST_ASSERT_EQ(code, 0); - code = azGetObjectBlock(object_name, offset, size, check, ppBlock); + const int size = 4096; + char data[size] = {0}; + for (int i = 0; i < size / 2; ++i) { + data[i * 2 + 1] = 1; + } + + const char object_name[] = "azut.bin"; + char path[PATH_MAX] = {0}; + char path_download[PATH_MAX] = {0}; + int ds_len = strlen(TD_DIRSEP); + int tmp_len = strlen(tsTempDir); + + (void)snprintf(path, PATH_MAX, "%s", tsTempDir); + if (strncmp(tsTempDir + tmp_len - ds_len, TD_DIRSEP, ds_len) != 0) { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", TD_DIRSEP); + (void)snprintf(path + tmp_len + ds_len, PATH_MAX - tmp_len - ds_len, "%s", object_name); + } else { + (void)snprintf(path + tmp_len, PATH_MAX - tmp_len, "%s", object_name); + } + + tstrncpy(path_download, path, strlen(path) + 1); + tstrncpy(path_download + strlen(path), ".download", strlen(".download") + 1); + + TdFilePtr fp = taosOpenFile(path, TD_FILE_WRITE | TD_FILE_CREATE | TD_FILE_WRITE_THROUGH); + GTEST_ASSERT_NE(fp, nullptr); + + int n = taosWriteFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); GTEST_ASSERT_EQ(code, 0); - azDeleteObjectsByPrefix(prefix); + code = azPutObjectFromFileOffset(path, object_name, 0, size); + GTEST_ASSERT_EQ(code, 0); + + uint8_t *pBlock = NULL; + code = azGetObjectBlock(object_name, 0, size, check, &pBlock); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(pBlock[i * 2], 0); + GTEST_ASSERT_EQ(pBlock[i * 2 + 1], 1); + } + + taosMemoryFree(pBlock); + + code = 
azGetObjectToFile(object_name, path_download); + GTEST_ASSERT_EQ(code, 0); + + { + TdFilePtr fp = taosOpenFile(path, TD_FILE_READ); + GTEST_ASSERT_NE(fp, nullptr); + + (void)memset(data, 0, size); + + int64_t n = taosReadFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(data[i * 2], 0); + GTEST_ASSERT_EQ(data[i * 2 + 1], 1); + } + } + + azDeleteObjectsByPrefix(object_name); // list object to check - code = azPutObjectFromFile2(file, object, withcp); - GTEST_ASSERT_EQ(code, 0); - code = azGetObjectsByPrefix(prefix, path); - GTEST_ASSERT_EQ(code, 0); - code = azDeleteObjects(object_name, nobject); - GTEST_ASSERT_EQ(code, 0); - code = azGetObjectToFile(object_name, fileName); + code = azPutObjectFromFile2(path, object_name, withcp); GTEST_ASSERT_EQ(code, 0); - // GTEST_ASSERT_NE(pEnv, nullptr); - */ + code = azGetObjectsByPrefix(object_name, tsTempDir); + GTEST_ASSERT_EQ(code, 0); + + { + TdFilePtr fp = taosOpenFile(path, TD_FILE_READ); + GTEST_ASSERT_NE(fp, nullptr); + + (void)memset(data, 0, size); + + int64_t n = taosReadFile(fp, data, size); + GTEST_ASSERT_EQ(n, size); + + code = taosCloseFile(&fp); + GTEST_ASSERT_EQ(code, 0); + + for (int i = 0; i < size / 2; ++i) { + GTEST_ASSERT_EQ(data[i * 2], 0); + GTEST_ASSERT_EQ(data[i * 2 + 1], 1); + } + } + + const char *object_name_arr[] = {object_name}; + code = azDeleteObjects(object_name_arr, 1); + GTEST_ASSERT_EQ(code, 0); azEnd(); } diff --git a/source/libs/tcs/test/tcsTest.cpp b/source/libs/tcs/test/tcsTest.cpp index 5e17d09fc7..4b5afc5b85 100644 --- a/source/libs/tcs/test/tcsTest.cpp +++ b/source/libs/tcs/test/tcsTest.cpp @@ -1,3 +1,18 @@ +/* + * Copyright (c) 2019 TAOS Data, Inc. + * + * This program is free software: you can use, redistribute, and/or modify + * it under the terms of the GNU Affero General Public License, version 3 + * or later ("AGPL"), as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, but WITHOUT + * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or + * FITNESS FOR A PARTICULAR PURPOSE. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + #include #include From 1153a5222de848ca9d2ab682d682fc2c49bdd350 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 12:53:16 +0800 Subject: [PATCH 045/102] az/put from file offset: log error with status code and reason phrase --- source/libs/azure/src/az.cpp | 1 + source/libs/azure/test/azTest.cpp | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index b3b5c7704a..26a9e543da 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -233,6 +233,7 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int << std::endl; std::cout << e.what() << std::endl; */ + azError("%s: Status Code: %d, Reason Phrase: %s", __func__, static_cast(e.StatusCode), e.ReasonPhrase.c_str()); code = TAOS_SYSTEM_ERROR(EIO); azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); TAOS_RETURN(code); diff --git a/source/libs/azure/test/azTest.cpp b/source/libs/azure/test/azTest.cpp index bb15bf11ec..c1be835150 100644 --- a/source/libs/azure/test/azTest.cpp +++ b/source/libs/azure/test/azTest.cpp @@ -33,7 +33,7 @@ int32_t azInitEnv() { extern char tsS3BucketName[TSDB_FQDN_LEN]; /* TCS parameter format - tsS3Hostname[0] = "endpoint/.blob.core.windows.net"; + tsS3Hostname[0] = "/.blob.core.windows.net"; tsS3AccessKeyId[0] = ""; tsS3AccessKeySecret[0] = ""; tsS3BucketName = ""; From ef730b08349158ae2343676638ef9bdf693059bf Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 13:04:38 +0800 Subject: [PATCH 046/102] az/put: remove cpp logs --- source/libs/azure/src/az.cpp | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 26a9e543da..d4f93b9af4 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -228,14 +228,11 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int blobClient.UploadFrom(file, offset, size); } catch (const Azure::Core::RequestFailedException &e) { - /* - std::cout << "Status Code: " << static_cast(e.StatusCode) << ", Reason Phrase: " << e.ReasonPhrase - << std::endl; - std::cout << e.what() << std::endl; - */ azError("%s: Status Code: %d, Reason Phrase: %s", __func__, static_cast(e.StatusCode), e.ReasonPhrase.c_str()); + code = TAOS_SYSTEM_ERROR(EIO); azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + TAOS_RETURN(code); } From 9c7eaa1633920ef1f1c2594c6cd5690f75b9c9bd Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 13:17:14 +0800 Subject: [PATCH 047/102] az/header: move header file to inc directory --- source/libs/azure/src/avro_parser.cpp | 531 ------------------ source/libs/azure/src/avro_parser.hpp | 198 ------- source/libs/azure/src/td_avro_parser.cpp | 2 +- .../libs/azure/src/td_block_blob_client.cpp | 2 +- 4 files changed, 2 insertions(+), 731 deletions(-) delete mode 100644 source/libs/azure/src/avro_parser.cpp delete mode 100644 source/libs/azure/src/avro_parser.hpp diff --git a/source/libs/azure/src/avro_parser.cpp b/source/libs/azure/src/avro_parser.cpp deleted file mode 100644 index 485980e007..0000000000 --- a/source/libs/azure/src/avro_parser.cpp +++ /dev/null @@ -1,531 +0,0 @@ -#if defined(USE_S3) -#include "avro_parser.hpp" - -#include -#include - -#include -#include - -namespace Azure { -namespace Storage { -namespace Blobs { -namespace _detail { - -namespace { -int64_t parseInt(AvroStreamReader::ReaderPos& data) { - uint64_t 
r = 0; - int nb = 0; - while (true) { - uint8_t c = (*data.BufferPtr)[data.Offset++]; - r = r | ((static_cast(c) & 0x7f) << (nb * 7)); - if (c & 0x80) { - ++nb; - continue; - } - break; - } - return static_cast(r >> 1) ^ -static_cast(r & 0x01); -} - -AvroSchema ParseSchemaFromJsonString(const std::string& jsonSchema) { - const static std::map BuiltinNameSchemaMap = { - {"string", AvroSchema::StringSchema}, {"bytes", AvroSchema::BytesSchema}, {"int", AvroSchema::IntSchema}, - {"long", AvroSchema::LongSchema}, {"float", AvroSchema::FloatSchema}, {"double", AvroSchema::DoubleSchema}, - {"boolean", AvroSchema::BoolSchema}, {"null", AvroSchema::NullSchema}, {"string", AvroSchema::StringSchema}, - }; - std::map nameSchemaMap = BuiltinNameSchemaMap; - - std::function parseSchemaFromJsonObject; - parseSchemaFromJsonObject = [&](const Core::Json::_internal::json& obj) -> AvroSchema { - if (obj.is_string()) { - auto typeName = obj.get(); - return nameSchemaMap.find(typeName)->second; - } else if (obj.is_array()) { - std::vector unionSchemas; - for (const auto& s : obj) { - unionSchemas.push_back(parseSchemaFromJsonObject(s)); - } - return AvroSchema::UnionSchema(std::move(unionSchemas)); - } else if (obj.is_object()) { - if (obj.count("namespace") != 0) { - throw std::runtime_error("Namespace isn't supported yet in Avro schema."); - } - if (obj.count("aliases") != 0) { - throw std::runtime_error("Alias isn't supported yet in Avro schema."); - } - auto typeName = obj["type"].get(); - auto i = nameSchemaMap.find(typeName); - if (i != nameSchemaMap.end()) { - return i->second; - } - if (typeName == "record") { - std::vector> fieldsSchema; - for (const auto& field : obj["fields"]) { - fieldsSchema.push_back( - std::make_pair(field["name"].get(), parseSchemaFromJsonObject(field["type"]))); - } - - const std::string recordName = obj["name"].get(); - auto recordSchema = AvroSchema::RecordSchema(recordName, std::move(fieldsSchema)); - nameSchemaMap.insert(std::make_pair(recordName, recordSchema)); - return recordSchema; - } else if (typeName == "enum") { - throw std::runtime_error("Enum type isn't supported yet in Avro schema."); - } else if (typeName == "array") { - return AvroSchema::ArraySchema(parseSchemaFromJsonObject(obj["items"])); - } else if (typeName == "map") { - return AvroSchema::MapSchema(parseSchemaFromJsonObject(obj["items"])); - } else if (typeName == "fixed") { - const std::string fixedName = obj["name"].get(); - auto fixedSchema = AvroSchema::FixedSchema(fixedName, obj["size"].get()); - nameSchemaMap.insert(std::make_pair(fixedName, fixedSchema)); - return fixedSchema; - } else { - throw std::runtime_error("Unrecognized type " + typeName + " in Avro schema."); - } - } - AZURE_UNREACHABLE_CODE(); - }; - - auto jsonRoot = Core::Json::_internal::json::parse(jsonSchema.begin(), jsonSchema.end()); - return parseSchemaFromJsonObject(jsonRoot); -} -} // namespace - -int64_t AvroStreamReader::ParseInt(const Core::Context& context) { - uint64_t r = 0; - int nb = 0; - while (true) { - Preload(1, context); - uint8_t c = m_streambuffer[m_pos.Offset++]; - - r = r | ((static_cast(c) & 0x7f) << (nb * 7)); - if (c & 0x80) { - ++nb; - continue; - } - break; - } - return static_cast(r >> 1) ^ -static_cast(r & 0x01); -} - -void AvroStreamReader::Advance(size_t n, const Core::Context& context) { - Preload(n, context); - m_pos.Offset += n; -} - -size_t AvroStreamReader::Preload(size_t n, const Core::Context& context) { - size_t oldAvailable = AvailableBytes(); - while (true) { - size_t newAvailable = 
TryPreload(n, context); - if (newAvailable >= n) { - return newAvailable; - } - if (oldAvailable == newAvailable) { - throw std::runtime_error("Unexpected EOF of Avro stream."); - } - oldAvailable = newAvailable; - } - AZURE_UNREACHABLE_CODE(); -} - -size_t AvroStreamReader::TryPreload(size_t n, const Core::Context& context) { - size_t availableBytes = AvailableBytes(); - if (availableBytes >= n) { - return availableBytes; - } - const size_t MinRead = 4096; - size_t tryReadSize = (std::max)(n, MinRead); - size_t currSize = m_streambuffer.size(); - m_streambuffer.resize(m_streambuffer.size() + tryReadSize); - size_t actualReadSize = m_stream->Read(m_streambuffer.data() + currSize, tryReadSize, context); - m_streambuffer.resize(currSize + actualReadSize); - return AvailableBytes(); -} - -void AvroStreamReader::Discard() { - constexpr size_t MinimumReleaseMemory = 128 * 1024; - if (m_pos.Offset < MinimumReleaseMemory) { - return; - } - const size_t availableBytes = AvailableBytes(); - std::memmove(&m_streambuffer[0], &m_streambuffer[m_pos.Offset], availableBytes); - m_streambuffer.resize(availableBytes); - m_pos.Offset = 0; -} - -const AvroSchema AvroSchema::StringSchema(AvroDatumType::String); -const AvroSchema AvroSchema::BytesSchema(AvroDatumType::Bytes); -const AvroSchema AvroSchema::IntSchema(AvroDatumType::Int); -const AvroSchema AvroSchema::LongSchema(AvroDatumType::Long); -const AvroSchema AvroSchema::FloatSchema(AvroDatumType::Float); -const AvroSchema AvroSchema::DoubleSchema(AvroDatumType::Double); -const AvroSchema AvroSchema::BoolSchema(AvroDatumType::Bool); -const AvroSchema AvroSchema::NullSchema(AvroDatumType::Null); - -AvroSchema AvroSchema::RecordSchema(std::string name, - const std::vector>& fieldsSchema) { - AvroSchema recordSchema(AvroDatumType::Record); - recordSchema.m_name = std::move(name); - recordSchema.m_status = std::make_shared(); - for (auto& i : fieldsSchema) { - recordSchema.m_status->m_keys.push_back(i.first); - recordSchema.m_status->m_schemas.push_back(i.second); - } - return recordSchema; -} - -AvroSchema AvroSchema::ArraySchema(AvroSchema elementSchema) { - AvroSchema arraySchema(AvroDatumType::Array); - arraySchema.m_status = std::make_shared(); - arraySchema.m_status->m_schemas.push_back(std::move(elementSchema)); - return arraySchema; -} - -AvroSchema AvroSchema::MapSchema(AvroSchema elementSchema) { - AvroSchema mapSchema(AvroDatumType::Map); - mapSchema.m_status = std::make_shared(); - mapSchema.m_status->m_schemas.push_back(std::move(elementSchema)); - return mapSchema; -} - -AvroSchema AvroSchema::UnionSchema(std::vector schemas) { - AvroSchema unionSchema(AvroDatumType::Union); - unionSchema.m_status = std::make_shared(); - unionSchema.m_status->m_schemas = std::move(schemas); - return unionSchema; -} - -AvroSchema AvroSchema::FixedSchema(std::string name, int64_t size) { - AvroSchema fixedSchema(AvroDatumType::Fixed); - fixedSchema.m_name = std::move(name); - fixedSchema.m_status = std::make_shared(); - fixedSchema.m_status->m_size = size; - return fixedSchema; -} - -void AvroDatum::Fill(AvroStreamReader& reader, const Core::Context& context) { - m_data = reader.m_pos; - if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { - int64_t stringSize = reader.ParseInt(context); - reader.Advance(static_cast(stringSize), context); - } else if (m_schema.Type() == AvroDatumType::Int || m_schema.Type() == AvroDatumType::Long || - m_schema.Type() == AvroDatumType::Enum) { - reader.ParseInt(context); - } else if 
(m_schema.Type() == AvroDatumType::Float) { - reader.Advance(4, context); - } else if (m_schema.Type() == AvroDatumType::Double) { - reader.Advance(8, context); - } else if (m_schema.Type() == AvroDatumType::Bool) { - reader.Advance(1, context); - } else if (m_schema.Type() == AvroDatumType::Null) { - reader.Advance(0, context); - } else if (m_schema.Type() == AvroDatumType::Record) { - for (const auto& s : m_schema.FieldSchemas()) { - AvroDatum(s).Fill(reader, context); - } - } else if (m_schema.Type() == AvroDatumType::Array) { - while (true) { - int64_t numElementsInBlock = reader.ParseInt(context); - if (numElementsInBlock == 0) { - break; - } else if (numElementsInBlock < 0) { - int64_t blockSize = reader.ParseInt(context); - reader.Advance(static_cast(blockSize), context); - } else { - for (auto i = 0; i < numElementsInBlock; ++i) { - AvroDatum(m_schema.ItemSchema()).Fill(reader, context); - } - } - } - } else if (m_schema.Type() == AvroDatumType::Map) { - while (true) { - int64_t numElementsInBlock = reader.ParseInt(context); - if (numElementsInBlock == 0) { - break; - } else if (numElementsInBlock < 0) { - int64_t blockSize = reader.ParseInt(context); - reader.Advance(static_cast(blockSize), context); - } else { - for (int64_t i = 0; i < numElementsInBlock; ++i) { - AvroDatum(AvroSchema::StringSchema).Fill(reader, context); - AvroDatum(m_schema.ItemSchema()).Fill(reader, context); - } - } - } - } else if (m_schema.Type() == AvroDatumType::Union) { - int64_t i = reader.ParseInt(context); - AvroDatum(m_schema.FieldSchemas()[static_cast(i)]).Fill(reader, context); - } else if (m_schema.Type() == AvroDatumType::Fixed) { - reader.Advance(m_schema.Size(), context); - } else { - AZURE_UNREACHABLE_CODE(); - } -} - -void AvroDatum::Fill(AvroStreamReader::ReaderPos& data) { - m_data = data; - if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { - int64_t stringSize = parseInt(data); - data.Offset += static_cast(stringSize); - } else if (m_schema.Type() == AvroDatumType::Int || m_schema.Type() == AvroDatumType::Long || - m_schema.Type() == AvroDatumType::Enum) { - parseInt(data); - } else if (m_schema.Type() == AvroDatumType::Float) { - data.Offset += 4; - } else if (m_schema.Type() == AvroDatumType::Double) { - data.Offset += 8; - } else if (m_schema.Type() == AvroDatumType::Bool) { - data.Offset += 1; - } else if (m_schema.Type() == AvroDatumType::Null) { - data.Offset += 0; - } else if (m_schema.Type() == AvroDatumType::Record) { - for (const auto& s : m_schema.FieldSchemas()) { - AvroDatum(s).Fill(data); - } - } else if (m_schema.Type() == AvroDatumType::Array) { - while (true) { - int64_t numElementsInBlock = parseInt(data); - if (numElementsInBlock == 0) { - break; - } else if (numElementsInBlock < 0) { - int64_t blockSize = parseInt(data); - data.Offset += static_cast(blockSize); - } else { - for (auto i = 0; i < numElementsInBlock; ++i) { - AvroDatum(m_schema.ItemSchema()).Fill(data); - } - } - } - } else if (m_schema.Type() == AvroDatumType::Map) { - while (true) { - int64_t numElementsInBlock = parseInt(data); - if (numElementsInBlock == 0) { - break; - } else if (numElementsInBlock < 0) { - int64_t blockSize = parseInt(data); - data.Offset += static_cast(blockSize); - } else { - for (int64_t i = 0; i < numElementsInBlock; ++i) { - AvroDatum(AvroSchema::StringSchema).Fill(data); - AvroDatum(m_schema.ItemSchema()).Fill(data); - } - } - } - } else if (m_schema.Type() == AvroDatumType::Union) { - int64_t i = parseInt(data); - 
AvroDatum(m_schema.FieldSchemas()[static_cast(i)]).Fill(data); - } else if (m_schema.Type() == AvroDatumType::Fixed) { - data.Offset += m_schema.Size(); - } else { - AZURE_UNREACHABLE_CODE(); - } -} - -template <> -AvroDatum::StringView AvroDatum::Value() const { - auto data = m_data; - if (m_schema.Type() == AvroDatumType::String || m_schema.Type() == AvroDatumType::Bytes) { - const int64_t length = parseInt(data); - const uint8_t* start = &(*data.BufferPtr)[data.Offset]; - StringView ret{start, static_cast(length)}; - data.Offset += static_cast(length); - return ret; - } - if (m_schema.Type() == AvroDatumType::Fixed) { - const size_t fixedSize = m_schema.Size(); - const uint8_t* start = &(*data.BufferPtr)[data.Offset]; - StringView ret{start, fixedSize}; - data.Offset += fixedSize; - return ret; - } - AZURE_UNREACHABLE_CODE(); -} - -template <> -std::string AvroDatum::Value() const { - auto stringView = Value(); - return std::string(stringView.Data, stringView.Data + stringView.Length); -} - -template <> -std::vector AvroDatum::Value() const { - auto stringView = Value(); - return std::vector(stringView.Data, stringView.Data + stringView.Length); -} - -template <> -int64_t AvroDatum::Value() const { - auto data = m_data; - return parseInt(data); -} - -template <> -int32_t AvroDatum::Value() const { - return static_cast(Value()); -} - -template <> -bool AvroDatum::Value() const { - return Value(); -} - -template <> -std::nullptr_t AvroDatum::Value() const { - return nullptr; -} - -template <> -AvroRecord AvroDatum::Value() const { - auto data = m_data; - - AvroRecord r; - r.m_keys = &m_schema.FieldNames(); - for (const auto& schema : m_schema.FieldSchemas()) { - auto datum = AvroDatum(schema); - datum.Fill(data); - r.m_values.push_back(std::move(datum)); - } - - return r; -} - -template <> -AvroMap AvroDatum::Value() const { - auto data = m_data; - - AvroMap m; - while (true) { - int64_t numElementsInBlock = parseInt(data); - if (numElementsInBlock == 0) { - break; - } - if (numElementsInBlock < 0) { - numElementsInBlock = -numElementsInBlock; - parseInt(data); - } - for (int64_t i = 0; i < numElementsInBlock; ++i) { - auto keyDatum = AvroDatum(AvroSchema::StringSchema); - keyDatum.Fill(data); - auto valueDatum = AvroDatum(m_schema.ItemSchema()); - valueDatum.Fill(data); - m[keyDatum.Value()] = valueDatum; - } - } - return m; -} - -template <> -AvroDatum AvroDatum::Value() const { - auto data = m_data; - if (m_schema.Type() == AvroDatumType::Union) { - int64_t i = parseInt(data); - auto datum = AvroDatum(m_schema.FieldSchemas()[static_cast(i)]); - datum.Fill(data); - return datum; - } - AZURE_UNREACHABLE_CODE(); -} - -AvroObjectContainerReader::AvroObjectContainerReader(Core::IO::BodyStream& stream) - : m_reader(std::make_unique(stream)) {} - -AvroDatum AvroObjectContainerReader::NextImpl(const AvroSchema* schema, const Core::Context& context) { - AZURE_ASSERT_FALSE(m_eof); - static const auto SyncMarkerSchema = AvroSchema::FixedSchema("Sync", 16); - if (!schema) { - static AvroSchema FileHeaderSchema = []() { - std::vector> fieldsSchema; - fieldsSchema.push_back(std::make_pair("magic", AvroSchema::FixedSchema("Magic", 4))); - fieldsSchema.push_back(std::make_pair("meta", AvroSchema::MapSchema(AvroSchema::BytesSchema))); - fieldsSchema.push_back(std::make_pair("sync", SyncMarkerSchema)); - return AvroSchema::RecordSchema("org.apache.avro.file.Header", std::move(fieldsSchema)); - }(); - auto fileHeaderDatum = AvroDatum(FileHeaderSchema); - fileHeaderDatum.Fill(*m_reader, context); - auto 
fileHeader = fileHeaderDatum.Value(); - if (fileHeader.Field("magic").Value() != "Obj\01") { - throw std::runtime_error("Invalid Avro object container magic."); - } - AvroMap meta = fileHeader.Field("meta").Value(); - std::string objectSchemaJson = meta["avro.schema"].Value(); - std::string codec = "null"; - if (meta.count("avro.codec") != 0) { - codec = meta["avro.codec"].Value(); - } - if (codec != "null") { - throw std::runtime_error("Unsupported Avro codec: " + codec); - } - m_syncMarker = fileHeader.Field("sync").Value(); - m_objectSchema = std::make_unique(ParseSchemaFromJsonString(objectSchemaJson)); - schema = m_objectSchema.get(); - } - - if (m_remainingObjectInCurrentBlock == 0) { - m_reader->Discard(); - m_remainingObjectInCurrentBlock = m_reader->ParseInt(context); - int64_t ObjectsSize = m_reader->ParseInt(context); - m_reader->Preload(static_cast(ObjectsSize), context); - } - - auto objectDatum = AvroDatum(*m_objectSchema); - objectDatum.Fill(*m_reader, context); - if (--m_remainingObjectInCurrentBlock == 0) { - auto markerDatum = AvroDatum(SyncMarkerSchema); - markerDatum.Fill(*m_reader, context); - auto marker = markerDatum.Value(); - if (marker != m_syncMarker) { - throw std::runtime_error("Sync marker doesn't match."); - } - m_eof = m_reader->TryPreload(1, context) == 0; - } - return objectDatum; -} - -size_t AvroStreamParser::OnRead(uint8_t* buffer, size_t count, Azure::Core::Context const& context) { - if (m_parserBuffer.Length != 0) { - size_t bytesToCopy = (std::min)(m_parserBuffer.Length, count); - std::memcpy(buffer, m_parserBuffer.Data, bytesToCopy); - m_parserBuffer.Data += bytesToCopy; - m_parserBuffer.Length -= bytesToCopy; - return bytesToCopy; - } - while (!m_parser.End()) { - auto datum = m_parser.Next(context); - if (datum.Schema().Type() == AvroDatumType::Union) { - datum = datum.Value(); - } - if (datum.Schema().Type() != AvroDatumType::Record) { - continue; - } - if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.resultData") { - auto record = datum.Value(); - auto dataDatum = record.Field("data"); - m_parserBuffer = dataDatum.Value(); - return OnRead(buffer, count, context); - } - if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.progress" && m_progressCallback) { - auto record = datum.Value(); - auto bytesScanned = record.Field("bytesScanned").Value(); - auto totalBytes = record.Field("totalBytes").Value(); - m_progressCallback(bytesScanned, totalBytes); - } - if (datum.Schema().Name() == "com.microsoft.azure.storage.queryBlobContents.error" && m_errorCallback) { - auto record = datum.Value(); - BlobQueryError e; - e.Name = record.Field("name").Value(); - e.Description = record.Field("description").Value(); - e.IsFatal = record.Field("fatal").Value(); - e.Position = record.Field("position").Value(); - m_errorCallback(std::move(e)); - } - } - return 0; -} -} // namespace _detail -} // namespace Blobs -} // namespace Storage -} // namespace Azure - -#endif diff --git a/source/libs/azure/src/avro_parser.hpp b/source/libs/azure/src/avro_parser.hpp deleted file mode 100644 index 275d073c85..0000000000 --- a/source/libs/azure/src/avro_parser.hpp +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. 
- -#pragma once - -#include "azure/storage/blobs/blob_options.hpp" - -#include - -#include -#include -#include - -namespace Azure { namespace Storage { namespace Blobs { namespace _detail { - enum class AvroDatumType - { - String, - Bytes, - Int, - Long, - Float, - Double, - Bool, - Null, - Record, - Enum, - Array, - Map, - Union, - Fixed, - }; - - class AvroStreamReader final { - public: - // position of a vector that lives through vector resizing - struct ReaderPos final - { - const std::vector* BufferPtr = nullptr; - size_t Offset = 0; - }; - explicit AvroStreamReader(Core::IO::BodyStream& stream) - : m_stream(&stream), m_pos{&m_streambuffer, 0} - { - } - AvroStreamReader(const AvroStreamReader&) = delete; - AvroStreamReader& operator=(const AvroStreamReader&) = delete; - - int64_t ParseInt(const Core::Context& context); - void Advance(size_t n, const Core::Context& context); - // Read at least n bytes from m_stream and append data to m_streambuffer. Return number of bytes - // available in m_streambuffer; - size_t Preload(size_t n, const Core::Context& context); - size_t TryPreload(size_t n, const Core::Context& context); - // discards data that's before m_pos - void Discard(); - - private: - size_t AvailableBytes() const { return m_streambuffer.size() - m_pos.Offset; } - - private: - Core::IO::BodyStream* m_stream; - std::vector m_streambuffer; - ReaderPos m_pos; - - friend class AvroDatum; - }; - - class AvroSchema final { - public: - static const AvroSchema StringSchema; - static const AvroSchema BytesSchema; - static const AvroSchema IntSchema; - static const AvroSchema LongSchema; - static const AvroSchema FloatSchema; - static const AvroSchema DoubleSchema; - static const AvroSchema BoolSchema; - static const AvroSchema NullSchema; - static AvroSchema RecordSchema( - std::string name, - const std::vector>& fieldsSchema); - static AvroSchema ArraySchema(AvroSchema elementSchema); - static AvroSchema MapSchema(AvroSchema elementSchema); - static AvroSchema UnionSchema(std::vector schemas); - static AvroSchema FixedSchema(std::string name, int64_t size); - - const std::string& Name() const { return m_name; } - AvroDatumType Type() const { return m_type; } - const std::vector& FieldNames() const { return m_status->m_keys; } - AvroSchema ItemSchema() const { return m_status->m_schemas[0]; } - const std::vector& FieldSchemas() const { return m_status->m_schemas; } - size_t Size() const { return static_cast(m_status->m_size); } - - private: - explicit AvroSchema(AvroDatumType type) : m_type(type) {} - - private: - AvroDatumType m_type; - std::string m_name; - - struct SharedStatus - { - std::vector m_keys; - std::vector m_schemas; - int64_t m_size = 0; - }; - std::shared_ptr m_status; - }; - - class AvroDatum final { - public: - AvroDatum() : m_schema(AvroSchema::NullSchema) {} - explicit AvroDatum(AvroSchema schema) : m_schema(std::move(schema)) {} - - void Fill(AvroStreamReader& reader, const Core::Context& context); - void Fill(AvroStreamReader::ReaderPos& data); - - const AvroSchema& Schema() const { return m_schema; } - - template T Value() const; - struct StringView - { - const uint8_t* Data = nullptr; - size_t Length = 0; - }; - - private: - AvroSchema m_schema; - AvroStreamReader::ReaderPos m_data; - }; - - using AvroMap = std::map; - - class AvroRecord final { - public: - bool HasField(const std::string& key) const { return FindField(key) != m_keys->size(); } - const AvroDatum& Field(const std::string& key) const { return m_values.at(FindField(key)); } - AvroDatum& Field(const 
std::string& key) { return m_values.at(FindField(key)); } - const AvroDatum& FieldAt(size_t i) const { return m_values.at(i); } - AvroDatum& FieldAt(size_t i) { return m_values.at(i); } - - private: - size_t FindField(const std::string& key) const - { - auto i = find(m_keys->begin(), m_keys->end(), key); - return i - m_keys->begin(); - } - const std::vector* m_keys = nullptr; - std::vector m_values; - - friend class AvroDatum; - }; - - class AvroObjectContainerReader final { - public: - explicit AvroObjectContainerReader(Core::IO::BodyStream& stream); - - bool End() const { return m_eof; } - // Calling Next() will invalidates the previous AvroDatum returned by this function and all - // AvroDatums propagated from there. - AvroDatum Next(const Core::Context& context) { return NextImpl(m_objectSchema.get(), context); } - - private: - AvroDatum NextImpl(const AvroSchema* schema, const Core::Context& context); - - private: - std::unique_ptr m_reader; - std::unique_ptr m_objectSchema; - std::string m_syncMarker; - int64_t m_remainingObjectInCurrentBlock = 0; - bool m_eof = false; - }; - - class AvroStreamParser final : public Core::IO::BodyStream { - public: - explicit AvroStreamParser( - std::unique_ptr inner, - std::function progressCallback, - std::function errorCallback) - : m_inner(std::move(inner)), m_parser(*m_inner), - m_progressCallback(std::move(progressCallback)), m_errorCallback(std::move(errorCallback)) - { - } - - int64_t Length() const override { return -1; } - void Rewind() override { this->m_inner->Rewind(); } - - private: - size_t OnRead(uint8_t* buffer, size_t count, const Azure::Core::Context& context) override; - - private: - std::unique_ptr m_inner; - AvroObjectContainerReader m_parser; - std::function m_progressCallback; - std::function m_errorCallback; - AvroDatum::StringView m_parserBuffer; - }; - -}}}} // namespace Azure::Storage::Blobs::_detail diff --git a/source/libs/azure/src/td_avro_parser.cpp b/source/libs/azure/src/td_avro_parser.cpp index 485980e007..62bd3a8151 100644 --- a/source/libs/azure/src/td_avro_parser.cpp +++ b/source/libs/azure/src/td_avro_parser.cpp @@ -1,5 +1,5 @@ #if defined(USE_S3) -#include "avro_parser.hpp" +#include #include #include diff --git a/source/libs/azure/src/td_block_blob_client.cpp b/source/libs/azure/src/td_block_blob_client.cpp index b5a5c3c189..33ac774d0c 100644 --- a/source/libs/azure/src/td_block_blob_client.cpp +++ b/source/libs/azure/src/td_block_blob_client.cpp @@ -14,7 +14,7 @@ #include #endif -#include "avro_parser.hpp" +#include #include #include From 2390532bb01179536b00864127cfeaa423e0aa08 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 13:43:42 +0800 Subject: [PATCH 048/102] az/exception: catch all cpp exception to error code --- source/libs/azure/src/az.cpp | 42 +++++++++++++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index d4f93b9af4..1d693a6a32 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -201,7 +201,7 @@ _next: TAOS_RETURN(code); } -int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size) { +static int32_t azPutObjectFromFileOffsetImpl(const char *file, const char *object_name, int64_t offset, int64_t size) { int32_t code = 0; std::string endpointUrl = tsS3Hostname[0]; @@ -239,7 +239,25 @@ int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int TAOS_RETURN(code); } -int32_t 
azGetObjectBlockImpl(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { +int32_t azPutObjectFromFileOffset(const char *file, const char *object_name, int64_t offset, int64_t size) { + int32_t code = 0; + + try { + code = azPutObjectFromFileOffsetImpl(file, object_name, offset, size); + } catch (const std::exception &e) { + azError("%s: Reason Phrase: %s", __func__, e.what()); + + code = TAOS_SYSTEM_ERROR(EIO); + azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + + TAOS_RETURN(code); + } + + TAOS_RETURN(code); +} + +static int32_t azGetObjectBlockImpl(const char *object_name, int64_t offset, int64_t size, bool check, + uint8_t **ppBlock) { int32_t code = TSDB_CODE_SUCCESS; std::string accountName = tsS3AccessKeyId[0]; std::string accountKey = tsS3AccessKeySecret[0]; @@ -292,7 +310,8 @@ int32_t azGetObjectBlockImpl(const char *object_name, int64_t offset, int64_t si TAOS_RETURN(code); } -int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { +static int32_t azGetObjectBlockRetry(const char *object_name, int64_t offset, int64_t size, bool check, + uint8_t **ppBlock) { int32_t code = TSDB_CODE_SUCCESS; // May use an exponential backoff policy for retries with 503 @@ -312,6 +331,23 @@ _retry: TAOS_RETURN(code); } +int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, bool check, uint8_t **ppBlock) { + int32_t code = TSDB_CODE_SUCCESS; + + try { + code = azGetObjectBlockRetry(object_name, offset, size, check, ppBlock); + } catch (const std::exception &e) { + azError("%s: Reason Phrase: %s", __func__, e.what()); + + code = TAOS_SYSTEM_ERROR(EIO); + azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + + TAOS_RETURN(code); + } + + TAOS_RETURN(code); +} + void azDeleteObjectsByPrefix(const char *prefix) { const std::string delimiter = "/"; std::string accountName = tsS3AccessKeyId[0]; From 0fd275db66f603696bad1c96e5f22231d85c17fe Mon Sep 17 00:00:00 2001 From: dapan1121 Date: Tue, 22 Oct 2024 14:27:24 +0800 Subject: [PATCH 049/102] fix: query worker fetch response memory leak issue --- source/libs/qworker/src/qworker.c | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/source/libs/qworker/src/qworker.c b/source/libs/qworker/src/qworker.c index 7180c58404..e17027eb00 100644 --- a/source/libs/qworker/src/qworker.c +++ b/source/libs/qworker/src/qworker.c @@ -329,7 +329,7 @@ int32_t qwGetQueryResFromSink(QW_FPARAMS_DEF, SQWTaskCtx *ctx, int32_t *dataLen, if (len < 0) { QW_TASK_ELOG("invalid length from dsGetDataLength, length:%" PRId64 "", len); - QW_ERR_RET(TSDB_CODE_QRY_INVALID_INPUT); + QW_ERR_JRET(TSDB_CODE_QRY_INVALID_INPUT); } if (len == 0) { @@ -337,18 +337,18 @@ int32_t qwGetQueryResFromSink(QW_FPARAMS_DEF, SQWTaskCtx *ctx, int32_t *dataLen, code = dsGetDataBlock(ctx->sinkHandle, &output); if (code) { QW_TASK_ELOG("dsGetDataBlock failed, code:%x - %s", code, tstrerror(code)); - QW_ERR_RET(code); + QW_ERR_JRET(code); } QW_TASK_DLOG("no more data in sink and query end, fetched blocks %d rows %" PRId64, pOutput->numOfBlocks, pOutput->numOfRows); if (!ctx->dynamicTask) { - QW_ERR_RET(qwUpdateTaskStatus(QW_FPARAMS(), JOB_TASK_STATUS_SUCC, ctx->dynamicTask)); + QW_ERR_JRET(qwUpdateTaskStatus(QW_FPARAMS(), JOB_TASK_STATUS_SUCC, ctx->dynamicTask)); } if (NULL == pRsp) { - QW_ERR_RET(qwMallocFetchRsp(!ctx->localExec, len, &pRsp)); + QW_ERR_JRET(qwMallocFetchRsp(!ctx->localExec, len, 
&pRsp)); *pOutput = output; } else { pOutput->queryEnd = output.queryEnd; @@ -368,7 +368,7 @@ int32_t qwGetQueryResFromSink(QW_FPARAMS_DEF, SQWTaskCtx *ctx, int32_t *dataLen, *dataLen += len + PAYLOAD_PREFIX_LEN; *pRawDataLen += rawLen + PAYLOAD_PREFIX_LEN; - QW_ERR_RET(qwMallocFetchRsp(!ctx->localExec, *dataLen, &pRsp)); + QW_ERR_JRET(qwMallocFetchRsp(!ctx->localExec, *dataLen, &pRsp)); // set the serialize start position output.pData = pRsp->data + *dataLen - (len + PAYLOAD_PREFIX_LEN); @@ -380,7 +380,7 @@ int32_t qwGetQueryResFromSink(QW_FPARAMS_DEF, SQWTaskCtx *ctx, int32_t *dataLen, code = dsGetDataBlock(ctx->sinkHandle, &output); if (code) { QW_TASK_ELOG("dsGetDataBlock failed, code:%x - %s", code, tstrerror(code)); - QW_ERR_RET(code); + QW_ERR_JRET(code); } pOutput->queryEnd = output.queryEnd; @@ -399,7 +399,7 @@ int32_t qwGetQueryResFromSink(QW_FPARAMS_DEF, SQWTaskCtx *ctx, int32_t *dataLen, if (DS_BUF_EMPTY == pOutput->bufStatus && pOutput->queryEnd) { QW_TASK_DLOG("task all data fetched and done, fetched blocks %d rows %" PRId64, pOutput->numOfBlocks, pOutput->numOfRows); - QW_ERR_RET(qwUpdateTaskStatus(QW_FPARAMS(), JOB_TASK_STATUS_SUCC, ctx->dynamicTask)); + QW_ERR_JRET(qwUpdateTaskStatus(QW_FPARAMS(), JOB_TASK_STATUS_SUCC, ctx->dynamicTask)); break; } @@ -416,8 +416,11 @@ int32_t qwGetQueryResFromSink(QW_FPARAMS_DEF, SQWTaskCtx *ctx, int32_t *dataLen, } } +_return: + *rspMsg = pRsp; - return TSDB_CODE_SUCCESS; + + return code; } int32_t qwGetDeleteResFromSink(QW_FPARAMS_DEF, SQWTaskCtx *ctx, SDeleteRes *pRes) { @@ -877,10 +880,11 @@ int32_t qwProcessCQuery(QW_FPARAMS_DEF, SQWMsg *qwMsg) { break; } + qwFreeFetchRsp(rsp); + rsp = NULL; + if (code && QW_EVENT_RECEIVED(ctx, QW_EVENT_FETCH)) { QW_SET_EVENT_PROCESSED(ctx, QW_EVENT_FETCH); - qwFreeFetchRsp(rsp); - rsp = NULL; qwMsg->connInfo = ctx->dataConnInfo; code = qwBuildAndSendFetchRsp(ctx->fetchMsgType + 1, &qwMsg->connInfo, NULL, 0, code); From 0c4e863b56bc934934429b7fd79e490e73ef12b6 Mon Sep 17 00:00:00 2001 From: Shungang Li Date: Mon, 21 Oct 2024 17:03:51 +0800 Subject: [PATCH 050/102] fix: (last) iterator of nextRowIterGet --- source/dnode/vnode/src/tsdb/tsdbCache.c | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/source/dnode/vnode/src/tsdb/tsdbCache.c b/source/dnode/vnode/src/tsdb/tsdbCache.c index 89a51eb0f5..1ac65fb94d 100644 --- a/source/dnode/vnode/src/tsdb/tsdbCache.c +++ b/source/dnode/vnode/src/tsdb/tsdbCache.c @@ -3064,9 +3064,8 @@ static int32_t nextRowIterGet(CacheNextRowIter *pIter, TSDBROW **ppRow, bool *pI iMax[nMax] = i; max[nMax++] = pIter->input[i].pRow; - } else { - pIter->input[i].next = false; } + pIter->input[i].next = false; } } From fe32bf68334b88dbdc7883bfdacbff1a50b35152 Mon Sep 17 00:00:00 2001 From: Shungang Li Date: Tue, 22 Oct 2024 14:34:43 +0800 Subject: [PATCH 051/102] enh: add test case --- tests/parallel_test/cases.task | 10 ++++----- tests/system-test/2-query/td-32548.py | 32 +++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 5 deletions(-) create mode 100644 tests/system-test/2-query/td-32548.py diff --git a/tests/parallel_test/cases.task b/tests/parallel_test/cases.task index cfe88138ef..09216add82 100644 --- a/tests/parallel_test/cases.task +++ b/tests/parallel_test/cases.task @@ -223,6 +223,7 @@ ,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 2 ,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/agg_group_NotReturnValue.py -Q 3 ,,y,system-test,./pytest.sh python3 ./test.py -f 
2-query/agg_group_NotReturnValue.py -Q 4 +,,y,system-test,./pytest.sh python3 ./test.py -f 2-query/td-32548.py ,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreDnode.py -N 5 -M 3 -i False ,,y,system-test,./pytest.sh python3 ./test.py -f 3-enterprise/restore/restoreVnode.py -N 5 -M 3 -i False @@ -1254,7 +1255,7 @@ ,,y,script,./test.sh -f tsim/query/unionall_as_table.sim ,,y,script,./test.sh -f tsim/query/multi_order_by.sim ,,y,script,./test.sh -f tsim/query/sys_tbname.sim -,,y,script,./test.sh -f tsim/query/sort-pre-cols.sim +,,y,script,./test.sh -f tsim/query/sort-pre-cols.sim ,,y,script,./test.sh -f tsim/query/groupby.sim ,,y,script,./test.sh -f tsim/query/groupby_distinct.sim ,,y,script,./test.sh -f tsim/query/event.sim @@ -1262,7 +1263,7 @@ ,,y,script,./test.sh -f tsim/query/emptyTsRange.sim ,,y,script,./test.sh -f tsim/query/emptyTsRange_scl.sim ,,y,script,./test.sh -f tsim/query/partitionby.sim -,,y,script,./test.sh -f tsim/query/tableCount.sim +,,y,script,./test.sh -f tsim/query/tableCount.sim ,,y,script,./test.sh -f tsim/query/show_db_table_kind.sim ,,y,script,./test.sh -f tsim/query/bi_star_table.sim ,,y,script,./test.sh -f tsim/query/bi_tag_scan.sim @@ -1532,8 +1533,8 @@ ,,n,script,./test.sh -f tsim/tagindex/sma_and_tag_index.sim ,,y,script,./test.sh -f tsim/tagindex/indexOverflow.sim ,,y,script,./test.sh -f tsim/view/view.sim -,,y,script,./test.sh -f tsim/query/cache_last.sim -,,y,script,./test.sh -f tsim/query/const.sim +,,y,script,./test.sh -f tsim/query/cache_last.sim +,,y,script,./test.sh -f tsim/query/const.sim ,,y,script,./test.sh -f tsim/query/nestedJoinView.sim @@ -1566,4 +1567,3 @@ ,,n,docs-examples-test,bash rust.sh ,,n,docs-examples-test,bash go.sh ,,n,docs-examples-test,bash test_R.sh - diff --git a/tests/system-test/2-query/td-32548.py b/tests/system-test/2-query/td-32548.py new file mode 100644 index 0000000000..45611b8372 --- /dev/null +++ b/tests/system-test/2-query/td-32548.py @@ -0,0 +1,32 @@ +from util.cases import * +from util.sql import * + +class TDTestCase: + def init(self, conn, logSql, replicaVar=1): + tdLog.debug("start to execute %s" % __file__) + tdSql.init(conn.cursor(), True) + + tdSql.execute("drop database if exists td_32548;") + tdSql.execute("create database td_32548 cachemodel 'last_row' keep 3650,3650,3650;") + + def run(self): + tdSql.execute("use td_32548;") + + tdSql.execute("create table ntb1 (ts timestamp, ival int);") + tdSql.execute("insert into ntb1 values ('2024-07-08 17:54:49.675', 54);") + + tdSql.execute("flush database td_32548;") + + tdSql.execute("insert into ntb1 values ('2024-07-08 17:53:49.675', 53);") + tdSql.execute("insert into ntb1 values ('2024-07-08 17:52:49.675', 52);") + tdSql.execute("delete from ntb1 where ts = '2024-07-08 17:54:49.675';") + + tdSql.query('select last_row(ts) from ntb1;') + tdSql.checkData(0, 0, '2024-07-08 17:53:49.675') + + def stop(self): + tdSql.close() + tdLog.success(f"{__file__} successfully executed") + +tdCases.addLinux(__file__, TDTestCase()) +tdCases.addWindows(__file__, TDTestCase()) From 0fd63e05ee62dcbf13bdb82a951d125784e35b0d Mon Sep 17 00:00:00 2001 From: dmchen Date: Tue, 22 Oct 2024 06:55:53 +0000 Subject: [PATCH 052/102] fix/TS-5532-set-seperate-thread-update-status-fix-check --- source/dnode/mgmt/node_mgmt/src/dmMgmt.c | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/source/dnode/mgmt/node_mgmt/src/dmMgmt.c b/source/dnode/mgmt/node_mgmt/src/dmMgmt.c index 1d6bbfa098..9f1c292a90 100644 --- a/source/dnode/mgmt/node_mgmt/src/dmMgmt.c 
+++ b/source/dnode/mgmt/node_mgmt/src/dmMgmt.c @@ -269,7 +269,9 @@ void dmClearVars(SDnode *pDnode) { dError("failed to unlock status info lock"); return; } - taosThreadMutexDestroy(&pData->statusInfolock); + if (taosThreadMutexDestroy(&pData->statusInfolock) != 0) { + dError("failed to destroy status info lock"); + } memset(&pData->statusInfolock, 0, sizeof(pData->statusInfolock)); (void)taosThreadMutexDestroy(&pDnode->mutex); From 6dbee3d08b4ee196cc1a4e04971adad233e12a40 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 15:02:10 +0800 Subject: [PATCH 053/102] az/test: skip ut if no env vars found --- source/libs/azure/test/azTest.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/source/libs/azure/test/azTest.cpp b/source/libs/azure/test/azTest.cpp index c1be835150..0459cb5f6a 100644 --- a/source/libs/azure/test/azTest.cpp +++ b/source/libs/azure/test/azTest.cpp @@ -86,6 +86,11 @@ TEST(AzTest, InterfaceTest) { bool withcp = false; code = azInitEnv(); + if (code) { + std::cout << "ablob env init failed with: " << code << std::endl; + return; + } + GTEST_ASSERT_EQ(code, 0); GTEST_ASSERT_EQ(tsS3Enabled, 1); From dfe1d95c5d01252426032e3bbcdc82c9735a0524 Mon Sep 17 00:00:00 2001 From: dapan1121 Date: Tue, 22 Oct 2024 15:06:01 +0800 Subject: [PATCH 054/102] fix: quick reponse mode memory leak issue --- source/libs/qworker/src/qworker.c | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/source/libs/qworker/src/qworker.c b/source/libs/qworker/src/qworker.c index e17027eb00..7af834458e 100644 --- a/source/libs/qworker/src/qworker.c +++ b/source/libs/qworker/src/qworker.c @@ -475,6 +475,12 @@ int32_t qwQuickRspFetchReq(QW_FPARAMS_DEF, SQWTaskCtx *ctx, SQWMsg *qwMsg, int32 code = qwGetQueryResFromSink(QW_FPARAMS(), ctx, &dataLen, &rawLen, &rsp, &sOutput); } + if (code) { + qwFreeFetchRsp(rsp); + rsp = NULL; + dataLen = 0; + } + if (NULL == rsp && TSDB_CODE_SUCCESS == code) { return TSDB_CODE_SUCCESS; } From a60f798ada056727373b62959e0024c5c7502d08 Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Tue, 22 Oct 2024 15:19:25 +0800 Subject: [PATCH 055/102] add docs for fill value columns --- docs/en/14-reference/03-taos-sql/12-distinguished.md | 2 +- docs/zh/14-reference/03-taos-sql/12-distinguished.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/en/14-reference/03-taos-sql/12-distinguished.md b/docs/en/14-reference/03-taos-sql/12-distinguished.md index bfc9ca32c0..2374b762d4 100644 --- a/docs/en/14-reference/03-taos-sql/12-distinguished.md +++ b/docs/en/14-reference/03-taos-sql/12-distinguished.md @@ -80,7 +80,7 @@ These pseudocolumns occur after the aggregation clause. `FILL` clause is used to specify how to fill when there is data missing in any window, including: 1. NONE: No fill (the default fill mode) -2. VALUE: Fill with a fixed value, which should be specified together, for example `FILL(VALUE, 1.23)` Note: The value filled depends on the data type. For example, if you run FILL(VALUE 1.23) on an integer column, the value 1 is filled. If multiple columns in select list need to be filled, then in the fill clause there must be a fill value for each of these columns, for example, `SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`. +2. VALUE: Fill with a fixed value, which should be specified together, for example `FILL(VALUE, 1.23)` Note: The value filled depends on the data type. For example, if you run FILL(VALUE 1.23) on an integer column, the value 1 is filled. 
If multiple columns in select list need to be filled, then in the fill clause there must be a fill value for each of these columns, for example, `SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`. Note that only exprs in select list that contains normal cols need to specify fill value, exprs like `_wstart`, `_wend`, `_wduration`, `_wstart + 1a`, `now`, `1+1`, partition keys like tbname(when using partition by) don't need to specify fill value. But exprs like `timediff(last(ts), _wstart)` need to specify fill value. 3. PREV: Fill with the previous non-NULL value, `FILL(PREV)` 4. NULL: Fill with NULL, `FILL(NULL)` 5. LINEAR: Fill with the closest non-NULL value, `FILL(LINEAR)` diff --git a/docs/zh/14-reference/03-taos-sql/12-distinguished.md b/docs/zh/14-reference/03-taos-sql/12-distinguished.md index e149c2c82e..0b834dea29 100644 --- a/docs/zh/14-reference/03-taos-sql/12-distinguished.md +++ b/docs/zh/14-reference/03-taos-sql/12-distinguished.md @@ -76,7 +76,7 @@ window_clause: { FILL 语句指定某一窗口区间数据缺失的情况下的填充模式。填充模式包括以下几种: 1. 不进行填充:NONE(默认填充模式)。 -2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如:FILL(VALUE, 1.23)。这里需要注意,最终填充的值受由相应列的类型决定,如 FILL(VALUE, 1.23),相应列为 INT 类型,则填充值为 1, 若查询列表中有多列需要FILL, 则需要给每一个FILL列指定VALUE, 如`SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`。 +2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如:FILL(VALUE, 1.23)。这里需要注意,最终填充的值受由相应列的类型决定,如 FILL(VALUE, 1.23),相应列为 INT 类型,则填充值为 1, 若查询列表中有多列需要FILL, 则需要给每一个FILL列指定VALUE, 如`SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`, 注意, SELECT表达式中只有包含普通列时才需要指定FILL VALUE, 如`_wstart`, `_wstart+1a`, `now`, `1+1` 以及使用partition by时的partition key(如tbname)都不需要指定VALUE, 如`timediff(last(ts), _wstart)`则需要指定VALUE。 3. PREV 填充:使用前一个非 NULL 值填充数据。例如:FILL(PREV)。 4. NULL 填充:使用 NULL 填充数据。例如:FILL(NULL)。 5. LINEAR 填充:根据前后距离最近的非 NULL 值做线性插值填充。例如:FILL(LINEAR)。 From 3fd1ad6e824daa13278cab7405aa4e1f85bd7eea Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Tue, 22 Oct 2024 16:00:46 +0800 Subject: [PATCH 056/102] az/delete: catch all cpp exceptions --- source/libs/azure/src/az.cpp | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 1d693a6a32..831694356a 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -348,7 +348,7 @@ int32_t azGetObjectBlock(const char *object_name, int64_t offset, int64_t size, TAOS_RETURN(code); } -void azDeleteObjectsByPrefix(const char *prefix) { +static void azDeleteObjectsByPrefixImpl(const char *prefix) { const std::string delimiter = "/"; std::string accountName = tsS3AccessKeyId[0]; std::string accountKey = tsS3AccessKeySecret[0]; @@ -380,7 +380,16 @@ void azDeleteObjectsByPrefix(const char *prefix) { } catch (const Azure::Core::RequestFailedException &e) { azError("%s failed at line %d since %d(%s)", __func__, __LINE__, static_cast(e.StatusCode), e.ReasonPhrase.c_str()); - // azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(TAOS_SYSTEM_ERROR(EIO))); + } +} + +void azDeleteObjectsByPrefix(const char *prefix) { + int32_t code = TSDB_CODE_SUCCESS; + + try { + azDeleteObjectsByPrefixImpl(prefix); + } catch (const std::exception &e) { + azError("%s: Reason Phrase: %s", __func__, e.what()); } } From b1aebc9dc755b7fec9668e53cb9d81324e7dec3d Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Tue, 22 Oct 2024 19:23:57 +0800 Subject: [PATCH 057/102] add trace log --- source/libs/transport/src/transCli.c | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git 
a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index 18bedab5c7..136279beed 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -703,8 +703,9 @@ void cliHandleResp(SCliConn* conn) { code = cliBuildRespFromCont(pReq, &resp, pHead); STraceId* trace = &resp.info.traceId; - tGDebug("%s conn %p %s received from %s, local info:%s, len:%d, seq:%" PRId64 ", sid:%" PRId64 "", - CONN_GET_INST_LABEL(conn), conn, TMSG_INFO(resp.msgType), conn->dst, conn->src, pHead->msgLen, seq, qId); + tGDebug("%s conn %p %s received from %s, local info:%s, len:%d, seq:%" PRId64 ", sid:%" PRId64 ", code:%s", + CONN_GET_INST_LABEL(conn), conn, TMSG_INFO(resp.msgType), conn->dst, conn->src, pHead->msgLen, seq, qId, + tstrerror(pHead->code)); code = cliNotifyCb(conn, pReq, &resp); if (code == TSDB_CODE_RPC_ASYNC_IN_PROCESS) { @@ -2955,7 +2956,7 @@ int32_t cliNotifyCb(SCliConn* pConn, SCliReq* pReq, STransMsg* pResp) { SCliThrd* pThrd = pConn->hostThrd; STrans* pInst = pThrd->pInst; - if (pReq != NULL) { + if (pReq != NULL && pResp->code != TSDB_CODE_SUCCESS) { if (cliMayRetry(pConn, pReq, pResp)) { return TSDB_CODE_RPC_ASYNC_IN_PROCESS; } From 6cd211cfd78870051bc6851fd2e0bae2f061a95a Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Tue, 22 Oct 2024 19:38:42 +0800 Subject: [PATCH 058/102] add trace log --- source/libs/transport/src/transCli.c | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index 136279beed..9e0e6a0d24 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -2956,13 +2956,14 @@ int32_t cliNotifyCb(SCliConn* pConn, SCliReq* pReq, STransMsg* pResp) { SCliThrd* pThrd = pConn->hostThrd; STrans* pInst = pThrd->pInst; - if (pReq != NULL && pResp->code != TSDB_CODE_SUCCESS) { - if (cliMayRetry(pConn, pReq, pResp)) { - return TSDB_CODE_RPC_ASYNC_IN_PROCESS; + if (pReq != NULL) { + if (pResp->code != TSDB_CODE_SUCCESS) { + if (cliMayRetry(pConn, pReq, pResp)) { + return TSDB_CODE_RPC_ASYNC_IN_PROCESS; + } + cliMayResetRespCode(pReq, pResp); } - cliMayResetRespCode(pReq, pResp); - if (cliTryUpdateEpset(pReq, pResp)) { cliPerfLog_epset(pConn, pReq); } From bc7857c39f790252db9df0b1a0e20a43afe12828 Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Wed, 23 Oct 2024 08:31:05 +0800 Subject: [PATCH 059/102] add config --- include/util/tdef.h | 2 +- source/dnode/mgmt/node_mgmt/src/dmTransport.c | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/include/util/tdef.h b/include/util/tdef.h index a0bfdc83f5..b4cb1bdd1c 100644 --- a/include/util/tdef.h +++ b/include/util/tdef.h @@ -506,7 +506,7 @@ typedef enum ELogicConditionType { #ifdef WINDOWS #define TSDB_MAX_RPC_THREADS 4 // windows pipe only support 4 connections. 
#else -#define TSDB_MAX_RPC_THREADS 20 +#define TSDB_MAX_RPC_THREADS 50 #endif #define TSDB_QUERY_TYPE_NON_TYPE 0x00u // none type diff --git a/source/dnode/mgmt/node_mgmt/src/dmTransport.c b/source/dnode/mgmt/node_mgmt/src/dmTransport.c index 28d6113bba..fd593e0638 100644 --- a/source/dnode/mgmt/node_mgmt/src/dmTransport.c +++ b/source/dnode/mgmt/node_mgmt/src/dmTransport.c @@ -16,8 +16,8 @@ #define _DEFAULT_SOURCE #include "dmMgmt.h" #include "qworker.h" -#include "tversion.h" #include "tanal.h" +#include "tversion.h" static inline void dmSendRsp(SRpcMsg *pMsg) { if (rpcSendResponse(pMsg) != 0) { @@ -411,7 +411,7 @@ int32_t dmInitClient(SDnode *pDnode) { rpcInit.noDelayFp = rpcNoDelayMsg; - int32_t connLimitNum = tsNumOfRpcSessions / (tsNumOfRpcThreads * 3) / 2; + int32_t connLimitNum = tsNumOfRpcSessions / (tsNumOfRpcThreads * 3); connLimitNum = TMAX(connLimitNum, 10); connLimitNum = TMIN(connLimitNum, 500); From df48aea78ac78017b0136cac4ee698e1307e21b2 Mon Sep 17 00:00:00 2001 From: Yaming Pei Date: Wed, 23 Oct 2024 09:35:16 +0800 Subject: [PATCH 060/102] docs: odbc doc format adjustment --- docs/zh/14-reference/05-connector/50-odbc.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/zh/14-reference/05-connector/50-odbc.mdx b/docs/zh/14-reference/05-connector/50-odbc.mdx index 38dd88b86d..8518d2ffd7 100644 --- a/docs/zh/14-reference/05-connector/50-odbc.mdx +++ b/docs/zh/14-reference/05-connector/50-odbc.mdx @@ -109,7 +109,7 @@ WebSocket 连接方式除此之外还支持 Windows X64系统上运行的 32 位 ## 版本历史 -| taos_odbc版本 | 主要变化 | TDengine 版本 | +| taos_odbc 版本 | 主要变化 | TDengine 版本 | | :----------- | :-------------------------------------------------------------------------------------------------- | :---------------- | | v1.1.0 | 1. 支持视图功能;
2. 支持 VARBINARY/GEOMETRY 数据类型; | 3.3.3.0及更高版本 | | v1.0.2 | 支持 CP1252 字符编码; | 3.2.3.0及更高版本 | @@ -145,7 +145,7 @@ WebSocket 连接方式除此之外还支持 Windows X64系统上运行的 32 位 ## API 参考 -本节按功能分类汇总了 ODBC API,关于完整的 ODBC API 参考,请访问 http://msdn.microsoft.com/en-us/library/ms714177.aspx 的ODBC程序员参考页面。 +本节按功能分类汇总了 ODBC API,关于完整的 ODBC API 参考,请访问 http://msdn.microsoft.com/en-us/library/ms714177.aspx 的 ODBC 程序员参考页面。 ### 数据源和驱动程序管理 From ca3f98f83791ed42704ec37e92c9a897a467d87a Mon Sep 17 00:00:00 2001 From: dapan1121 Date: Wed, 23 Oct 2024 10:25:43 +0800 Subject: [PATCH 061/102] fix: vnode close caused crash issue --- source/dnode/mgmt/mgmt_vnode/src/vmInt.c | 6 +++--- source/libs/qworker/src/qwMsg.c | 4 ++++ source/libs/qworker/src/qworker.c | 2 ++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/source/dnode/mgmt/mgmt_vnode/src/vmInt.c b/source/dnode/mgmt/mgmt_vnode/src/vmInt.c index 3cf0382eba..20618dbdf3 100644 --- a/source/dnode/mgmt/mgmt_vnode/src/vmInt.c +++ b/source/dnode/mgmt/mgmt_vnode/src/vmInt.c @@ -213,13 +213,13 @@ void vmCloseVnode(SVnodeMgmt *pMgmt, SVnodeObj *pVnode, bool commitAndRemoveWal) taosQueueGetThreadId(pVnode->pApplyW.queue)); tMultiWorkerCleanup(&pVnode->pApplyW); - dInfo("vgId:%d, wait for vnode query queue:%p is empty", pVnode->vgId, pVnode->pQueryQ); - while (!taosQueueEmpty(pVnode->pQueryQ)) taosMsleep(10); - dInfo("vgId:%d, wait for vnode fetch queue:%p is empty, thread:%08" PRId64, pVnode->vgId, pVnode->pFetchQ, taosQueueGetThreadId(pVnode->pFetchQ)); while (!taosQueueEmpty(pVnode->pFetchQ)) taosMsleep(10); + dInfo("vgId:%d, wait for vnode query queue:%p is empty", pVnode->vgId, pVnode->pQueryQ); + while (!taosQueueEmpty(pVnode->pQueryQ)) taosMsleep(10); + tqNotifyClose(pVnode->pImpl->pTq); dInfo("vgId:%d, wait for vnode stream queue:%p is empty", pVnode->vgId, pVnode->pStreamQ); while (!taosQueueEmpty(pVnode->pStreamQ)) taosMsleep(10); diff --git a/source/libs/qworker/src/qwMsg.c b/source/libs/qworker/src/qwMsg.c index 0011d1c70c..69014d5b1c 100644 --- a/source/libs/qworker/src/qwMsg.c +++ b/source/libs/qworker/src/qwMsg.c @@ -502,6 +502,10 @@ int32_t qWorkerProcessQueryMsg(void *node, void *qWorkerMgmt, SRpcMsg *pMsg, int } int32_t qWorkerProcessCQueryMsg(void *node, void *qWorkerMgmt, SRpcMsg *pMsg, int64_t ts) { + if (NULL == node || NULL == qWorkerMgmt || NULL == pMsg) { + QW_ERR_RET(TSDB_CODE_QRY_INVALID_INPUT); + } + int32_t code = 0; int8_t status = 0; bool queryDone = false; diff --git a/source/libs/qworker/src/qworker.c b/source/libs/qworker/src/qworker.c index 7180c58404..c77d9e0dbd 100644 --- a/source/libs/qworker/src/qworker.c +++ b/source/libs/qworker/src/qworker.c @@ -1432,6 +1432,8 @@ void qWorkerDestroy(void **qWorkerMgmt) { while (0 == destroyed) { taosMsleep(2); } + + *qWorkerMgmt = NULL; } int32_t qWorkerGetStat(SReadHandle *handle, void *qWorkerMgmt, SQWorkerStat *pStat) { From bf2261bcff2dfe775e71ce3f293898db09abb264 Mon Sep 17 00:00:00 2001 From: dmchen Date: Wed, 23 Oct 2024 03:40:40 +0000 Subject: [PATCH 062/102] fix/TD-32621-add-log --- source/dnode/vnode/src/meta/metaOpen.c | 1 + 1 file changed, 1 insertion(+) diff --git a/source/dnode/vnode/src/meta/metaOpen.c b/source/dnode/vnode/src/meta/metaOpen.c index 8f2c0b5a5e..659ba3f777 100644 --- a/source/dnode/vnode/src/meta/metaOpen.c +++ b/source/dnode/vnode/src/meta/metaOpen.c @@ -496,6 +496,7 @@ void metaULock(SMeta *pMeta) { static void metaCleanup(SMeta **ppMeta) { SMeta *pMeta = *ppMeta; if (pMeta) { + metaInfo("vgId:%d meta clean up, path:%s", TD_VID(pMeta->pVnode), pMeta->path); if 
(pMeta->pEnv) metaAbort(pMeta); if (pMeta->pCache) metaCacheClose(pMeta); #ifdef BUILD_NO_CALL From 4df109b67ffcf4ec2ffdb5594e68e985da832611 Mon Sep 17 00:00:00 2001 From: dmchen Date: Thu, 17 Oct 2024 10:55:36 +0000 Subject: [PATCH 063/102] fix/TD-32594-set-stage-when-insert --- source/dnode/mnode/impl/src/mndTrans.c | 27 +++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/source/dnode/mnode/impl/src/mndTrans.c b/source/dnode/mnode/impl/src/mndTrans.c index 657601f5ae..4268d73746 100644 --- a/source/dnode/mnode/impl/src/mndTrans.c +++ b/source/dnode/mnode/impl/src/mndTrans.c @@ -474,6 +474,21 @@ static int32_t mndTransActionInsert(SSdb *pSdb, STrans *pTrans) { // pTrans->startFunc = 0; } + if (pTrans->stage == TRN_STAGE_COMMIT) { + pTrans->stage = TRN_STAGE_COMMIT_ACTION; + mInfo("trans:%d, stage from commit to commitAction since perform update action", pTrans->id); + } + + if (pTrans->stage == TRN_STAGE_ROLLBACK) { + pTrans->stage = TRN_STAGE_UNDO_ACTION; + mInfo("trans:%d, stage from rollback to undoAction since perform update action", pTrans->id); + } + + if (pTrans->stage == TRN_STAGE_PRE_FINISH) { + pTrans->stage = TRN_STAGE_FINISH; + mInfo("trans:%d, stage from pre-finish to finished since perform update action", pTrans->id); + } + return 0; } @@ -563,17 +578,17 @@ static int32_t mndTransActionUpdate(SSdb *pSdb, STrans *pOld, STrans *pNew) { if (pOld->stage == TRN_STAGE_COMMIT) { pOld->stage = TRN_STAGE_COMMIT_ACTION; - mTrace("trans:%d, stage from commit to commitAction since perform update action", pNew->id); + mInfo("trans:%d, stage from commit to commitAction since perform update action", pNew->id); } if (pOld->stage == TRN_STAGE_ROLLBACK) { pOld->stage = TRN_STAGE_UNDO_ACTION; - mTrace("trans:%d, stage from rollback to undoAction since perform update action", pNew->id); + mInfo("trans:%d, stage from rollback to undoAction since perform update action", pNew->id); } if (pOld->stage == TRN_STAGE_PRE_FINISH) { pOld->stage = TRN_STAGE_FINISH; - mTrace("trans:%d, stage from pre-finish to finished since perform update action", pNew->id); + mInfo("trans:%d, stage from pre-finish to finished since perform update action", pNew->id); } return 0; @@ -1295,7 +1310,7 @@ static void mndTransResetActions(SMnode *pMnode, STrans *pTrans, SArray *pArray) } } -// execute at bottom half +// execute in sync context static int32_t mndTransWriteSingleLog(SMnode *pMnode, STrans *pTrans, STransAction *pAction, bool topHalf) { if (pAction->rawWritten) return 0; if (topHalf) { @@ -1321,7 +1336,7 @@ static int32_t mndTransWriteSingleLog(SMnode *pMnode, STrans *pTrans, STransActi TAOS_RETURN(code); } -// execute at top half +// execute in trans context static int32_t mndTransSendSingleMsg(SMnode *pMnode, STrans *pTrans, STransAction *pAction, bool topHalf) { if (pAction->msgSent) return 0; if (mndCannotExecuteTransAction(pMnode, topHalf)) { @@ -1701,6 +1716,7 @@ static bool mndTransPerformRedoActionStage(SMnode *pMnode, STrans *pTrans, bool return continueExec; } +// in trans context static bool mndTransPerformCommitStage(SMnode *pMnode, STrans *pTrans, bool topHalf) { if (mndCannotExecuteTransAction(pMnode, topHalf)) return false; @@ -1775,6 +1791,7 @@ static bool mndTransPerformUndoActionStage(SMnode *pMnode, STrans *pTrans, bool return continueExec; } +// in trans context static bool mndTransPerformRollbackStage(SMnode *pMnode, STrans *pTrans, bool topHalf) { if (mndCannotExecuteTransAction(pMnode, topHalf)) return false; From d1d66896848dbede42490de2ad54ae1a2ef2b3fa 
Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Wed, 23 Oct 2024 15:15:03 +0800 Subject: [PATCH 064/102] add not fill exprs for fill operator --- include/libs/nodes/plannodes.h | 2 + source/libs/executor/inc/tfill.h | 11 +- source/libs/executor/src/filloperator.c | 32 ++- source/libs/executor/src/streamfilloperator.c | 2 +- source/libs/executor/src/tfill.c | 54 ++-- source/libs/executor/src/timesliceoperator.c | 3 +- source/libs/nodes/src/nodesCloneFuncs.c | 1 + source/libs/nodes/src/nodesCodeFuncs.c | 7 + source/libs/nodes/src/nodesMsgFuncs.c | 9 +- source/libs/nodes/src/nodesUtilFuncs.c | 2 + source/libs/planner/src/planLogicCreater.c | 248 +++++++++--------- source/libs/planner/src/planPhysiCreater.c | 6 + tests/system-test/2-query/fill_with_group.py | 6 +- 13 files changed, 223 insertions(+), 160 deletions(-) diff --git a/include/libs/nodes/plannodes.h b/include/libs/nodes/plannodes.h index bfe9e9555b..992948212c 100644 --- a/include/libs/nodes/plannodes.h +++ b/include/libs/nodes/plannodes.h @@ -325,6 +325,7 @@ typedef struct SFillLogicNode { SNode* pWStartTs; SNode* pValues; // SNodeListNode STimeWindow timeRange; + SNodeList* pFillNullExprs; } SFillLogicNode; typedef struct SSortLogicNode { @@ -663,6 +664,7 @@ typedef struct SFillPhysiNode { SNode* pWStartTs; // SColumnNode SNode* pValues; // SNodeListNode STimeWindow timeRange; + SNodeList* pFillNullExprs; } SFillPhysiNode; typedef SFillPhysiNode SStreamFillPhysiNode; diff --git a/source/libs/executor/inc/tfill.h b/source/libs/executor/inc/tfill.h index b06aa7d1c8..31ac5689f6 100644 --- a/source/libs/executor/inc/tfill.h +++ b/source/libs/executor/inc/tfill.h @@ -35,6 +35,7 @@ typedef struct SFillColInfo { SExprInfo* pExpr; bool notFillCol; // denote if this column needs fill operation SVariant fillVal; + bool fillNull; } SFillColInfo; typedef struct SFillLinearInfo { @@ -125,12 +126,14 @@ void taosFillSetInputDataBlock(struct SFillInfo* pFillInfo, const struc void taosFillUpdateStartTimestampInfo(SFillInfo* pFillInfo, int64_t ts); bool taosFillNotStarted(const SFillInfo* pFillInfo); SFillColInfo* createFillColInfo(SExprInfo* pExpr, int32_t numOfFillExpr, SExprInfo* pNotFillExpr, - int32_t numOfNotFillCols, const struct SNodeListNode* val); + int32_t numOfNotFillCols, SExprInfo* pFillNullExpr, int32_t numOfFillNullExprs, + const struct SNodeListNode* val); bool taosFillHasMoreResults(struct SFillInfo* pFillInfo); -int32_t taosCreateFillInfo(TSKEY skey, int32_t numOfFillCols, int32_t numOfNotFillCols, int32_t capacity, - SInterval* pInterval, int32_t fillType, struct SFillColInfo* pCol, int32_t slotId, - int32_t order, const char* id, SExecTaskInfo* pTaskInfo, SFillInfo** ppFillInfo); +int32_t taosCreateFillInfo(TSKEY skey, int32_t numOfFillCols, int32_t numOfNotFillCols, int32_t fillNullCols, + int32_t capacity, SInterval* pInterval, int32_t fillType, struct SFillColInfo* pCol, + int32_t slotId, int32_t order, const char* id, SExecTaskInfo* pTaskInfo, + SFillInfo** ppFillInfo); void* taosDestroyFillInfo(struct SFillInfo* pFillInfo); int32_t taosFillResultDataBlock(struct SFillInfo* pFillInfo, SSDataBlock* p, int32_t capacity); diff --git a/source/libs/executor/src/filloperator.c b/source/libs/executor/src/filloperator.c index d530382f7c..d7a55c86bb 100644 --- a/source/libs/executor/src/filloperator.c +++ b/source/libs/executor/src/filloperator.c @@ -53,6 +53,7 @@ typedef struct SFillOperatorInfo { SExprInfo* pExprInfo; int32_t numOfExpr; SExprSupp noFillExprSupp; + SExprSupp fillNullExprSupp; } 
SFillOperatorInfo; static void destroyFillOperatorInfo(void* param); @@ -140,6 +141,15 @@ void doApplyScalarCalculation(SOperatorInfo* pOperator, SSDataBlock* pBlock, int code = projectApplyFunctions(pNoFillSupp->pExprInfo, pInfo->pRes, pBlock, pNoFillSupp->pCtx, pNoFillSupp->numOfExprs, NULL); QUERY_CHECK_CODE(code, lino, _end); + + if (pInfo->fillNullExprSupp.pExprInfo) { + pInfo->pRes->info.rows = 0; + code = setInputDataBlock(&pInfo->fillNullExprSupp, pBlock, order, scanFlag, false); + QUERY_CHECK_CODE(code, lino, _end); + code = projectApplyFunctions(pInfo->fillNullExprSupp.pExprInfo, pInfo->pRes, pBlock, pInfo->fillNullExprSupp.pCtx, + pInfo->fillNullExprSupp.numOfExprs, NULL); + } + pInfo->pRes->info.id.groupId = pBlock->info.id.groupId; _end: @@ -334,10 +344,11 @@ void destroyFillOperatorInfo(void* param) { } static int32_t initFillInfo(SFillOperatorInfo* pInfo, SExprInfo* pExpr, int32_t numOfCols, SExprInfo* pNotFillExpr, - int32_t numOfNotFillCols, SNodeListNode* pValNode, STimeWindow win, int32_t capacity, - const char* id, SInterval* pInterval, int32_t fillType, int32_t order, - SExecTaskInfo* pTaskInfo) { - SFillColInfo* pColInfo = createFillColInfo(pExpr, numOfCols, pNotFillExpr, numOfNotFillCols, pValNode); + int32_t numOfNotFillCols, SExprInfo* pFillNullExpr, int32_t numOfFillNullExprs, + SNodeListNode* pValNode, STimeWindow win, int32_t capacity, const char* id, + SInterval* pInterval, int32_t fillType, int32_t order, SExecTaskInfo* pTaskInfo) { + SFillColInfo* pColInfo = + createFillColInfo(pExpr, numOfCols, pNotFillExpr, numOfNotFillCols, pFillNullExpr, numOfFillNullExprs, pValNode); if (!pColInfo) { qError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(terrno)); return terrno; @@ -348,8 +359,8 @@ static int32_t initFillInfo(SFillOperatorInfo* pInfo, SExprInfo* pExpr, int32_t // STimeWindow w = {0}; // getInitialStartTimeWindow(pInterval, startKey, &w, order == TSDB_ORDER_ASC); pInfo->pFillInfo = NULL; - int32_t code = taosCreateFillInfo(startKey, numOfCols, numOfNotFillCols, capacity, pInterval, fillType, pColInfo, - pInfo->primaryTsCol, order, id, pTaskInfo, &pInfo->pFillInfo); + int32_t code = taosCreateFillInfo(startKey, numOfCols, numOfNotFillCols, numOfFillNullExprs, capacity, pInterval, + fillType, pColInfo, pInfo->primaryTsCol, order, id, pTaskInfo, &pInfo->pFillInfo); if (code != TSDB_CODE_SUCCESS) { qError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); return code; @@ -455,6 +466,13 @@ int32_t createFillOperatorInfo(SOperatorInfo* downstream, SFillPhysiNode* pPhyFi initExprSupp(pNoFillSupp, pNoFillSupp->pExprInfo, pNoFillSupp->numOfExprs, &pTaskInfo->storageAPI.functionStore); QUERY_CHECK_CODE(code, lino, _error); + code = createExprInfo(pPhyFillNode->pFillNullExprs, NULL, &pInfo->fillNullExprSupp.pExprInfo, + &pInfo->fillNullExprSupp.numOfExprs); + QUERY_CHECK_CODE(code, lino, _error); + code = initExprSupp(&pInfo->fillNullExprSupp, pInfo->fillNullExprSupp.pExprInfo, pInfo->fillNullExprSupp.numOfExprs, + &pTaskInfo->storageAPI.functionStore); + QUERY_CHECK_CODE(code, lino, _error); + SInterval* pInterval = QUERY_NODE_PHYSICAL_PLAN_MERGE_ALIGNED_INTERVAL == downstream->operatorType ? 
&((SMergeAlignedIntervalAggOperatorInfo*)downstream->info)->intervalAggOperatorInfo->interval @@ -482,7 +500,9 @@ int32_t createFillOperatorInfo(SOperatorInfo* downstream, SFillPhysiNode* pPhyFi code = extractColMatchInfo(pPhyFillNode->pFillExprs, pPhyFillNode->node.pOutputDataBlockDesc, &numOfOutputCols, COL_MATCH_FROM_SLOT_ID, &pInfo->matchInfo); + QUERY_CHECK_CODE(code, lino, _error); code = initFillInfo(pInfo, pExprInfo, pInfo->numOfExpr, pNoFillSupp->pExprInfo, pNoFillSupp->numOfExprs, + pInfo->fillNullExprSupp.pExprInfo, pInfo->fillNullExprSupp.numOfExprs, (SNodeListNode*)pPhyFillNode->pValues, pPhyFillNode->timeRange, pResultInfo->capacity, pTaskInfo->id.str, pInterval, type, order, pTaskInfo); if (code != TSDB_CODE_SUCCESS) { diff --git a/source/libs/executor/src/streamfilloperator.c b/source/libs/executor/src/streamfilloperator.c index 826220581a..b7061fad97 100644 --- a/source/libs/executor/src/streamfilloperator.c +++ b/source/libs/executor/src/streamfilloperator.c @@ -1201,7 +1201,7 @@ static SStreamFillSupporter* initStreamFillSup(SStreamFillPhysiNode* pPhyFillNod QUERY_CHECK_CODE(code, lino, _end); pFillSup->pAllColInfo = createFillColInfo(pFillExprInfo, pFillSup->numOfFillCols, noFillExprInfo, numOfNotFillCols, - (const SNodeListNode*)(pPhyFillNode->pValues)); + NULL, 0, (const SNodeListNode*)(pPhyFillNode->pValues)); if (pFillSup->pAllColInfo == NULL) { code = terrno; lino = __LINE__; diff --git a/source/libs/executor/src/tfill.c b/source/libs/executor/src/tfill.c index cdfbd7a850..190b327522 100644 --- a/source/libs/executor/src/tfill.c +++ b/source/libs/executor/src/tfill.c @@ -39,22 +39,27 @@ static int32_t doSetVal(SColumnInfoData* pDstColInfoData, int32_t rowIndex, const SGroupKeys* pKey); static void setNotFillColumn(SFillInfo* pFillInfo, SColumnInfoData* pDstColInfo, int32_t rowIndex, int32_t colIdx) { - SRowVal* p = NULL; - if (pFillInfo->type == TSDB_FILL_NEXT) { - p = FILL_IS_ASC_FILL(pFillInfo) ? &pFillInfo->next : &pFillInfo->prev; + SFillColInfo* pCol = &pFillInfo->pFillCol[colIdx]; + if (pCol->fillNull) { + colDataSetNULL(pDstColInfo, rowIndex); } else { - p = FILL_IS_ASC_FILL(pFillInfo) ? &pFillInfo->prev : &pFillInfo->next; - } + SRowVal* p = NULL; + if (pFillInfo->type == TSDB_FILL_NEXT) { + p = FILL_IS_ASC_FILL(pFillInfo) ? &pFillInfo->next : &pFillInfo->prev; + } else { + p = FILL_IS_ASC_FILL(pFillInfo) ? 
&pFillInfo->prev : &pFillInfo->next; + } - SGroupKeys* pKey = taosArrayGet(p->pRowVal, colIdx); - if (!pKey) { - qError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(terrno)); - T_LONG_JMP(pFillInfo->pTaskInfo->env, terrno); - } - int32_t code = doSetVal(pDstColInfo, rowIndex, pKey); - if (code != TSDB_CODE_SUCCESS) { - qError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); - T_LONG_JMP(pFillInfo->pTaskInfo->env, code); + SGroupKeys* pKey = taosArrayGet(p->pRowVal, colIdx); + if (!pKey) { + qError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(terrno)); + T_LONG_JMP(pFillInfo->pTaskInfo->env, terrno); + } + int32_t code = doSetVal(pDstColInfo, rowIndex, pKey); + if (code != TSDB_CODE_SUCCESS) { + qError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code)); + T_LONG_JMP(pFillInfo->pTaskInfo->env, code); + } } } @@ -545,9 +550,10 @@ static int32_t taosNumOfRemainRows(SFillInfo* pFillInfo) { return pFillInfo->numOfRows - pFillInfo->index; } -int32_t taosCreateFillInfo(TSKEY skey, int32_t numOfFillCols, int32_t numOfNotFillCols, int32_t capacity, - SInterval* pInterval, int32_t fillType, struct SFillColInfo* pCol, int32_t primaryTsSlotId, - int32_t order, const char* id, SExecTaskInfo* pTaskInfo, SFillInfo** ppFillInfo) { +int32_t taosCreateFillInfo(TSKEY skey, int32_t numOfFillCols, int32_t numOfNotFillCols, int32_t fillNullCols, + int32_t capacity, SInterval* pInterval, int32_t fillType, struct SFillColInfo* pCol, + int32_t primaryTsSlotId, int32_t order, const char* id, SExecTaskInfo* pTaskInfo, + SFillInfo** ppFillInfo) { int32_t code = TSDB_CODE_SUCCESS; int32_t lino = 0; if (fillType == TSDB_FILL_NONE) { @@ -574,7 +580,7 @@ int32_t taosCreateFillInfo(TSKEY skey, int32_t numOfFillCols, int32_t numOfNotFi pFillInfo->type = fillType; pFillInfo->pFillCol = pCol; - pFillInfo->numOfCols = numOfFillCols + numOfNotFillCols; + pFillInfo->numOfCols = numOfFillCols + numOfNotFillCols + fillNullCols; pFillInfo->alloc = capacity; pFillInfo->id = id; pFillInfo->interval = *pInterval; @@ -761,10 +767,11 @@ _end: int64_t getFillInfoStart(struct SFillInfo* pFillInfo) { return pFillInfo->start; } SFillColInfo* createFillColInfo(SExprInfo* pExpr, int32_t numOfFillExpr, SExprInfo* pNotFillExpr, - int32_t numOfNoFillExpr, const struct SNodeListNode* pValNode) { + int32_t numOfNoFillExpr, SExprInfo* pFillNullExpr, int32_t numOfFillNullExpr, + const struct SNodeListNode* pValNode) { int32_t code = TSDB_CODE_SUCCESS; int32_t lino = 0; - SFillColInfo* pFillCol = taosMemoryCalloc(numOfFillExpr + numOfNoFillExpr, sizeof(SFillColInfo)); + SFillColInfo* pFillCol = taosMemoryCalloc(numOfFillExpr + numOfNoFillExpr + numOfFillNullExpr, sizeof(SFillColInfo)); if (pFillCol == NULL) { return NULL; } @@ -797,6 +804,13 @@ SFillColInfo* createFillColInfo(SExprInfo* pExpr, int32_t numOfFillExpr, SExprIn pFillCol[i + numOfFillExpr].notFillCol = true; } + for (int32_t i = 0; i < numOfFillNullExpr; ++i) { + SExprInfo* pExprInfo = &pFillNullExpr[i]; + pFillCol[i + numOfFillExpr + numOfNoFillExpr].pExpr = pExprInfo; + pFillCol[i + numOfFillExpr + numOfNoFillExpr].notFillCol = true; + pFillCol[i + numOfFillExpr + numOfNoFillExpr].fillNull = true; + } + return pFillCol; _end: diff --git a/source/libs/executor/src/timesliceoperator.c b/source/libs/executor/src/timesliceoperator.c index 2ea300ace8..6803f40da4 100644 --- a/source/libs/executor/src/timesliceoperator.c +++ b/source/libs/executor/src/timesliceoperator.c @@ -1147,7 +1147,8 @@ int32_t 
createTimeSliceOperatorInfo(SOperatorInfo* downstream, SPhysiNode* pPhyN pInfo->fillType = convertFillType(pInterpPhyNode->fillMode); initResultSizeInfo(&pOperator->resultInfo, 4096); - pInfo->pFillColInfo = createFillColInfo(pExprInfo, numOfExprs, NULL, 0, (SNodeListNode*)pInterpPhyNode->pFillValues); + pInfo->pFillColInfo = + createFillColInfo(pExprInfo, numOfExprs, NULL, 0, NULL, 0, (SNodeListNode*)pInterpPhyNode->pFillValues); QUERY_CHECK_NULL(pInfo->pFillColInfo, code, lino, _error, terrno); pInfo->pLinearInfo = NULL; diff --git a/source/libs/nodes/src/nodesCloneFuncs.c b/source/libs/nodes/src/nodesCloneFuncs.c index 5db8863311..c7914591f8 100644 --- a/source/libs/nodes/src/nodesCloneFuncs.c +++ b/source/libs/nodes/src/nodesCloneFuncs.c @@ -633,6 +633,7 @@ static int32_t logicFillCopy(const SFillLogicNode* pSrc, SFillLogicNode* pDst) { CLONE_NODE_FIELD(pWStartTs); CLONE_NODE_FIELD(pValues); COPY_OBJECT_FIELD(timeRange, sizeof(STimeWindow)); + CLONE_NODE_LIST_FIELD(pFillNullExprs); return TSDB_CODE_SUCCESS; } diff --git a/source/libs/nodes/src/nodesCodeFuncs.c b/source/libs/nodes/src/nodesCodeFuncs.c index 4b3ca97bbf..39c939419a 100644 --- a/source/libs/nodes/src/nodesCodeFuncs.c +++ b/source/libs/nodes/src/nodesCodeFuncs.c @@ -2843,6 +2843,7 @@ static const char* jkFillPhysiPlanWStartTs = "WStartTs"; static const char* jkFillPhysiPlanValues = "Values"; static const char* jkFillPhysiPlanStartTime = "StartTime"; static const char* jkFillPhysiPlanEndTime = "EndTime"; +static const char* jkFillPhysiPlanFillNullExprs = "FillNullExprs"; static int32_t physiFillNodeToJson(const void* pObj, SJson* pJson) { const SFillPhysiNode* pNode = (const SFillPhysiNode*)pObj; @@ -2869,6 +2870,9 @@ static int32_t physiFillNodeToJson(const void* pObj, SJson* pJson) { if (TSDB_CODE_SUCCESS == code) { code = tjsonAddIntegerToObject(pJson, jkFillPhysiPlanEndTime, pNode->timeRange.ekey); } + if (TSDB_CODE_SUCCESS == code) { + code = nodeListToJson(pJson, jkFillPhysiPlanFillNullExprs, pNode->pFillNullExprs); + } return code; } @@ -2898,6 +2902,9 @@ static int32_t jsonToPhysiFillNode(const SJson* pJson, void* pObj) { if (TSDB_CODE_SUCCESS == code) { code = tjsonGetBigIntValue(pJson, jkFillPhysiPlanEndTime, &pNode->timeRange.ekey); } + if (TSDB_CODE_SUCCESS == code) { + code = jsonToNodeList(pJson, jkFillPhysiPlanFillNullExprs, &pNode->pFillNullExprs); + } return code; } diff --git a/source/libs/nodes/src/nodesMsgFuncs.c b/source/libs/nodes/src/nodesMsgFuncs.c index 581c6222d2..3c36265093 100644 --- a/source/libs/nodes/src/nodesMsgFuncs.c +++ b/source/libs/nodes/src/nodesMsgFuncs.c @@ -3326,7 +3326,8 @@ enum { PHY_FILL_CODE_WSTART, PHY_FILL_CODE_VALUES, PHY_FILL_CODE_TIME_RANGE, - PHY_FILL_CODE_INPUT_TS_ORDER + PHY_FILL_CODE_INPUT_TS_ORDER, + PHY_FILL_CODE_FILL_NULL_EXPRS, }; static int32_t physiFillNodeToMsg(const void* pObj, STlvEncoder* pEncoder) { @@ -3351,6 +3352,9 @@ static int32_t physiFillNodeToMsg(const void* pObj, STlvEncoder* pEncoder) { if (TSDB_CODE_SUCCESS == code) { code = tlvEncodeObj(pEncoder, PHY_FILL_CODE_TIME_RANGE, timeWindowToMsg, &pNode->timeRange); } + if (TSDB_CODE_SUCCESS == code) { + code = tlvEncodeObj(pEncoder, PHY_FILL_CODE_FILL_NULL_EXPRS, nodeListToMsg, pNode->pFillNullExprs); + } return code; } @@ -3383,6 +3387,9 @@ static int32_t msgToPhysiFillNode(STlvDecoder* pDecoder, void* pObj) { case PHY_FILL_CODE_TIME_RANGE: code = tlvDecodeObjFromTlv(pTlv, msgToTimeWindow, (void**)&pNode->timeRange); break; + case PHY_FILL_CODE_FILL_NULL_EXPRS: + code = msgToNodeListFromTlv(pTlv, 
(void**)&pNode->pFillNullExprs); + break; default: break; } diff --git a/source/libs/nodes/src/nodesUtilFuncs.c b/source/libs/nodes/src/nodesUtilFuncs.c index c7d21d6fde..fc7bae072f 100644 --- a/source/libs/nodes/src/nodesUtilFuncs.c +++ b/source/libs/nodes/src/nodesUtilFuncs.c @@ -1469,6 +1469,7 @@ void nodesDestroyNode(SNode* pNode) { nodesDestroyNode(pLogicNode->pValues); nodesDestroyList(pLogicNode->pFillExprs); nodesDestroyList(pLogicNode->pNotFillExprs); + nodesDestroyList(pLogicNode->pFillNullExprs); break; } case QUERY_NODE_LOGIC_PLAN_SORT: { @@ -1634,6 +1635,7 @@ void nodesDestroyNode(SNode* pNode) { nodesDestroyList(pPhyNode->pNotFillExprs); nodesDestroyNode(pPhyNode->pWStartTs); nodesDestroyNode(pPhyNode->pValues); + nodesDestroyList(pPhyNode->pFillNullExprs); break; } case QUERY_NODE_PHYSICAL_PLAN_MERGE_SESSION: diff --git a/source/libs/planner/src/planLogicCreater.c b/source/libs/planner/src/planLogicCreater.c index 0d4da5c6f6..fdcb8df42f 100644 --- a/source/libs/planner/src/planLogicCreater.c +++ b/source/libs/planner/src/planLogicCreater.c @@ -1196,117 +1196,21 @@ static int32_t createWindowLogicNode(SLogicPlanContext* pCxt, SSelectStmt* pSele return TSDB_CODE_FAILED; } -typedef struct SPartFillExprsCtx { - bool hasFillCol; - bool hasPseudoWinCol; - bool hasGroupKeyCol; - SHashObj* pPseudoCols; - int32_t code; -} SPartFillExprsCtx; - -static EDealRes needFillValueImpl(SNode* pNode, void* pContext) { - SPartFillExprsCtx *pCtx = pContext; - if (QUERY_NODE_COLUMN == nodeType(pNode)) { - SColumnNode* pCol = (SColumnNode*)pNode; - if (COLUMN_TYPE_WINDOW_START == pCol->colType || COLUMN_TYPE_WINDOW_END == pCol->colType || - COLUMN_TYPE_WINDOW_DURATION == pCol->colType) { - pCtx->hasPseudoWinCol = true; - pCtx->code = taosHashPut(pCtx->pPseudoCols, pCol->colName, TSDB_COL_NAME_LEN, &pNode, POINTER_BYTES); - } else if (COLUMN_TYPE_GROUP_KEY == pCol->colType || COLUMN_TYPE_TBNAME == pCol->colType || COLUMN_TYPE_TAG == pCol->colType) { - pCtx->hasGroupKeyCol = true; - pCtx->code = taosHashPut(pCtx->pPseudoCols, pCol->colName, TSDB_COL_NAME_LEN, &pNode, POINTER_BYTES); - } else { - pCtx->hasFillCol = true; - return DEAL_RES_END; - } - } - return DEAL_RES_CONTINUE; -} - -static void needFillValue(SNode* pNode, SPartFillExprsCtx* pCtx) { - nodesWalkExpr(pNode, needFillValueImpl, pCtx); -} - typedef struct SCollectFillExprsCtx { SHashObj* pPseudoCols; - int32_t code; SNodeList* pFillExprs; SNodeList* pNotFillExprs; - bool skipFillCols; + bool collectAggFuncs; + SNodeList* pAggFuncCols; } SCollectFillExprsCtx; -static EDealRes collectFillExpr(SNode* pNode, void* pContext) { - SCollectFillExprsCtx* pCollectFillCtx = pContext; - SPartFillExprsCtx partFillCtx = {0}; - SNode* pNew = NULL; - partFillCtx.pPseudoCols = pCollectFillCtx->pPseudoCols; - needFillValue(pNode, &partFillCtx); - if (partFillCtx.code != TSDB_CODE_SUCCESS) { - pCollectFillCtx->code = partFillCtx.code; - return DEAL_RES_ERROR; - } - - if (partFillCtx.hasFillCol && !pCollectFillCtx->skipFillCols) { - if (nodeType(pNode) == QUERY_NODE_ORDER_BY_EXPR) { - pCollectFillCtx->code = nodesCloneNode(((SOrderByExprNode*)pNode)->pExpr, &pNew); - } else { - pCollectFillCtx->code = nodesCloneNode(pNode, &pNew); - } - if (pCollectFillCtx->code == TSDB_CODE_SUCCESS) { - pCollectFillCtx->code = nodesListMakeStrictAppend(&pCollectFillCtx->pFillExprs, pNew); - } - if (pCollectFillCtx->code != TSDB_CODE_SUCCESS) return DEAL_RES_ERROR; - return DEAL_RES_IGNORE_CHILD; - } - return DEAL_RES_CONTINUE; -} - -static int32_t 
collectFillExprs(SSelectStmt* pSelect, SNodeList** pFillExprs, SNodeList** pNotFillExprs) { - int32_t code = TSDB_CODE_SUCCESS; - SCollectFillExprsCtx collectFillCtx = {0}; - collectFillCtx.pPseudoCols = taosHashInit(4, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); - if (!collectFillCtx.pPseudoCols) return terrno; - - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - nodesWalkExprs(pSelect->pProjectionList, collectFillExpr, &collectFillCtx); - } - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - collectFillCtx.skipFillCols = true; - nodesWalkExpr(pSelect->pHaving, collectFillExpr, &collectFillCtx); - } - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - nodesWalkExprs(pSelect->pGroupByList, collectFillExpr, &collectFillCtx); - } - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - nodesWalkExprs(pSelect->pOrderByList, collectFillExpr, &collectFillCtx); - } - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - void* pIter = taosHashIterate(collectFillCtx.pPseudoCols, 0); - while (pIter) { - SNode* pNode = *(SNode**)pIter, *pNew = NULL; - collectFillCtx.code = nodesCloneNode(pNode, &pNew); - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - collectFillCtx.code = nodesListMakeStrictAppend(&collectFillCtx.pNotFillExprs, pNew); - } - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - pIter = taosHashIterate(collectFillCtx.pPseudoCols, pIter); - } else { - taosHashCancelIterate(collectFillCtx.pPseudoCols, pIter); - break; - } - } - if (collectFillCtx.code == TSDB_CODE_SUCCESS) { - TSWAP(*pFillExprs, collectFillCtx.pFillExprs); - TSWAP(*pNotFillExprs, collectFillCtx.pNotFillExprs); - } - } - if (collectFillCtx.code != TSDB_CODE_SUCCESS) { - if (collectFillCtx.pFillExprs) nodesDestroyList(collectFillCtx.pFillExprs); - if (collectFillCtx.pNotFillExprs) nodesDestroyList(collectFillCtx.pNotFillExprs); - } - taosHashCleanup(collectFillCtx.pPseudoCols); - return code; -} +typedef struct SWalkFillSubExprCtx { + bool hasFillCol; + bool hasPseudoWinCol; + bool hasGroupKeyCol; + SCollectFillExprsCtx* pCollectFillCtx; + int32_t code; +} SWalkFillSubExprCtx; static bool nodeAlreadyContained(SNodeList* pList, SNode* pNode) { SNode* pExpr = NULL; @@ -1318,6 +1222,116 @@ static bool nodeAlreadyContained(SNodeList* pList, SNode* pNode) { return false; } +static EDealRes needFillValueImpl(SNode* pNode, void* pContext) { + SWalkFillSubExprCtx *pCtx = pContext; + EDealRes res = DEAL_RES_CONTINUE; + if (QUERY_NODE_COLUMN == nodeType(pNode)) { + SColumnNode* pCol = (SColumnNode*)pNode; + if (COLUMN_TYPE_WINDOW_START == pCol->colType || COLUMN_TYPE_WINDOW_END == pCol->colType || + COLUMN_TYPE_WINDOW_DURATION == pCol->colType) { + pCtx->hasPseudoWinCol = true; + pCtx->code = + taosHashPut(pCtx->pCollectFillCtx->pPseudoCols, pCol->colName, TSDB_COL_NAME_LEN, &pNode, POINTER_BYTES); + } else if (COLUMN_TYPE_GROUP_KEY == pCol->colType || COLUMN_TYPE_TBNAME == pCol->colType || + COLUMN_TYPE_TAG == pCol->colType) { + pCtx->hasGroupKeyCol = true; + pCtx->code = + taosHashPut(pCtx->pCollectFillCtx->pPseudoCols, pCol->colName, TSDB_COL_NAME_LEN, &pNode, POINTER_BYTES); + } else { + pCtx->hasFillCol = true; + if (pCtx->pCollectFillCtx->collectAggFuncs) { + // Agg funcs has already been rewriten to columns by Interval + // Here, we return DEAL_RES_CONTINUE cause we need to collect all agg funcs + if (!nodeAlreadyContained(pCtx->pCollectFillCtx->pFillExprs, pNode) && + !nodeAlreadyContained(pCtx->pCollectFillCtx->pAggFuncCols, pNode)) + pCtx->code = 
nodesListMakeStrictAppend(&pCtx->pCollectFillCtx->pAggFuncCols, pNode); + } else { + res = DEAL_RES_END; + } + } + } + if (pCtx->code != TSDB_CODE_SUCCESS) res = DEAL_RES_ERROR; + return res; +} + +static void needFillValue(SNode* pNode, SWalkFillSubExprCtx* pCtx) { + nodesWalkExpr(pNode, needFillValueImpl, pCtx); +} + +static int32_t collectFillExpr(SNode* pNode, SCollectFillExprsCtx* pCollectFillCtx) { + SNode* pNew = NULL; + SWalkFillSubExprCtx collectFillSubExprCtx = { + .hasFillCol = false, .hasPseudoWinCol = false, .hasGroupKeyCol = false, .pCollectFillCtx = pCollectFillCtx}; + needFillValue(pNode, &collectFillSubExprCtx); + if (collectFillSubExprCtx.code != TSDB_CODE_SUCCESS) { + return collectFillSubExprCtx.code; + } + + if (collectFillSubExprCtx.hasFillCol && !pCollectFillCtx->collectAggFuncs) { + if (nodeType(pNode) == QUERY_NODE_ORDER_BY_EXPR) { + collectFillSubExprCtx.code = nodesCloneNode(((SOrderByExprNode*)pNode)->pExpr, &pNew); + } else { + collectFillSubExprCtx.code = nodesCloneNode(pNode, &pNew); + } + if (collectFillSubExprCtx.code == TSDB_CODE_SUCCESS) { + collectFillSubExprCtx.code = nodesListMakeStrictAppend(&pCollectFillCtx->pFillExprs, pNew); + } + } + return collectFillSubExprCtx.code; +} + +static int32_t collectFillExprs(SSelectStmt* pSelect, SNodeList** pFillExprs, SNodeList** pNotFillExprs, + SNodeList** pPossibleFillNullCols) { + int32_t code = TSDB_CODE_SUCCESS; + SCollectFillExprsCtx collectFillCtx = {0}; + SNode* pNode = NULL; + collectFillCtx.pPseudoCols = taosHashInit(4, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); + if (!collectFillCtx.pPseudoCols) return terrno; + + FOREACH(pNode, pSelect->pProjectionList) { + code = collectFillExpr(pNode, &collectFillCtx); + if (code != TSDB_CODE_SUCCESS) break; + } + collectFillCtx.collectAggFuncs = true; + if (code == TSDB_CODE_SUCCESS) { + code = collectFillExpr(pSelect->pHaving, &collectFillCtx); + } + if (code == TSDB_CODE_SUCCESS) { + FOREACH(pNode, pSelect->pOrderByList) { + code = collectFillExpr(pNode, &collectFillCtx); + if (code != TSDB_CODE_SUCCESS) break; + } + } + if (code == TSDB_CODE_SUCCESS) { + void* pIter = taosHashIterate(collectFillCtx.pPseudoCols, 0); + while (pIter) { + SNode* pNode = *(SNode**)pIter, *pNew = NULL; + code = nodesCloneNode(pNode, &pNew); + if (code == TSDB_CODE_SUCCESS) { + code = nodesListMakeStrictAppend(&collectFillCtx.pNotFillExprs, pNew); + } + if (code == TSDB_CODE_SUCCESS) { + pIter = taosHashIterate(collectFillCtx.pPseudoCols, pIter); + } else { + taosHashCancelIterate(collectFillCtx.pPseudoCols, pIter); + break; + } + } + if (code == TSDB_CODE_SUCCESS) { + TSWAP(*pFillExprs, collectFillCtx.pFillExprs); + TSWAP(*pNotFillExprs, collectFillCtx.pNotFillExprs); + TSWAP(*pPossibleFillNullCols, collectFillCtx.pAggFuncCols); + } + } + if (code != TSDB_CODE_SUCCESS) { + if (collectFillCtx.pFillExprs) nodesDestroyList(collectFillCtx.pFillExprs); + if (collectFillCtx.pNotFillExprs) nodesDestroyList(collectFillCtx.pNotFillExprs); + if (collectFillCtx.pAggFuncCols) nodesDestroyList(collectFillCtx.pAggFuncCols); + } + taosHashCleanup(collectFillCtx.pPseudoCols); + return code; +} + static int32_t createFillLogicNode(SLogicPlanContext* pCxt, SSelectStmt* pSelect, SLogicNode** pLogicNode) { if (NULL == pSelect->pWindow || QUERY_NODE_INTERVAL_WINDOW != nodeType(pSelect->pWindow) || NULL == ((SIntervalWindowNode*)pSelect->pWindow)->pFill) { @@ -1340,33 +1354,15 @@ static int32_t createFillLogicNode(SLogicPlanContext* pCxt, SSelectStmt* pSelect 
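  /* Note (reading of the hunk below, not part of the original patch): collectFillExprs() now
     returns a fourth list, pFillNullExprs, holding the aggregate-result columns gathered from
     HAVING/ORDER BY; createFillLogicNode() appends them to the fill node's targets with
     createColumnByRewriteExprs() instead of the old pWindowTargets copy loop. */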
pFill->node.resultDataOrder = pFill->node.requireDataOrder; pFill->node.inputTsOrder = TSDB_ORDER_ASC; - code = collectFillExprs(pSelect, &pFill->pFillExprs, &pFill->pNotFillExprs); + code = collectFillExprs(pSelect, &pFill->pFillExprs, &pFill->pNotFillExprs, &pFill->pFillNullExprs); if (TSDB_CODE_SUCCESS == code) { code = rewriteExprsForSelect(pFill->pFillExprs, pSelect, SQL_CLAUSE_FILL, NULL); } if (TSDB_CODE_SUCCESS == code) { code = rewriteExprsForSelect(pFill->pNotFillExprs, pSelect, SQL_CLAUSE_FILL, NULL); } - SNodeList* pWindowTargets = NULL; - if (TSDB_CODE_SUCCESS == code) { - SNode* pNode = NULL, *pNodeNew = NULL; - FOREACH(pNode, pCxt->pCurrRoot->pTargets) { - if (nodesEqualNode(pNode, pFillNode->pWStartTs)) continue; - if (nodeAlreadyContained(pFill->pFillExprs, pNode)) continue; - if (nodeAlreadyContained(pFill->pNotFillExprs, pNode)) continue; - pNodeNew = NULL; - code = nodesCloneNode(pNode, &pNodeNew); - if (TSDB_CODE_SUCCESS == code) { - code = nodesListMakeStrictAppend(&pWindowTargets, pNodeNew); - } - if (TSDB_CODE_SUCCESS != code) { - nodesDestroyList(pWindowTargets); - break; - } - } - } - if (TSDB_CODE_SUCCESS == code && LIST_LENGTH(pWindowTargets) > 0) { - code = nodesListMakeStrictAppendList(&pFill->pFillExprs, pWindowTargets); + if (TSDB_CODE_SUCCESS == code && LIST_LENGTH(pFill->pFillNullExprs) > 0) { + code = createColumnByRewriteExprs(pFill->pFillNullExprs, &pFill->node.pTargets); } if (TSDB_CODE_SUCCESS == code) { code = createColumnByRewriteExprs(pFill->pFillExprs, &pFill->node.pTargets); diff --git a/source/libs/planner/src/planPhysiCreater.c b/source/libs/planner/src/planPhysiCreater.c index a00eb05482..9a4a4f5055 100644 --- a/source/libs/planner/src/planPhysiCreater.c +++ b/source/libs/planner/src/planPhysiCreater.c @@ -2512,6 +2512,12 @@ static int32_t createFillPhysiNode(SPhysiPlanContext* pCxt, SNodeList* pChildren if (TSDB_CODE_SUCCESS == code) { code = addDataBlockSlots(pCxt, pFill->pNotFillExprs, pFill->node.pOutputDataBlockDesc); } + if (TSDB_CODE_SUCCESS == code && LIST_LENGTH(pFillNode->pFillNullExprs) > 0) { + code = setListSlotId(pCxt, pChildTupe->dataBlockId, -1, pFillNode->pFillNullExprs, &pFill->pFillNullExprs); + if (TSDB_CODE_SUCCESS == code ) { + code = addDataBlockSlots(pCxt, pFill->pFillNullExprs, pFill->node.pOutputDataBlockDesc); + } + } if (TSDB_CODE_SUCCESS == code) { code = setNodeSlotId(pCxt, pChildTupe->dataBlockId, -1, pFillNode->pWStartTs, &pFill->pWStartTs); diff --git a/tests/system-test/2-query/fill_with_group.py b/tests/system-test/2-query/fill_with_group.py index b48143db15..49c3b5dcf8 100644 --- a/tests/system-test/2-query/fill_with_group.py +++ b/tests/system-test/2-query/fill_with_group.py @@ -295,10 +295,14 @@ class TDTestCase: tdSql.query(sql, queryTimes=1) tdSql.checkRows(48) - sql = "SELECT count(*) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) having(timediff(last(ts), _wstart) >= 0)" + sql = "SELECT count(*) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) HAVING(timediff(last(ts), _wstart) >= 0)" tdSql.query(sql, queryTimes=1) tdSql.checkRows(60) + sql = "SELECT count(*) + 1 FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(NULL) HAVING(count(*) > 1)" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(0) + def run(self): self.prepareTestEnv() 
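        # Note (editorial annotation, not in the original patch): the FILL(NULL) + HAVING cases
        # added above expect fixed row counts (60 and 0); presumably they rely on the data set
        # created by prepareTestEnv().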
self.test_partition_by_with_interval_fill_prev_new_group_fill_error() From 0e6493d85d8b024b6d6bca8438ecc79b88d8045f Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 23 Oct 2024 15:55:33 +0800 Subject: [PATCH 065/102] enh(s3/default param value): new default values from s3 params --- include/util/tdef.h | 4 ++-- source/common/src/tglobal.c | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/include/util/tdef.h b/include/util/tdef.h index a0bfdc83f5..800cdfbd39 100644 --- a/include/util/tdef.h +++ b/include/util/tdef.h @@ -453,10 +453,10 @@ typedef enum ELogicConditionType { #define TSDB_DEFAULT_S3_CHUNK_SIZE (256 * 1024) #define TSDB_MIN_S3_KEEP_LOCAL (1 * 1440) // unit minute #define TSDB_MAX_S3_KEEP_LOCAL (365000 * 1440) -#define TSDB_DEFAULT_S3_KEEP_LOCAL (3650 * 1440) +#define TSDB_DEFAULT_S3_KEEP_LOCAL (365 * 1440) #define TSDB_MIN_S3_COMPACT 0 #define TSDB_MAX_S3_COMPACT 1 -#define TSDB_DEFAULT_S3_COMPACT 0 +#define TSDB_DEFAULT_S3_COMPACT 1 #define TSDB_DB_MIN_WAL_RETENTION_PERIOD -1 #define TSDB_REP_DEF_DB_WAL_RET_PERIOD 3600 diff --git a/source/common/src/tglobal.c b/source/common/src/tglobal.c index 488b94eb20..fb118cb75c 100644 --- a/source/common/src/tglobal.c +++ b/source/common/src/tglobal.c @@ -287,7 +287,7 @@ int32_t tsTtlUnit = 86400; int32_t tsTtlPushIntervalSec = 10; int32_t tsTrimVDbIntervalSec = 60 * 60; // interval of trimming db in all vgroups int32_t tsS3MigrateIntervalSec = 60 * 60; // interval of s3migrate db in all vgroups -bool tsS3MigrateEnabled = 1; +bool tsS3MigrateEnabled = 0; int32_t tsGrantHBInterval = 60; int32_t tsUptimeInterval = 300; // seconds char tsUdfdResFuncs[512] = ""; // udfd resident funcs that teardown when udfd exits From b0e762885d54f42737981208f9a338a639e27bc3 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 23 Oct 2024 16:46:44 +0800 Subject: [PATCH 066/102] fix(ablob/_azure_sdk): build sdk under linux and s3 only --- contrib/CMakeLists.txt | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt index d6ae5497f1..2a7bf84245 100644 --- a/contrib/CMakeLists.txt +++ b/contrib/CMakeLists.txt @@ -615,10 +615,9 @@ if (${BUILD_PCRE2}) add_subdirectory(pcre2 EXCLUDE_FROM_ALL) endif(${BUILD_PCRE2}) - -if(${TD_LINUX}) +if(${TD_LINUX} AND ${BUILD_WITH_S3}) add_subdirectory(azure-cmake EXCLUDE_FROM_ALL) -endif(${TD_LINUX}) +endif() # ================================================================================================ # Build test From cd470aa2a68df8ff0b7345da79e2fd57cd22ebc1 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 23 Oct 2024 17:00:56 +0800 Subject: [PATCH 067/102] s3/test: default duration to 100 with replicas --- tests/pytest/util/sql.py | 2 +- tests/script/tsim/db/create_all_options.sim | 6 ++++++ tests/system-test/2-query/db.py | 6 ++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/pytest/util/sql.py b/tests/pytest/util/sql.py index 3bc784063e..1d3333264a 100644 --- a/tests/pytest/util/sql.py +++ b/tests/pytest/util/sql.py @@ -104,7 +104,7 @@ class TDSql: for k, v in kwargs.items(): s += f" {k} {v}" if "duration" not in kwargs: - s += " duration 300" + s += " duration 100" self.cursor.execute(s) s = f'use {dbname}' self.cursor.execute(s) diff --git a/tests/script/tsim/db/create_all_options.sim b/tests/script/tsim/db/create_all_options.sim index e402223d93..4b39829b24 100644 --- a/tests/script/tsim/db/create_all_options.sim +++ b/tests/script/tsim/db/create_all_options.sim @@ -128,6 +128,12 @@ endi if $data21_db 
!= 3000 then # wal_fsync_period return -1 endi +if $data30_db != 525600m then # s3_keeplocal + return -1 +endi +if $data31_db != 1 then # s3_compact + return -1 +endi sql drop database db diff --git a/tests/system-test/2-query/db.py b/tests/system-test/2-query/db.py index 1964cea51f..cd7c5bd26e 100644 --- a/tests/system-test/2-query/db.py +++ b/tests/system-test/2-query/db.py @@ -62,6 +62,12 @@ class TDTestCase: tdSql.query("show dnode 1 variables like '____debugFlag'") tdSql.checkRows(2) + tdSql.query("show dnode 1 variables like 's3MigrateEna%'") + tdSql.checkRows(1) + tdSql.checkData(0, 0, 1) + tdSql.checkData(0, 1, 's3MigrateEnabled') + tdSql.checkData(0, 2, 0) + def threadTest(self, threadID): print(f"Thread {threadID} starting...") tdsqln = tdCom.newTdSql() From 8c3ec327ba491bf9e5a0bb1e325dcaa819adfd0a Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Tue, 22 Oct 2024 15:28:33 +0800 Subject: [PATCH 068/102] fix drop table with tsma crash --- source/dnode/mnode/impl/src/mndStb.c | 32 +++++++++++++++++++++------- tests/system-test/2-query/tsma.py | 13 +++++------ 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/source/dnode/mnode/impl/src/mndStb.c b/source/dnode/mnode/impl/src/mndStb.c index b8cf72cd9e..2500d1ef40 100644 --- a/source/dnode/mnode/impl/src/mndStb.c +++ b/source/dnode/mnode/impl/src/mndStb.c @@ -4076,7 +4076,7 @@ typedef struct SMDropTbDbInfo { typedef struct SMDropTbTsmaInfo { char tsmaResTbDbFName[TSDB_DB_FNAME_LEN]; - char tsmaResTbNamePrefix[TSDB_TABLE_NAME_LEN]; + char tsmaResTbNamePrefix[TSDB_TABLE_FNAME_LEN]; int32_t suid; SMDropTbDbInfo dbInfo; // reference to DbInfo in pDbMap } SMDropTbTsmaInfo; @@ -4207,6 +4207,7 @@ static int32_t mndCreateDropTbsTxnPrepare(SRpcMsg *pRsp, SMndDropTbsWithTsmaCtx SMnode *pMnode = pRsp->info.node; STrans *pTrans = mndTransCreate(pMnode, TRN_POLICY_RETRY, TRN_CONFLICT_GLOBAL, pRsp, "drop-tbs"); mndTransSetChangeless(pTrans); + mndTransSetSerial(pTrans); if (pTrans == NULL) { code = TSDB_CODE_MND_RETURN_VALUE_NULL; if (terrno != 0) code = terrno; @@ -4294,6 +4295,18 @@ static int32_t mndDropTbAdd(SMnode *pMnode, SHashObj *pVgHashMap, const SVgroupI return 0; } +int vgInfoCmp(const void* lp, const void* rp) { + SVgroupInfo* pLeft = (SVgroupInfo*)lp; + SVgroupInfo* pRight = (SVgroupInfo*)rp; + if (pLeft->hashBegin < pRight->hashBegin) { + return -1; + } else if (pLeft->hashBegin > pRight->hashBegin) { + return 1; + } + + return 0; +} + static int32_t mndGetDbVgInfoForTsma(SMnode *pMnode, const char *dbname, SMDropTbTsmaInfo *pInfo) { int32_t code = 0; SDbObj *pDb = mndAcquireDb(pMnode, dbname); @@ -4308,6 +4321,7 @@ static int32_t mndGetDbVgInfoForTsma(SMnode *pMnode, const char *dbname, SMDropT goto _end; } mndBuildDBVgroupInfo(pDb, pMnode, pInfo->dbInfo.dbVgInfos); + taosArraySort(pInfo->dbInfo.dbVgInfos, vgInfoCmp); pInfo->dbInfo.hashPrefix = pDb->cfg.hashPrefix; pInfo->dbInfo.hashSuffix = pDb->cfg.hashSuffix; @@ -4380,9 +4394,8 @@ static int32_t mndDropTbAddTsmaResTbsForSingleVg(SMnode *pMnode, SMndDropTbsWith if (pInfos) { SMDropTbTsmaInfo info = {0}; int32_t len = sprintf(buf, "%s", pSma->name); - len = taosCreateMD5Hash(buf, len); sprintf(info.tsmaResTbDbFName, "%s", pSma->db); - snprintf(info.tsmaResTbNamePrefix, TSDB_TABLE_NAME_LEN, "%s", buf); + snprintf(info.tsmaResTbNamePrefix, TSDB_TABLE_FNAME_LEN, "%s", buf); SMDropTbDbInfo *pDbInfo = taosHashGet(pCtx->pDbMap, pSma->db, TSDB_DB_FNAME_LEN); info.suid = pSma->dstTbUid; if (!pDbInfo) { @@ -4417,14 +4430,17 @@ static int32_t 
mndDropTbAddTsmaResTbsForSingleVg(SMnode *pMnode, SMndDropTbsWith SMDropTbTsmaInfos *pInfos = taosHashGet(pCtx->pTsmaMap, &pTb->suid, sizeof(pTb->suid)); SArray *pVgInfos = NULL; - char buf[TSDB_TABLE_FNAME_LEN]; + char buf[TSDB_TABLE_FNAME_LEN + TSDB_TABLE_NAME_LEN + 1]; + char resTbFullName[TSDB_TABLE_FNAME_LEN + 1] = {0}; for (int32_t j = 0; j < pInfos->pTsmaInfos->size; ++j) { SMDropTbTsmaInfo *pInfo = taosArrayGet(pInfos->pTsmaInfos, j); - int32_t len = sprintf(buf, "%s.%s_%s", pInfo->tsmaResTbDbFName, pInfo->tsmaResTbNamePrefix, pTb->name); - uint32_t hashVal = - taosGetTbHashVal(buf, len, pInfo->dbInfo.hashMethod, pInfo->dbInfo.hashPrefix, pInfo->dbInfo.hashSuffix); + int32_t len = sprintf(buf, "%s_%s", pInfo->tsmaResTbNamePrefix, pTb->name); + len = taosCreateMD5Hash(buf, len); + len = snprintf(resTbFullName, TSDB_TABLE_FNAME_LEN + 1, "%s.%s", pInfo->tsmaResTbDbFName, buf); + uint32_t hashVal = taosGetTbHashVal(resTbFullName, len, pInfo->dbInfo.hashMethod, pInfo->dbInfo.hashPrefix, + pInfo->dbInfo.hashSuffix); const SVgroupInfo *pVgInfo = taosArraySearch(pInfo->dbInfo.dbVgInfos, &hashVal, vgHashValCmp, TD_EQ); - void *p = taosStrdup(buf + strlen(pInfo->tsmaResTbDbFName) + TSDB_NAME_DELIMITER_LEN); + void *p = taosStrdup(resTbFullName + strlen(pInfo->tsmaResTbDbFName) + TSDB_NAME_DELIMITER_LEN); if (taosArrayPush(pCtx->pResTbNames, &p) == NULL) { code = terrno; goto _end; diff --git a/tests/system-test/2-query/tsma.py b/tests/system-test/2-query/tsma.py index f05398600b..976e190e44 100644 --- a/tests/system-test/2-query/tsma.py +++ b/tests/system-test/2-query/tsma.py @@ -1338,15 +1338,16 @@ class TDTestCase: self.create_tsma('tsma1', 'test', 'meters', ['avg(c1)', 'avg(c2)'], '5m') tdSql.execute('alter table test.t0 ttl 2', queryTimes=1) tdSql.execute('flush database test') - self.wait_query('show test.tables like "%t0"', 0, wait_query_seconds) + res_tb = TSMAQCBuilder().md5('1.test.tsma1_t0') + self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds) # test drop multi tables tdSql.execute('drop table test.t3, test.t4') - self.wait_query('show test.tables like "%t3"', 0, wait_query_seconds) - self.wait_query('show test.tables like "%t4"', 0, wait_query_seconds) - - tdSql.query('show test.tables like "%tsma%"') - tdSql.checkRows(0) + res_tb = TSMAQCBuilder().md5('1.test.tsma1_t3') + self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds) + res_tb = TSMAQCBuilder().md5('1.test.tsma1_t4') + self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds) + time.sleep(9999999) # test drop stream tdSql.error('drop stream tsma1', -2147471088) ## TSMA must be dropped first From 4b37a1092dc2aa52c49111865cf58bebb727e458 Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Wed, 23 Oct 2024 17:16:39 +0800 Subject: [PATCH 069/102] fix limit --- source/libs/transport/src/transCli.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index 9e0e6a0d24..73037dc381 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -855,7 +855,7 @@ static int32_t cliGetConnFromPool(SCliThrd* pThrd, const char* key, SCliConn** p } if (QUEUE_IS_EMPTY(&plist->conns)) { - if (plist->size >= pInst->connLimitNum) { + if (plist->totaSize >= pInst->connLimitNum) { return TSDB_CODE_RPC_MAX_SESSIONS; } plist->totaSize += 1; From 
ebaef4a537ba6240ec5092357f670ddf9211759f Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Wed, 23 Oct 2024 17:29:28 +0800 Subject: [PATCH 070/102] fix limit --- source/libs/transport/src/transCli.c | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index 73037dc381..2214a4cc43 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -27,7 +27,7 @@ typedef struct { typedef struct SConnList { queue conns; int32_t size; - int32_t totaSize; + int32_t totalSize; } SConnList; typedef struct { @@ -855,10 +855,10 @@ static int32_t cliGetConnFromPool(SCliThrd* pThrd, const char* key, SCliConn** p } if (QUEUE_IS_EMPTY(&plist->conns)) { - if (plist->totaSize >= pInst->connLimitNum) { + if (plist->totalSize >= pInst->connLimitNum) { return TSDB_CODE_RPC_MAX_SESSIONS; } - plist->totaSize += 1; + plist->totalSize += 1; return TSDB_CODE_RPC_NETWORK_BUSY; } @@ -1249,7 +1249,7 @@ static void cliHandleException(SCliConn* conn) { cliDestroyAllQidFromThrd(conn); QUEUE_REMOVE(&conn->q); if (conn->list) { - conn->list->totaSize -= 1; + conn->list->totalSize -= 1; conn->list = NULL; } @@ -3739,7 +3739,7 @@ static FORCE_INLINE int8_t shouldSWitchToOtherConn(SCliConn* pConn, char* key) { tTrace("conn %p get list %p from pool for key:%s", pConn, pConn->list, key); } } - if (pConn->list && pConn->list->totaSize >= pInst->connLimitNum / 4) { + if (pConn->list && pConn->list->totalSize >= pInst->connLimitNum / 4) { tWarn("%s conn %p try to remove timeout msg since too many conn created", transLabel(pInst), pConn); if (cliConnRemoveTimeoutMsg(pConn)) { From 12b21258372c2a01e1b1620fc285b98cda1df45a Mon Sep 17 00:00:00 2001 From: Alex Duan <51781608+DuanKuanJun@users.noreply.github.com> Date: Wed, 23 Oct 2024 18:12:30 +0800 Subject: [PATCH 071/102] Update 08-taos-cli.md taos-CLI --- docs/zh/14-reference/02-tools/08-taos-cli.md | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/docs/zh/14-reference/02-tools/08-taos-cli.md b/docs/zh/14-reference/02-tools/08-taos-cli.md index c388e7edda..2255aed9be 100644 --- a/docs/zh/14-reference/02-tools/08-taos-cli.md +++ b/docs/zh/14-reference/02-tools/08-taos-cli.md @@ -4,11 +4,11 @@ sidebar_label: taos toc_max_heading_level: 4 --- -TDengine 命令行程序(以下简称 TDengine CLI)是用户操作 TDengine 实例并与之交互的最简洁最常用的方式。 使用前需要安装 TDengine Server 安装包或 TDengine Client 安装包。 +TDengine 命令行程序(以下简称 TDengine CLI)是用户操作 TDengine 实例并与之交互的最简洁最常用工具。 使用前需要安装 TDengine Server 安装包或 TDengine Client 安装包。 ## 启动 -要进入 TDengine CLI,您只要在终端执行 `taos` 即可。 +要进入 TDengine CLI,您在终端执行 `taos` 即可。 ```bash taos @@ -23,6 +23,11 @@ taos> ``` 进入 TDengine CLI 后,你可执行各种 SQL 语句,包括插入、查询以及各种管理命令。 +退出 TDengine CLI, 执行 `q` 或 `quit` 或 `exit` 回车即可 +```shell +taos> quit +``` + ## 执行 SQL 脚本 @@ -66,7 +71,7 @@ taos> SET MAX_BINARY_DISPLAY_WIDTH ; - -l PKTLEN: 网络测试时使用的测试包大小 - -n NETROLE: 网络连接测试时的测试范围,默认为 `client`, 可选值为 `client`、`server` - -N PKTNUM: 网络测试时使用的测试包数量 -- -r: 将时间输出出无符号 64 位整数类型(即 C 语音中 uint64_t) +- -r: 将时间列转化为无符号 64 位整数类型输出(即 C 语音中 uint64_t) - -R: 使用 RESTful 模式连接服务端 - -s COMMAND: 以非交互模式执行的 SQL 命令 - -t: 测试服务端启动状态,状态同-k @@ -84,6 +89,13 @@ taos -h h1.taos.com -s "use db; show tables;" 也可以通过配置文件中的参数设置来控制 TDengine CLI 的行为。可用配置参数请参考[客户端配置](../../components/taosc) +## TDengine CLI TAB 键补全 + +- TAB 键前为空命令状态下按 TAB 键,会列出 TDengine CLI 支持的所有命令 +- TAB 键前为空格状态下按 TAB 键,会列出这个位置可以出现的命令词,再次按 TAB 键切换为下一个可以出现的命令词 +- TAB 键前有字母,会查找这个位置可以出现的所有命令词并前缀与前面字母相同的命令词,再次按 TAB 键切换为下一个可以出现的命令词 +- 输入反斜杠 `\` + 
TAB 键, 会自动补全为列式显示模式的命令词 `\G;` + ## TDengine CLI 小技巧 - 可以使用上下光标键查看历史输入的指令 @@ -91,7 +103,6 @@ taos -h h1.taos.com -s "use db; show tables;" - Ctrl+C 中止正在进行中的查询 - 执行 `RESET QUERY CACHE` 可清除本地表 Schema 的缓存 - 批量执行 SQL 语句。可以将一系列的 TDengine CLI 命令(以英文 ; 结尾,每个 SQL 语句为一行)按行存放在文件里,在 TDengine CLI 里执行命令 `source ` 自动执行该文件里所有的 SQL 语句 -- 输入 `q` 或 `quit` 或 `exit` 回车,可以退出 TDengine CLI ## TDengine CLI 导出查询结果到文件中 From a7eb3c2607f1480fe537cab4fdea7c7c20059716 Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Wed, 23 Oct 2024 18:19:37 +0800 Subject: [PATCH 072/102] fix filloperator memory leak --- source/libs/executor/src/filloperator.c | 1 + 1 file changed, 1 insertion(+) diff --git a/source/libs/executor/src/filloperator.c b/source/libs/executor/src/filloperator.c index d7a55c86bb..1595c90419 100644 --- a/source/libs/executor/src/filloperator.c +++ b/source/libs/executor/src/filloperator.c @@ -337,6 +337,7 @@ void destroyFillOperatorInfo(void* param) { pInfo->pFinalRes = NULL; cleanupExprSupp(&pInfo->noFillExprSupp); + cleanupExprSupp(&pInfo->fillNullExprSupp); taosMemoryFreeClear(pInfo->p); taosArrayDestroy(pInfo->matchInfo.pList); From 1e63510dfdf2671b125d6eaacdfea9546c58d38a Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 23 Oct 2024 18:38:15 +0800 Subject: [PATCH 073/102] s3/mnd: fix default value resetting --- source/dnode/mnode/impl/src/mndDb.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/dnode/mnode/impl/src/mndDb.c b/source/dnode/mnode/impl/src/mndDb.c index 7c42564f4c..aed00af3c1 100644 --- a/source/dnode/mnode/impl/src/mndDb.c +++ b/source/dnode/mnode/impl/src/mndDb.c @@ -583,7 +583,7 @@ static void mndSetDefaultDbCfg(SDbCfg *pCfg) { if (pCfg->tsdbPageSize <= 0) pCfg->tsdbPageSize = TSDB_DEFAULT_TSDB_PAGESIZE; if (pCfg->s3ChunkSize <= 0) pCfg->s3ChunkSize = TSDB_DEFAULT_S3_CHUNK_SIZE; if (pCfg->s3KeepLocal <= 0) pCfg->s3KeepLocal = TSDB_DEFAULT_S3_KEEP_LOCAL; - if (pCfg->s3Compact <= 0) pCfg->s3Compact = TSDB_DEFAULT_S3_COMPACT; + if (pCfg->s3Compact < 0) pCfg->s3Compact = TSDB_DEFAULT_S3_COMPACT; if (pCfg->withArbitrator < 0) pCfg->withArbitrator = TSDB_DEFAULT_DB_WITH_ARBITRATOR; if (pCfg->encryptAlgorithm < 0) pCfg->encryptAlgorithm = TSDB_DEFAULT_ENCRYPT_ALGO; } From 19bd1e933b318324f9f6b2d8a27222d0a5d7726f Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Wed, 23 Oct 2024 18:52:29 +0800 Subject: [PATCH 074/102] use 100 from army/frame/sql.py duration --- tests/army/frame/sql.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/army/frame/sql.py b/tests/army/frame/sql.py index 8b99219524..b4bc31b6a8 100644 --- a/tests/army/frame/sql.py +++ b/tests/army/frame/sql.py @@ -73,7 +73,7 @@ class TDSql: for k, v in kwargs.items(): s += f" {k} {v}" if "duration" not in kwargs: - s += " duration 300" + s += " duration 100" self.cursor.execute(s) s = f'use {dbname}' self.cursor.execute(s) From 58bcde208376250bea358dc648236d0576331412 Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Wed, 23 Oct 2024 19:30:34 +0800 Subject: [PATCH 075/102] fix test tsma.py --- tests/system-test/2-query/tsma.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/system-test/2-query/tsma.py b/tests/system-test/2-query/tsma.py index 976e190e44..1e115cff79 100644 --- a/tests/system-test/2-query/tsma.py +++ b/tests/system-test/2-query/tsma.py @@ -1347,7 +1347,6 @@ class TDTestCase: self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds) res_tb = 
TSMAQCBuilder().md5('1.test.tsma1_t4') self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds) - time.sleep(9999999) # test drop stream tdSql.error('drop stream tsma1', -2147471088) ## TSMA must be dropped first From 410fb6c2c57e9768ff881d557252ea9d377575ac Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Thu, 24 Oct 2024 07:41:36 +0800 Subject: [PATCH 076/102] fix limit --- source/libs/transport/src/transCli.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index 2214a4cc43..0792baf0a1 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -2363,7 +2363,7 @@ static int32_t createThrdObj(void* trans, SCliThrd** ppThrd) { } } - pThrd->pool = createConnPool(4); + pThrd->pool = createConnPool(32); if (pThrd->pool == NULL) { code = terrno; TAOS_CHECK_GOTO(terrno, NULL, _end); From b5bea478834f5128db2a6cdd2ba2f94641ee3aba Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Thu, 24 Oct 2024 07:46:48 +0800 Subject: [PATCH 077/102] fix limit --- source/libs/transport/src/transCli.c | 12 ++++++------ source/libs/transport/src/transSvr.c | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index 0792baf0a1..a9de188ad2 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -1046,7 +1046,7 @@ static int32_t cliCreateConn(SCliThrd* pThrd, SCliConn** pCliConn, char* ip, int conn->hostThrd = pThrd; conn->seq = 0; - conn->pQTable = taosHashInit(16, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), true, HASH_NO_LOCK); + conn->pQTable = taosHashInit(1024, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), true, HASH_NO_LOCK); if (conn->pQTable == NULL) { TAOS_CHECK_GOTO(terrno, NULL, _failed); } @@ -2363,7 +2363,7 @@ static int32_t createThrdObj(void* trans, SCliThrd** ppThrd) { } } - pThrd->pool = createConnPool(32); + pThrd->pool = createConnPool(64); if (pThrd->pool == NULL) { code = terrno; TAOS_CHECK_GOTO(terrno, NULL, _end); @@ -2382,22 +2382,22 @@ static int32_t createThrdObj(void* trans, SCliThrd** ppThrd) { pThrd->destroyAhandleFp = pInst->destroyFp; - pThrd->fqdn2ipCache = taosHashInit(8, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); + pThrd->fqdn2ipCache = taosHashInit(1024, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); if (pThrd->fqdn2ipCache == NULL) { TAOS_CHECK_GOTO(terrno, NULL, _end); } - pThrd->batchCache = taosHashInit(8, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); + pThrd->batchCache = taosHashInit(1024, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); if (pThrd->batchCache == NULL) { TAOS_CHECK_GOTO(terrno, NULL, _end); } - pThrd->connHeapCache = taosHashInit(8, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); + pThrd->connHeapCache = taosHashInit(1024, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, HASH_NO_LOCK); if (pThrd->connHeapCache == NULL) { TAOS_CHECK_GOTO(TSDB_CODE_OUT_OF_MEMORY, NULL, _end); } - pThrd->pIdConnTable = taosHashInit(512, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), false, HASH_NO_LOCK); + pThrd->pIdConnTable = taosHashInit(1024, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), false, HASH_NO_LOCK); if (pThrd->connHeapCache == NULL) { TAOS_CHECK_GOTO(TSDB_CODE_OUT_OF_MEMORY, NULL, _end); } diff --git 
a/source/libs/transport/src/transSvr.c b/source/libs/transport/src/transSvr.c index a7c24f3fae..5723f2ff23 100644 --- a/source/libs/transport/src/transSvr.c +++ b/source/libs/transport/src/transSvr.c @@ -239,7 +239,7 @@ SIpWhiteListTab* uvWhiteListCreate() { return NULL; } - pWhiteList->pList = taosHashInit(8, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), 0, HASH_NO_LOCK); + pWhiteList->pList = taosHashInit(1024, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), 0, HASH_NO_LOCK); if (pWhiteList->pList == NULL) { taosMemoryFree(pWhiteList); return NULL; @@ -1333,7 +1333,7 @@ static FORCE_INLINE SSvrConn* createConn(void* hThrd) { QUEUE_INIT(&exh->q); tTrace("%s handle %p, conn %p created, refId:%" PRId64, transLabel(pInst), exh, pConn, pConn->refId); - pConn->pQTable = taosHashInit(16, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), true, HASH_NO_LOCK); + pConn->pQTable = taosHashInit(1024, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), true, HASH_NO_LOCK); if (pConn->pQTable == NULL) { TAOS_CHECK_GOTO(TSDB_CODE_OUT_OF_MEMORY, &lino, _end); } From 2630987e5ecf9de7d49a2831ab61a5c96d360c1b Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Thu, 24 Oct 2024 07:54:53 +0800 Subject: [PATCH 078/102] fix limit --- source/libs/transport/src/transCli.c | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index a9de188ad2..4d340d5664 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -858,7 +858,6 @@ static int32_t cliGetConnFromPool(SCliThrd* pThrd, const char* key, SCliConn** p if (plist->totalSize >= pInst->connLimitNum) { return TSDB_CODE_RPC_MAX_SESSIONS; } - plist->totalSize += 1; return TSDB_CODE_RPC_NETWORK_BUSY; } @@ -1548,10 +1547,15 @@ static int32_t cliDoConn(SCliThrd* pThrd, SCliConn* conn) { } transRefCliHandle(conn); + + conn->list = taosHashGet((SHashObj*)pThrd->pool, conn->dstAddr, strlen(conn->dstAddr)); + if (conn->list != NULL) { + conn->list->totalSize += 1; + } + ret = uv_tcp_connect(&conn->connReq, (uv_tcp_t*)(conn->stream), (const struct sockaddr*)&addr, cliConnCb); if (ret != 0) { tError("failed connect to %s since %s", conn->dstAddr, uv_err_name(ret)); - TAOS_CHECK_GOTO(TSDB_CODE_THIRDPARTY_ERROR, &lino, _exception1); } From eff63141416c99057ba7b81febc8992f1c188920 Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Thu, 24 Oct 2024 08:04:17 +0800 Subject: [PATCH 079/102] fix limit --- source/libs/transport/src/transCli.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/libs/transport/src/transCli.c b/source/libs/transport/src/transCli.c index 4d340d5664..c3e214b5e3 100644 --- a/source/libs/transport/src/transCli.c +++ b/source/libs/transport/src/transCli.c @@ -2367,7 +2367,7 @@ static int32_t createThrdObj(void* trans, SCliThrd** ppThrd) { } } - pThrd->pool = createConnPool(64); + pThrd->pool = createConnPool(128); if (pThrd->pool == NULL) { code = terrno; TAOS_CHECK_GOTO(terrno, NULL, _end); From 6650dd50f1e25479f028c165260bad0f1dabb0a4 Mon Sep 17 00:00:00 2001 From: yihaoDeng Date: Thu, 24 Oct 2024 08:56:58 +0800 Subject: [PATCH 080/102] fix limit --- source/common/src/tglobal.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/common/src/tglobal.c b/source/common/src/tglobal.c index 7cff5de008..0cfd5c20ac 100644 --- a/source/common/src/tglobal.c +++ b/source/common/src/tglobal.c @@ -56,7 +56,7 @@ int32_t tsShellActivityTimer = 3; // second // queue & threads int32_t 
tsNumOfRpcThreads = 1; int32_t tsNumOfRpcSessions = 30000; -int32_t tsShareConnLimit = 8; +int32_t tsShareConnLimit = 10; int32_t tsReadTimeout = 900; int32_t tsTimeToGetAvailableConn = 500000; int32_t tsKeepAliveIdle = 60; From a75b1d73a02228fb5c976f7adbcaf93bd5a7e55f Mon Sep 17 00:00:00 2001 From: Alex Duan <51781608+DuanKuanJun@users.noreply.github.com> Date: Thu, 24 Oct 2024 09:13:04 +0800 Subject: [PATCH 081/102] Update 08-taos-cli.md --- docs/zh/14-reference/02-tools/08-taos-cli.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/zh/14-reference/02-tools/08-taos-cli.md b/docs/zh/14-reference/02-tools/08-taos-cli.md index 2255aed9be..d0b4c0ca0f 100644 --- a/docs/zh/14-reference/02-tools/08-taos-cli.md +++ b/docs/zh/14-reference/02-tools/08-taos-cli.md @@ -92,9 +92,9 @@ taos -h h1.taos.com -s "use db; show tables;" ## TDengine CLI TAB 键补全 - TAB 键前为空命令状态下按 TAB 键,会列出 TDengine CLI 支持的所有命令 -- TAB 键前为空格状态下按 TAB 键,会列出这个位置可以出现的命令词,再次按 TAB 键切换为下一个可以出现的命令词 -- TAB 键前有字母,会查找这个位置可以出现的所有命令词并前缀与前面字母相同的命令词,再次按 TAB 键切换为下一个可以出现的命令词 -- 输入反斜杠 `\` + TAB 键, 会自动补全为列式显示模式的命令词 `\G;` +- TAB 键前为空格状态下按 TAB 键,会显示此位置可以出现的所有命令词的第一个,再次按 TAB 键切为下一个 +- TAB 键前为字符串,会搜索与此字符串前缀匹配的所有可出现命令词,并显示第一个,再次按 TAB 键切为下一个 +- 输入反斜杠 `\` + TAB 键, 会自动补全为列显示模式命令词 `\G;` ## TDengine CLI 小技巧 From 80522809efd22f12117014a943b9669bbdf33d10 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 24 Oct 2024 09:16:24 +0800 Subject: [PATCH 082/102] test/system: use 100 instead of 300 --- tests/system-test/0-others/compatibility.py | 2 +- tests/system-test/0-others/compatibility_coverage.py | 2 +- tests/system-test/0-others/udfTest.py | 2 +- tests/system-test/0-others/udf_cfg1.py | 2 +- tests/system-test/0-others/udf_cfg2.py | 2 +- tests/system-test/0-others/udf_cluster.py | 2 +- tests/system-test/0-others/udf_create.py | 2 +- tests/system-test/0-others/udf_restart_taosd.py | 2 +- tests/system-test/2-query/abs.py | 2 +- tests/system-test/2-query/and_or_for_byte.py | 2 +- tests/system-test/2-query/countAlwaysReturnValue.py | 2 +- tests/system-test/2-query/distribute_agg_apercentile.py | 2 +- tests/system-test/2-query/distribute_agg_avg.py | 2 +- tests/system-test/2-query/distribute_agg_max.py | 2 +- tests/system-test/2-query/distribute_agg_min.py | 2 +- tests/system-test/2-query/distribute_agg_spread.py | 2 +- tests/system-test/2-query/distribute_agg_stddev.py | 2 +- tests/system-test/2-query/distribute_agg_sum.py | 2 +- tests/system-test/2-query/function_null.py | 4 ++-- tests/system-test/2-query/irate.py | 2 +- tests/system-test/2-query/last_row.py | 6 +++--- tests/system-test/2-query/sample.py | 2 +- tests/system-test/2-query/twa.py | 2 +- tests/system-test/6-cluster/5dnode1mnode.py | 2 +- tests/system-test/6-cluster/5dnode2mnode.py | 2 +- tests/system-test/6-cluster/5dnode3mnodeAdd1Ddnoe.py | 4 ++-- tests/system-test/6-cluster/5dnode3mnodeDrop.py | 2 +- tests/system-test/6-cluster/5dnode3mnodeDropInsert.py | 4 ++-- tests/system-test/6-cluster/5dnode3mnodeRecreateMnode.py | 4 ++-- .../6-cluster/5dnode3mnodeRestartDnodeInsertData.py | 4 ++-- tests/system-test/6-cluster/5dnode3mnodeRoll.py | 6 +++--- .../6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py | 4 ++-- .../6-cluster/5dnode3mnodeSep1VnodeStopDnodeInsertData.py | 4 ++-- .../6-cluster/5dnode3mnodeSep1VnodeStopDnodeRCreateDb.py | 4 ++-- .../6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py | 4 ++-- .../6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py | 4 ++-- .../6-cluster/5dnode3mnodeSepVnodeStopDnodeCreateUser.py | 4 ++-- 
tests/system-test/6-cluster/5dnode3mnodeStopInsert.py | 4 ++-- .../6dnode3mnodeInsertDataRebootAlterRep1-3.py | 4 ++-- .../6dnode3mnodeInsertDatarRebootAlterRep1-3.py | 4 ++-- .../6dnode3mnodeInsertLessDataAlterRep3to1to3.py | 4 ++-- .../6-cluster/vnode/4dnode1mnode_basic_createDb_replica1.py | 2 +- .../vnode/4dnode1mnode_basic_replica1_insertdatas.py | 2 +- .../vnode/4dnode1mnode_basic_replica1_insertdatas_querys.py | 2 +- .../vnode/4dnode1mnode_basic_replica3_insertdatas.py | 2 +- .../vnode/4dnode1mnode_basic_replica3_insertdatas_querys.py | 2 +- ...ic_replica3_insertdatas_querys_loop_restart_all_vnode.py | 2 +- ...sic_replica3_insertdatas_querys_loop_restart_follower.py | 2 +- ...basic_replica3_insertdatas_querys_loop_restart_leader.py | 2 +- ...e1mnode_basic_replica3_insertdatas_stop_follower_sync.py | 2 +- ...mnode_basic_replica3_insertdatas_stop_follower_unsync.py | 2 +- ..._replica3_insertdatas_stop_follower_unsync_force_stop.py | 2 +- ...de_basic_replica3_insertdatas_stop_leader_forece_stop.py | 2 +- ...4dnode1mnode_basic_replica3_mnode3_insertdatas_querys.py | 2 +- .../6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups.py | 2 +- .../vnode/4dnode1mnode_basic_replica3_vgroups_stopOne.py | 2 +- 56 files changed, 75 insertions(+), 75 deletions(-) diff --git a/tests/system-test/0-others/compatibility.py b/tests/system-test/0-others/compatibility.py index 9ba3bd0d2f..7c3eb48fe1 100644 --- a/tests/system-test/0-others/compatibility.py +++ b/tests/system-test/0-others/compatibility.py @@ -32,7 +32,7 @@ class TDTestCase: self.replicaVar = int(replicaVar) tdLog.debug(f"start to excute {__file__}") tdSql.init(conn.cursor()) - self.deletedDataSql= '''drop database if exists deldata;create database deldata duration 300 stt_trigger 1; ;use deldata; + self.deletedDataSql= '''drop database if exists deldata;create database deldata duration 100 stt_trigger 1; ;use deldata; create table deldata.stb1 (ts timestamp, c1 int, c2 bigint, c3 smallint, c4 tinyint, c5 float, c6 double, c7 bool, c8 binary(16),c9 nchar(32), c10 timestamp) tags (t1 int); create table deldata.ct1 using deldata.stb1 tags ( 1 ); insert into deldata.ct1 values ( now()-0s, 0, 0, 0, 0, 0.0, 0.0, 0, 'binary0', 'nchar0', now()+0a ) ( now()-10s, 1, 11111, 111, 11, 1.11, 11.11, 1, 'binary1', 'nchar1', now()+1a ) ( now()-20s, 2, 22222, 222, 22, 2.22, 22.22, 0, 'binary2', 'nchar2', now()+2a ) ( now()-30s, 3, 33333, 333, 33, 3.33, 33.33, 1, 'binary3', 'nchar3', now()+3a ); diff --git a/tests/system-test/0-others/compatibility_coverage.py b/tests/system-test/0-others/compatibility_coverage.py index 6eccf78c5a..bf76892777 100644 --- a/tests/system-test/0-others/compatibility_coverage.py +++ b/tests/system-test/0-others/compatibility_coverage.py @@ -30,7 +30,7 @@ class TDTestCase: self.replicaVar = int(replicaVar) tdLog.debug(f"start to excute {__file__}") tdSql.init(conn.cursor()) - self.deletedDataSql= '''drop database if exists deldata;create database deldata duration 300 stt_trigger 1; ;use deldata; + self.deletedDataSql= '''drop database if exists deldata;create database deldata duration 100 stt_trigger 1; ;use deldata; create table deldata.stb1 (ts timestamp, c1 int, c2 bigint, c3 smallint, c4 tinyint, c5 float, c6 double, c7 bool, c8 binary(16),c9 nchar(32), c10 timestamp) tags (t1 int); create table deldata.ct1 using deldata.stb1 tags ( 1 ); insert into deldata.ct1 values ( now()-0s, 0, 0, 0, 0, 0.0, 0.0, 0, 'binary0', 'nchar0', now()+0a ) ( now()-10s, 1, 11111, 111, 11, 1.11, 11.11, 1, 'binary1', 'nchar1', now()+1a ) ( now()-20s, 
2, 22222, 222, 22, 2.22, 22.22, 0, 'binary2', 'nchar2', now()+2a ) ( now()-30s, 3, 33333, 333, 33, 3.33, 33.33, 1, 'binary3', 'nchar3', now()+3a ); diff --git a/tests/system-test/0-others/udfTest.py b/tests/system-test/0-others/udfTest.py index 88d0d420f7..829a8aec27 100644 --- a/tests/system-test/0-others/udfTest.py +++ b/tests/system-test/0-others/udfTest.py @@ -61,7 +61,7 @@ class TDTestCase: def prepare_data(self): tdSql.execute("drop database if exists db ") - tdSql.execute("create database if not exists db duration 300") + tdSql.execute("create database if not exists db duration 100") tdSql.execute("use db") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/0-others/udf_cfg1.py b/tests/system-test/0-others/udf_cfg1.py index 913e5fcca1..a92f3bce31 100644 --- a/tests/system-test/0-others/udf_cfg1.py +++ b/tests/system-test/0-others/udf_cfg1.py @@ -63,7 +63,7 @@ class TDTestCase: def prepare_data(self): tdSql.execute("drop database if exists db ") - tdSql.execute("create database if not exists db duration 300") + tdSql.execute("create database if not exists db duration 100") tdSql.execute("use db") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/0-others/udf_cfg2.py b/tests/system-test/0-others/udf_cfg2.py index b535b4f626..89c4030977 100644 --- a/tests/system-test/0-others/udf_cfg2.py +++ b/tests/system-test/0-others/udf_cfg2.py @@ -63,7 +63,7 @@ class TDTestCase: def prepare_data(self): tdSql.execute("drop database if exists db ") - tdSql.execute("create database if not exists db duration 300") + tdSql.execute("create database if not exists db duration 100") tdSql.execute("use db") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/0-others/udf_cluster.py b/tests/system-test/0-others/udf_cluster.py index 9253be4ea3..c41412c10d 100644 --- a/tests/system-test/0-others/udf_cluster.py +++ b/tests/system-test/0-others/udf_cluster.py @@ -64,7 +64,7 @@ class TDTestCase: def prepare_data(self): tdSql.execute("drop database if exists db") - tdSql.execute("create database if not exists db replica 1 duration 300") + tdSql.execute("create database if not exists db replica 1 duration 100") tdSql.execute("use db") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/0-others/udf_create.py b/tests/system-test/0-others/udf_create.py index 6071561035..9038d99ff9 100644 --- a/tests/system-test/0-others/udf_create.py +++ b/tests/system-test/0-others/udf_create.py @@ -73,7 +73,7 @@ class TDTestCase: def prepare_data(self): tdSql.execute("drop database if exists db ") - tdSql.execute("create database if not exists db duration 300") + tdSql.execute("create database if not exists db duration 100") tdSql.execute("use db") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/0-others/udf_restart_taosd.py b/tests/system-test/0-others/udf_restart_taosd.py index 61b6a4ea68..c99e864e71 100644 --- a/tests/system-test/0-others/udf_restart_taosd.py +++ b/tests/system-test/0-others/udf_restart_taosd.py @@ -60,7 +60,7 @@ class TDTestCase: def prepare_data(self): tdSql.execute("drop database if exists db ") - tdSql.execute("create database if not exists db duration 300") + tdSql.execute("create database if not exists db duration 100") tdSql.execute("use db") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/2-query/abs.py b/tests/system-test/2-query/abs.py index 0eabd91535..db841af039 100644 --- a/tests/system-test/2-query/abs.py +++ b/tests/system-test/2-query/abs.py @@ -127,7 +127,7 @@ class TDTestCase: def 
prepare_tag_datas(self, dbname="testdb"): # prepare datas tdSql.execute( - f"create database if not exists {dbname} keep 3650 duration 1000 replica {self.replicaVar} ") + f"create database if not exists {dbname} keep 3650 duration 100 replica {self.replicaVar} ") tdSql.execute(" use testdb ") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/and_or_for_byte.py b/tests/system-test/2-query/and_or_for_byte.py index 5b2fb51998..ca9c1f2bef 100644 --- a/tests/system-test/2-query/and_or_for_byte.py +++ b/tests/system-test/2-query/and_or_for_byte.py @@ -128,7 +128,7 @@ class TDTestCase: def prepare_tag_datas(self, dbname="testdb"): # prepare datas tdSql.execute( - f"create database if not exists {dbname} keep 3650 duration 1000") + f"create database if not exists {dbname} keep 3650 duration 100") tdSql.execute(f" use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/countAlwaysReturnValue.py b/tests/system-test/2-query/countAlwaysReturnValue.py index bced89456e..a6a064ddfd 100644 --- a/tests/system-test/2-query/countAlwaysReturnValue.py +++ b/tests/system-test/2-query/countAlwaysReturnValue.py @@ -18,7 +18,7 @@ class TDTestCase: def prepare_data(self, dbname="db"): tdSql.execute( - f"create database if not exists {dbname} keep 3650 duration 1000") + f"create database if not exists {dbname} keep 3650 duration 100") tdSql.execute(f"use {dbname} ") tdSql.execute( f"create table {dbname}.tb (ts timestamp, c0 int)" diff --git a/tests/system-test/2-query/distribute_agg_apercentile.py b/tests/system-test/2-query/distribute_agg_apercentile.py index 23ca0b9fae..0a2f7ce45f 100644 --- a/tests/system-test/2-query/distribute_agg_apercentile.py +++ b/tests/system-test/2-query/distribute_agg_apercentile.py @@ -18,7 +18,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 s3_keeplocal 3000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 s3_keeplocal 3000 vgroups 5") tdSql.execute(f" use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/distribute_agg_avg.py b/tests/system-test/2-query/distribute_agg_avg.py index 1cd24103f8..497c3e9fac 100644 --- a/tests/system-test/2-query/distribute_agg_avg.py +++ b/tests/system-test/2-query/distribute_agg_avg.py @@ -35,7 +35,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f" use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/distribute_agg_max.py b/tests/system-test/2-query/distribute_agg_max.py index fb91216c3e..53379ecbb3 100644 --- a/tests/system-test/2-query/distribute_agg_max.py +++ b/tests/system-test/2-query/distribute_agg_max.py @@ -38,7 +38,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f" use {dbname} ") 
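        # Note (editorial annotation, not in the original patch): duration drops from 1000 to 100
        # days across these distribute_agg_* setups, presumably to stay within the smaller default
        # S3 keep-local window introduced earlier in this series.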
tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/distribute_agg_min.py b/tests/system-test/2-query/distribute_agg_min.py index 2667798640..01bc3da4a0 100644 --- a/tests/system-test/2-query/distribute_agg_min.py +++ b/tests/system-test/2-query/distribute_agg_min.py @@ -37,7 +37,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f" use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/distribute_agg_spread.py b/tests/system-test/2-query/distribute_agg_spread.py index 0247a91861..8dc91f712a 100644 --- a/tests/system-test/2-query/distribute_agg_spread.py +++ b/tests/system-test/2-query/distribute_agg_spread.py @@ -37,7 +37,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f" use {dbname}") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/distribute_agg_stddev.py b/tests/system-test/2-query/distribute_agg_stddev.py index 80bab3082d..f5383739ff 100644 --- a/tests/system-test/2-query/distribute_agg_stddev.py +++ b/tests/system-test/2-query/distribute_agg_stddev.py @@ -46,7 +46,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f" use {dbname}") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/distribute_agg_sum.py b/tests/system-test/2-query/distribute_agg_sum.py index da26fd58f9..fbe0221dd6 100644 --- a/tests/system-test/2-query/distribute_agg_sum.py +++ b/tests/system-test/2-query/distribute_agg_sum.py @@ -35,7 +35,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f" use {dbname}") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/function_null.py b/tests/system-test/2-query/function_null.py index e5056b7c56..712c98d48b 100644 --- a/tests/system-test/2-query/function_null.py +++ b/tests/system-test/2-query/function_null.py @@ -23,7 +23,7 @@ class TDTestCase: def prepare_tag_datas(self, dbname="testdb"): # prepare datas tdSql.execute( - f"create database if not exists {dbname} keep 3650 duration 1000") + f"create database if not exists {dbname} keep 3650 duration 100") tdSql.execute(f"use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 @@ -249,4 +249,4 @@ class TDTestCase: tdCases.addLinux(__file__, TDTestCase()) -tdCases.addWindows(__file__, TDTestCase()) \ No newline at end of file +tdCases.addWindows(__file__, TDTestCase()) diff --git 
a/tests/system-test/2-query/irate.py b/tests/system-test/2-query/irate.py index 82841541f0..69aa7f19fa 100644 --- a/tests/system-test/2-query/irate.py +++ b/tests/system-test/2-query/irate.py @@ -78,7 +78,7 @@ class TDTestCase: def prepare_tag_datas(self, dbname="testdb"): # prepare datas tdSql.execute( - f"create database if not exists {dbname} keep 3650 duration 1000") + f"create database if not exists {dbname} keep 3650 duration 100") tdSql.execute(f"use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/last_row.py b/tests/system-test/2-query/last_row.py index 395c754aa6..1534183056 100644 --- a/tests/system-test/2-query/last_row.py +++ b/tests/system-test/2-query/last_row.py @@ -61,7 +61,7 @@ class TDTestCase: def prepare_datas(self ,cache_value, dbname="db"): tdSql.execute(f"drop database if exists {dbname} ") - create_db_sql = f"create database if not exists {dbname} keep 3650 duration 1000 cachemodel {cache_value}" + create_db_sql = f"create database if not exists {dbname} keep 3650 duration 100 cachemodel {cache_value}" tdSql.execute(create_db_sql) tdSql.execute(f"use {dbname}") @@ -129,7 +129,7 @@ class TDTestCase: tdSql.execute(f"drop database if exists {dbname} ") # prepare datas - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 cachemodel {cache_value}") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 cachemodel {cache_value}") tdSql.execute(f"use {dbname} ") @@ -871,7 +871,7 @@ class TDTestCase: def initLastRowDelayTest(self, dbname="db"): tdSql.execute(f"drop database if exists {dbname} ") - create_db_sql = f"create database if not exists {dbname} keep 3650 duration 1000 cachemodel 'NONE' REPLICA 1" + create_db_sql = f"create database if not exists {dbname} keep 3650 duration 100 cachemodel 'NONE' REPLICA 1" tdSql.execute(create_db_sql) time.sleep(3) diff --git a/tests/system-test/2-query/sample.py b/tests/system-test/2-query/sample.py index a43c2e635e..efead7735b 100644 --- a/tests/system-test/2-query/sample.py +++ b/tests/system-test/2-query/sample.py @@ -611,7 +611,7 @@ class TDTestCase: def basic_sample_query(self, dbname="db"): tdSql.execute(f" drop database if exists {dbname} ") - tdSql.execute(f" create database if not exists {dbname} duration 300d ") + tdSql.execute(f" create database if not exists {dbname} duration 120d ") tdSql.execute( f'''create table {dbname}.stb1 (ts timestamp, c1 int, c2 bigint, c3 smallint, c4 tinyint, c5 float, c6 double, c7 bool, c8 binary(16),c9 nchar(32), c10 timestamp) diff --git a/tests/system-test/2-query/twa.py b/tests/system-test/2-query/twa.py index 16b9779fa8..ebd439fd09 100644 --- a/tests/system-test/2-query/twa.py +++ b/tests/system-test/2-query/twa.py @@ -22,7 +22,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute( f'''create table {dbname}.stb1 (ts timestamp, c1 int, c2 bigint, c3 smallint, c4 tinyint, c5 float, c6 double, c7 bool, c8 binary(16),c9 nchar(32), c10 timestamp,c11 int UNSIGNED, c12 bigint UNSIGNED, c13 smallint UNSIGNED, c14 tinyint UNSIGNED) diff --git a/tests/system-test/6-cluster/5dnode1mnode.py b/tests/system-test/6-cluster/5dnode1mnode.py index 61451f03b1..ae093ffb90 100644 --- 
a/tests/system-test/6-cluster/5dnode1mnode.py +++ b/tests/system-test/6-cluster/5dnode1mnode.py @@ -110,7 +110,7 @@ class TDTestCase: tdSql.error("drop mnode on dnode 1;") tdSql.execute("drop database if exists db") - tdSql.execute("create database if not exists db replica 1 duration 300") + tdSql.execute("create database if not exists db replica 1 duration 100") tdSql.execute("use db") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode2mnode.py b/tests/system-test/6-cluster/5dnode2mnode.py index ca7d6a58d5..aa9c3fc053 100644 --- a/tests/system-test/6-cluster/5dnode2mnode.py +++ b/tests/system-test/6-cluster/5dnode2mnode.py @@ -84,7 +84,7 @@ class TDTestCase: # fisrt add data : db\stable\childtable\general table tdSql.execute("drop database if exists db2") - tdSql.execute("create database if not exists db2 replica 1 duration 300") + tdSql.execute("create database if not exists db2 replica 1 duration 100") tdSql.execute("use db2") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeAdd1Ddnoe.py b/tests/system-test/6-cluster/5dnode3mnodeAdd1Ddnoe.py index f0f9c95566..e2cf0d3dd3 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeAdd1Ddnoe.py +++ b/tests/system-test/6-cluster/5dnode3mnodeAdd1Ddnoe.py @@ -73,8 +73,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti, 20) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeDrop.py b/tests/system-test/6-cluster/5dnode3mnodeDrop.py index aefa7a09f8..0ac28b2d16 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeDrop.py +++ b/tests/system-test/6-cluster/5dnode3mnodeDrop.py @@ -58,7 +58,7 @@ class TDTestCase: # fisrt add data : db\stable\childtable\general table for couti in count: tdSql.execute("drop database if exists db%d" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeDropInsert.py b/tests/system-test/6-cluster/5dnode3mnodeDropInsert.py index db183d80c1..26ead3dc2b 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeDropInsert.py +++ b/tests/system-test/6-cluster/5dnode3mnodeDropInsert.py @@ -78,8 +78,8 @@ class TDTestCase: for couti in range(dbcountStart,dbcountStop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table %s diff --git a/tests/system-test/6-cluster/5dnode3mnodeRecreateMnode.py b/tests/system-test/6-cluster/5dnode3mnodeRecreateMnode.py index 7af5982dec..2941a643fd 100644 --- 
a/tests/system-test/6-cluster/5dnode3mnodeRecreateMnode.py +++ b/tests/system-test/6-cluster/5dnode3mnodeRecreateMnode.py @@ -72,8 +72,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeRestartDnodeInsertData.py b/tests/system-test/6-cluster/5dnode3mnodeRestartDnodeInsertData.py index 1691603472..1d2644c65f 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeRestartDnodeInsertData.py +++ b/tests/system-test/6-cluster/5dnode3mnodeRestartDnodeInsertData.py @@ -73,8 +73,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeRoll.py b/tests/system-test/6-cluster/5dnode3mnodeRoll.py index 11a153c48f..4816f976c6 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeRoll.py +++ b/tests/system-test/6-cluster/5dnode3mnodeRoll.py @@ -37,7 +37,7 @@ class TDTestCase: tdSql.init(conn.cursor()) self.host = socket.gethostname() self.replicaVar = int(replicaVar) - self.deletedDataSql= '''drop database if exists deldata;create database deldata duration 300;use deldata; + self.deletedDataSql= '''drop database if exists deldata;create database deldata duration 100;use deldata; create table deldata.stb1 (ts timestamp, c1 int, c2 bigint, c3 smallint, c4 tinyint, c5 float, c6 double, c7 bool, c8 binary(16),c9 nchar(32), c10 timestamp) tags (t1 int); create table deldata.ct1 using deldata.stb1 tags ( 1 ); insert into deldata.ct1 values ( now()-0s, 0, 0, 0, 0, 0.0, 0.0, 0, 'binary0', 'nchar0', now()+0a ) ( now()-10s, 1, 11111, 111, 11, 1.11, 11.11, 1, 'binary1', 'nchar1', now()+1a ) ( now()-20s, 2, 22222, 222, 22, 2.22, 22.22, 0, 'binary2', 'nchar2', now()+2a ) ( now()-30s, 3, 33333, 333, 33, 3.33, 33.33, 1, 'binary3', 'nchar3', now()+3a ); @@ -140,8 +140,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py index fb62110b14..e89285c327 100644 --- 
a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py +++ b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeCreateDb.py @@ -71,8 +71,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeInsertData.py b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeInsertData.py index 7eaf756737..77892a1700 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeInsertData.py +++ b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeInsertData.py @@ -72,8 +72,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeRCreateDb.py b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeRCreateDb.py index 27b15d4c99..c7af2d162f 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeRCreateDb.py +++ b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopDnodeRCreateDb.py @@ -71,8 +71,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py index 9395dd2a2b..3e20721838 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py +++ b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopMnodeCreateStb.py @@ -71,8 +71,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git 
a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py index 2fb196635f..adc8e8a313 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py +++ b/tests/system-test/6-cluster/5dnode3mnodeSep1VnodeStopVnodeCreateStb.py @@ -71,8 +71,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeSepVnodeStopDnodeCreateUser.py b/tests/system-test/6-cluster/5dnode3mnodeSepVnodeStopDnodeCreateUser.py index bcc7edf5cb..04526971d7 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeSepVnodeStopDnodeCreateUser.py +++ b/tests/system-test/6-cluster/5dnode3mnodeSepVnodeStopDnodeCreateUser.py @@ -73,8 +73,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/5dnode3mnodeStopInsert.py b/tests/system-test/6-cluster/5dnode3mnodeStopInsert.py index 9d2430506f..374381dc18 100644 --- a/tests/system-test/6-cluster/5dnode3mnodeStopInsert.py +++ b/tests/system-test/6-cluster/5dnode3mnodeStopInsert.py @@ -77,8 +77,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - tdLog.debug("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + tdLog.debug("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDataRebootAlterRep1-3.py b/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDataRebootAlterRep1-3.py index 0d3b920bb4..c583149ce6 100644 --- a/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDataRebootAlterRep1-3.py +++ b/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDataRebootAlterRep1-3.py @@ -70,8 +70,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists 
db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDatarRebootAlterRep1-3.py b/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDatarRebootAlterRep1-3.py index 0d3b920bb4..c583149ce6 100644 --- a/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDatarRebootAlterRep1-3.py +++ b/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertDatarRebootAlterRep1-3.py @@ -70,8 +70,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertLessDataAlterRep3to1to3.py b/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertLessDataAlterRep3to1to3.py index 9ab47764c8..c817756edc 100644 --- a/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertLessDataAlterRep3to1to3.py +++ b/tests/system-test/6-cluster/manually-test/6dnode3mnodeInsertLessDataAlterRep3to1to3.py @@ -70,8 +70,8 @@ class TDTestCase: for couti in range(countstart,countstop): tdLog.debug("drop database if exists db%d" %couti) tdSql.execute("drop database if exists db%d" %couti) - print("create database if not exists db%d replica 1 duration 300" %couti) - tdSql.execute("create database if not exists db%d replica 1 duration 300" %couti) + print("create database if not exists db%d replica 1 duration 100" %couti) + tdSql.execute("create database if not exists db%d replica 1 duration 100" %couti) tdSql.execute("use db%d" %couti) tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_createDb_replica1.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_createDb_replica1.py index 52d675208b..fb00fc0846 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_createDb_replica1.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_createDb_replica1.py @@ -83,7 +83,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas.py index 9cc97543ad..51923f56a9 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas.py @@ -88,7 +88,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git 
a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas_querys.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas_querys.py index 4ea00ff2e2..6567b1024c 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas_querys.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica1_insertdatas_querys.py @@ -89,7 +89,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas.py index 51da6fc723..db45582c3b 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas.py @@ -88,7 +88,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys.py index a111e0bab5..64809a269b 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys.py @@ -89,7 +89,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_all_vnode.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_all_vnode.py index 66eca7143d..3d061d4f63 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_all_vnode.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_all_vnode.py @@ -91,7 +91,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_follower.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_follower.py index db9139dca2..b573d8eafa 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_follower.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_follower.py @@ -91,7 +91,7 @@ class TDTestCase: def create_db_check_vgroups(self): 
tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") time.sleep(3) tdSql.execute("use test") tdSql.execute( diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_leader.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_leader.py index 4fc4507c3f..049464b539 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_leader.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_querys_loop_restart_leader.py @@ -91,7 +91,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") time.sleep(3) tdSql.execute( diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_sync.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_sync.py index eb77c6d003..b5db868e68 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_sync.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_sync.py @@ -97,7 +97,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync.py index 9079bedb7c..31b8fd2326 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync.py @@ -97,7 +97,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync_force_stop.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync_force_stop.py index 35cbceb268..d7a161263e 100644 --- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync_force_stop.py +++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_follower_unsync_force_stop.py @@ -97,7 +97,7 @@ class TDTestCase: def create_db_check_vgroups(self): tdSql.execute("drop database if exists test") - tdSql.execute("create database if not exists test replica 1 duration 300") + tdSql.execute("create database if not exists test replica 1 duration 100") tdSql.execute("use test") tdSql.execute( '''create table stb1 diff --git 
a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_leader_forece_stop.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_leader_forece_stop.py
index bf2ebadd06..82c9dbf86c 100644
--- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_leader_forece_stop.py
+++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_insertdatas_stop_leader_forece_stop.py
@@ -180,7 +180,7 @@ class TDTestCase:
     def create_db_check_vgroups(self):
         tdSql.execute("drop database if exists test")
-        tdSql.execute("create database if not exists test replica 1 duration 300")
+        tdSql.execute("create database if not exists test replica 1 duration 100")
         tdSql.execute("use test")
         tdSql.execute(
             '''create table stb1
diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_mnode3_insertdatas_querys.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_mnode3_insertdatas_querys.py
index 25aba29235..7f8c75fa03 100644
--- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_mnode3_insertdatas_querys.py
+++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_mnode3_insertdatas_querys.py
@@ -89,7 +89,7 @@ class TDTestCase:
     def create_db_check_vgroups(self):
         tdSql.execute("drop database if exists test")
-        tdSql.execute("create database if not exists test replica 1 duration 300")
+        tdSql.execute("create database if not exists test replica 1 duration 100")
         tdSql.execute("use test")
         tdSql.execute(
             '''create table stb1
diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups.py
index 45ceb73059..4f3b2e2def 100644
--- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups.py
+++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups.py
@@ -88,7 +88,7 @@ class TDTestCase:
     def create_db_check_vgroups(self):
         tdSql.execute("drop database if exists test")
-        tdSql.execute("create database if not exists test replica 1 duration 300")
+        tdSql.execute("create database if not exists test replica 1 duration 100")
         tdSql.execute("use test")
         tdSql.execute(
             '''create table stb1
diff --git a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups_stopOne.py b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups_stopOne.py
index 3f72f33951..e136517a4f 100644
--- a/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups_stopOne.py
+++ b/tests/system-test/6-cluster/vnode/4dnode1mnode_basic_replica3_vgroups_stopOne.py
@@ -91,7 +91,7 @@ class TDTestCase:
     def create_db_check_vgroups(self):
         tdSql.execute("drop database if exists test")
-        tdSql.execute("create database if not exists test replica 1 duration 300")
+        tdSql.execute("create database if not exists test replica 1 duration 100")
         tdSql.execute("use test")
         tdSql.execute(
             '''create table stb1

From cd8674f96fd8fa36d2bf030544ff0effe245f1df Mon Sep 17 00:00:00 2001
From: Shengliang Guan
Date: Thu, 24 Oct 2024 09:53:23 +0800
Subject: [PATCH 083/102] fix: format codes

---
 cmake/cmake.define | 258 +++++++++---------
 cmake/taosadapter_CMakeLists.txt.in | 2 +-
 cmake/taostools_CMakeLists.txt.in | 2 +-
 contrib/CMakeLists.txt | 244 +++++++++--------
 contrib/azure-cmake/CMakeLists.txt | 41 +--
 contrib/test/azure/CMakeLists.txt | 13 +-
 .../01-components/03-taosadapter.md | 2 +-
 docs/zh/14-reference/03-taos-sql/14-stream.md | 2 +-
 8 files changed, 295 insertions(+), 269 deletions(-)

diff
--git a/cmake/cmake.define b/cmake/cmake.define index 8c0e9dc573..8b762011a4 100644 --- a/cmake/cmake.define +++ b/cmake/cmake.define @@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.0) set(CMAKE_VERBOSE_MAKEFILE FALSE) set(TD_BUILD_TAOSA_INTERNAL FALSE) -#set output directory +# set output directory SET(LIBRARY_OUTPUT_PATH ${PROJECT_BINARY_DIR}/build/lib) SET(EXECUTABLE_OUTPUT_PATH ${PROJECT_BINARY_DIR}/build/bin) SET(TD_TESTS_OUTPUT_DIR ${PROJECT_BINARY_DIR}/test) @@ -12,170 +12,178 @@ MESSAGE(STATUS "Project binary files output path: " ${PROJECT_BINARY_DIR}) MESSAGE(STATUS "Project executable files output path: " ${EXECUTABLE_OUTPUT_PATH}) MESSAGE(STATUS "Project library files output path: " ${LIBRARY_OUTPUT_PATH}) -if (NOT DEFINED TD_GRANT) - SET(TD_GRANT FALSE) -endif() +IF(NOT DEFINED TD_GRANT) + SET(TD_GRANT FALSE) +ENDIF() -IF (NOT DEFINED BUILD_WITH_RAND_ERR) - SET(BUILD_WITH_RAND_ERR FALSE) -ELSE () - SET(BUILD_WITH_RAND_ERR TRUE) -endif() +IF(NOT DEFINED BUILD_WITH_RAND_ERR) + SET(BUILD_WITH_RAND_ERR FALSE) +ELSE() + SET(BUILD_WITH_RAND_ERR TRUE) +ENDIF() -IF ("${WEBSOCKET}" MATCHES "true") +IF("${WEBSOCKET}" MATCHES "true") SET(TD_WEBSOCKET TRUE) MESSAGE("Enable websocket") ADD_DEFINITIONS(-DWEBSOCKET) -ELSE () +ELSE() SET(TD_WEBSOCKET FALSE) -ENDIF () +ENDIF() -IF ("${BUILD_HTTP}" STREQUAL "") - IF (TD_LINUX) - IF (TD_ARM_32) - SET(TD_BUILD_HTTP TRUE) - ELSE () - SET(TD_BUILD_HTTP TRUE) - ENDIF () - ELSEIF (TD_DARWIN) +IF("${BUILD_HTTP}" STREQUAL "") + IF(TD_LINUX) + IF(TD_ARM_32) + SET(TD_BUILD_HTTP TRUE) + ELSE() + SET(TD_BUILD_HTTP TRUE) + ENDIF() + ELSEIF(TD_DARWIN) + SET(TD_BUILD_HTTP TRUE) + ELSE() + SET(TD_BUILD_HTTP TRUE) + ENDIF() +ELSEIF(${BUILD_HTTP} MATCHES "false") + SET(TD_BUILD_HTTP FALSE) +ELSEIF(${BUILD_HTTP} MATCHES "true") SET(TD_BUILD_HTTP TRUE) - ELSE () +ELSEIF(${BUILD_HTTP} MATCHES "internal") + SET(TD_BUILD_HTTP FALSE) + SET(TD_BUILD_TAOSA_INTERNAL TRUE) +ELSE() SET(TD_BUILD_HTTP TRUE) - ENDIF () -ELSEIF (${BUILD_HTTP} MATCHES "false") - SET(TD_BUILD_HTTP FALSE) -ELSEIF (${BUILD_HTTP} MATCHES "true") - SET(TD_BUILD_HTTP TRUE) -ELSEIF (${BUILD_HTTP} MATCHES "internal") - SET(TD_BUILD_HTTP FALSE) - SET(TD_BUILD_TAOSA_INTERNAL TRUE) -ELSE () - SET(TD_BUILD_HTTP TRUE) -ENDIF () +ENDIF() -IF (TD_BUILD_HTTP) - ADD_DEFINITIONS(-DHTTP_EMBEDDED) -ENDIF () +IF(TD_BUILD_HTTP) + ADD_DEFINITIONS(-DHTTP_EMBEDDED) +ENDIF() -IF ("${BUILD_TOOLS}" STREQUAL "") - IF (TD_LINUX) - IF (TD_ARM_32) - SET(BUILD_TOOLS "false") - ELSEIF (TD_ARM_64) - SET(BUILD_TOOLS "false") - ELSE () - SET(BUILD_TOOLS "false") - ENDIF () - ELSEIF (TD_DARWIN) - SET(BUILD_TOOLS "false") - ELSE () - SET(BUILD_TOOLS "false") - ENDIF () -ENDIF () +IF("${BUILD_TOOLS}" STREQUAL "") + IF(TD_LINUX) + IF(TD_ARM_32) + SET(BUILD_TOOLS "false") + ELSEIF(TD_ARM_64) + SET(BUILD_TOOLS "false") + ELSE() + SET(BUILD_TOOLS "false") + ENDIF() + ELSEIF(TD_DARWIN) + SET(BUILD_TOOLS "false") + ELSE() + SET(BUILD_TOOLS "false") + ENDIF() +ENDIF() -IF ("${BUILD_TOOLS}" MATCHES "false") +IF("${BUILD_TOOLS}" MATCHES "false") MESSAGE("${Yellow} Will _not_ build taos_tools! ${ColourReset}") SET(TD_TAOS_TOOLS FALSE) -ELSE () +ELSE() MESSAGE("") MESSAGE("${Green} Will build taos_tools! 
${ColourReset}") MESSAGE("") SET(TD_TAOS_TOOLS TRUE) -ENDIF () +ENDIF() -IF (${TD_WINDOWS}) +IF(${TD_WINDOWS}) SET(TAOS_LIB taos_static) -ELSE () +ELSE() SET(TAOS_LIB taos) -ENDIF () +ENDIF() # build TSZ by default -IF ("${TSZ_ENABLED}" MATCHES "false") - set(VAR_TSZ "" CACHE INTERNAL "global variant empty" ) +IF("${TSZ_ENABLED}" MATCHES "false") + set(VAR_TSZ "" CACHE INTERNAL "global variant empty") ELSE() - # define add - MESSAGE(STATUS "build with TSZ enabled") - ADD_DEFINITIONS(-DTD_TSZ) - set(VAR_TSZ "TSZ" CACHE INTERNAL "global variant tsz" ) + # define add + MESSAGE(STATUS "build with TSZ enabled") + ADD_DEFINITIONS(-DTD_TSZ) + set(VAR_TSZ "TSZ" CACHE INTERNAL "global variant tsz") ENDIF() # force set all platform to JEMALLOC_ENABLED = false SET(JEMALLOC_ENABLED OFF) -IF (TD_WINDOWS) - MESSAGE("${Yellow} set compiler flag for Windows! ${ColourReset}") - IF (${CMAKE_BUILD_TYPE} MATCHES "Release") - MESSAGE("${Green} will build Release version! ${ColourReset}") - SET(COMMON_FLAGS "/W3 /D_WIN32 /DWIN32 /Zi- /O2 /GL /MD") - ELSE () - MESSAGE("${Green} will build Debug version! ${ColourReset}") - SET(COMMON_FLAGS "/w /D_WIN32 /DWIN32 /Zi /MTd") +IF(TD_WINDOWS) + MESSAGE("${Yellow} set compiler flag for Windows! ${ColourReset}") + + IF(${CMAKE_BUILD_TYPE} MATCHES "Release") + MESSAGE("${Green} will build Release version! ${ColourReset}") + SET(COMMON_FLAGS "/W3 /D_WIN32 /DWIN32 /Zi- /O2 /GL /MD") + + ELSE() + MESSAGE("${Green} will build Debug version! ${ColourReset}") + SET(COMMON_FLAGS "/w /D_WIN32 /DWIN32 /Zi /MTd") ENDIF() SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /MANIFEST:NO") + # IF (MSVC AND (MSVC_VERSION GREATER_EQUAL 1900)) - # SET(COMMON_FLAGS "${COMMON_FLAGS} /Wv:18") + # SET(COMMON_FLAGS "${COMMON_FLAGS} /Wv:18") # ENDIF () - IF (CMAKE_DEPFILE_FLAGS_C) + IF(CMAKE_DEPFILE_FLAGS_C) SET(CMAKE_DEPFILE_FLAGS_C "") - ENDIF () - IF (CMAKE_DEPFILE_FLAGS_CXX) + ENDIF() + + IF(CMAKE_DEPFILE_FLAGS_CXX) SET(CMAKE_DEPFILE_FLAGS_CXX "") - ENDIF () - IF (CMAKE_C_FLAGS_DEBUG) + ENDIF() + + IF(CMAKE_C_FLAGS_DEBUG) SET(CMAKE_C_FLAGS_DEBUG "" CACHE STRING "" FORCE) - ENDIF () - IF (CMAKE_CXX_FLAGS_DEBUG) + ENDIF() + + IF(CMAKE_CXX_FLAGS_DEBUG) SET(CMAKE_CXX_FLAGS_DEBUG "" CACHE STRING "" FORCE) - ENDIF () + ENDIF() SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMMON_FLAGS}") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMMON_FLAGS}") -ELSE () - IF (${TD_DARWIN}) +ELSE() + IF(${TD_DARWIN}) set(CMAKE_MACOSX_RPATH 0) - ENDIF () - IF (${COVER} MATCHES "true") + ENDIF() + + IF(${COVER} MATCHES "true") MESSAGE(STATUS "Test coverage mode, add extra flags") SET(GCC_COVERAGE_COMPILE_FLAGS "-fprofile-arcs -ftest-coverage") - SET(GCC_COVERAGE_LINK_FLAGS "-lgcov --coverage") + SET(GCC_COVERAGE_LINK_FLAGS "-lgcov --coverage") SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${GCC_COVERAGE_COMPILE_FLAGS} ${GCC_COVERAGE_LINK_FLAGS}") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${GCC_COVERAGE_COMPILE_FLAGS} ${GCC_COVERAGE_LINK_FLAGS}") - ENDIF () + ENDIF() # disable all assert - IF ((${DISABLE_ASSERT} MATCHES "true") OR (${DISABLE_ASSERTS} MATCHES "true")) + IF((${DISABLE_ASSERT} MATCHES "true") OR(${DISABLE_ASSERTS} MATCHES "true")) ADD_DEFINITIONS(-DDISABLE_ASSERT) MESSAGE(STATUS "Disable all asserts") ENDIF() INCLUDE(CheckCCompilerFlag) - IF (TD_ARM_64 OR TD_ARM_32) + + IF(TD_ARM_64 OR TD_ARM_32) SET(COMPILER_SUPPORT_SSE42 false) - ELSEIF (("${CMAKE_C_COMPILER_ID}" MATCHES "Clang") OR ("${CMAKE_C_COMPILER_ID}" MATCHES "AppleClang")) + ELSEIF(("${CMAKE_C_COMPILER_ID}" MATCHES "Clang") OR("${CMAKE_C_COMPILER_ID}" MATCHES 
"AppleClang")) SET(COMPILER_SUPPORT_SSE42 true) MESSAGE(STATUS "Always enable sse4.2 for Clang/AppleClang") ELSE() CHECK_C_COMPILER_FLAG("-msse4.2" COMPILER_SUPPORT_SSE42) ENDIF() - IF (TD_ARM_64 OR TD_ARM_32) - SET(COMPILER_SUPPORT_FMA false) - SET(COMPILER_SUPPORT_AVX false) - SET(COMPILER_SUPPORT_AVX2 false) - SET(COMPILER_SUPPORT_AVX512F false) - SET(COMPILER_SUPPORT_AVX512BMI false) - SET(COMPILER_SUPPORT_AVX512VL false) + IF(TD_ARM_64 OR TD_ARM_32) + SET(COMPILER_SUPPORT_FMA false) + SET(COMPILER_SUPPORT_AVX false) + SET(COMPILER_SUPPORT_AVX2 false) + SET(COMPILER_SUPPORT_AVX512F false) + SET(COMPILER_SUPPORT_AVX512BMI false) + SET(COMPILER_SUPPORT_AVX512VL false) ELSE() - CHECK_C_COMPILER_FLAG("-mfma" COMPILER_SUPPORT_FMA) - CHECK_C_COMPILER_FLAG("-mavx512f" COMPILER_SUPPORT_AVX512F) - CHECK_C_COMPILER_FLAG("-mavx512vbmi" COMPILER_SUPPORT_AVX512BMI) - CHECK_C_COMPILER_FLAG("-mavx512vl" COMPILER_SUPPORT_AVX512VL) + CHECK_C_COMPILER_FLAG("-mfma" COMPILER_SUPPORT_FMA) + CHECK_C_COMPILER_FLAG("-mavx512f" COMPILER_SUPPORT_AVX512F) + CHECK_C_COMPILER_FLAG("-mavx512vbmi" COMPILER_SUPPORT_AVX512BMI) + CHECK_C_COMPILER_FLAG("-mavx512vl" COMPILER_SUPPORT_AVX512VL) - INCLUDE(CheckCSourceRuns) - SET(CMAKE_REQUIRED_FLAGS "-mavx") - check_c_source_runs(" + INCLUDE(CheckCSourceRuns) + SET(CMAKE_REQUIRED_FLAGS "-mavx") + check_c_source_runs(" #include int main() { __m256d a, b, c; @@ -185,7 +193,7 @@ ELSE () c = _mm256_add_pd(a, b); _mm256_storeu_pd(buf, c); for (int i = 0; i < sizeof(buf) / sizeof(buf[0]); ++i) { - if (buf[i] != 0) { + IF (buf[i] != 0) { return 1; } } @@ -193,8 +201,8 @@ ELSE () } " COMPILER_SUPPORT_AVX) - SET(CMAKE_REQUIRED_FLAGS "-mavx2") - check_c_source_runs(" + SET(CMAKE_REQUIRED_FLAGS "-mavx2") + check_c_source_runs(" #include int main() { __m256i a, b, c; @@ -204,7 +212,7 @@ ELSE () c = _mm256_and_si256(a, b); _mm256_storeu_si256((__m256i *)buf, c); for (int i = 0; i < sizeof(buf) / sizeof(buf[0]); ++i) { - if (buf[i] != 0) { + IF (buf[i] != 0) { return 1; } } @@ -213,40 +221,42 @@ ELSE () " COMPILER_SUPPORT_AVX2) ENDIF() - IF (COMPILER_SUPPORT_SSE42) + IF(COMPILER_SUPPORT_SSE42) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -msse4.2") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -msse4.2") ENDIF() - IF ("${SIMD_SUPPORT}" MATCHES "true") - IF (COMPILER_SUPPORT_FMA) + IF("${SIMD_SUPPORT}" MATCHES "true") + IF(COMPILER_SUPPORT_FMA) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mfma") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mfma") MESSAGE(STATUS "FMA instructions is ACTIVATED") ENDIF() - IF (COMPILER_SUPPORT_AVX) + + IF(COMPILER_SUPPORT_AVX) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx") MESSAGE(STATUS "AVX instructions is ACTIVATED") ENDIF() - IF (COMPILER_SUPPORT_AVX2) + + IF(COMPILER_SUPPORT_AVX2) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx2") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx2") MESSAGE(STATUS "AVX2 instructions is ACTIVATED") ENDIF() ENDIF() - IF ("${SIMD_AVX512_SUPPORT}" MATCHES "true") - IF (COMPILER_SUPPORT_AVX512F AND COMPILER_SUPPORT_AVX512BMI) + IF("${SIMD_AVX512_SUPPORT}" MATCHES "true") + IF(COMPILER_SUPPORT_AVX512F AND COMPILER_SUPPORT_AVX512BMI) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx512f -mavx512vbmi") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx512f -mavx512vbmi") MESSAGE(STATUS "avx512f/avx512bmi enabled by compiler") ENDIF() - IF (COMPILER_SUPPORT_AVX512VL) + IF(COMPILER_SUPPORT_AVX512VL) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx512vl") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx512vl") - MESSAGE(STATUS "avx512vl enabled 
by compiler") + MESSAGE(STATUS "avx512vl enabled by compiler") ENDIF() ENDIF() @@ -254,17 +264,17 @@ ELSE () SET(CMAKE_C_FLAGS_REL "${CMAKE_C_FLAGS} -Werror -Werror=return-type -fPIC -O3 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k") SET(CMAKE_CXX_FLAGS_REL "${CMAKE_CXX_FLAGS} -Werror -Wno-reserved-user-defined-literal -Wno-literal-suffix -Werror=return-type -fPIC -O3 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k") - IF (${BUILD_SANITIZER}) + IF(${BUILD_SANITIZER}) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Werror -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") - #SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") - SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") - MESSAGE(STATUS "Compile with Address Sanitizer!") - ELSEIF (${BUILD_RELEASE}) + + # SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") + SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0") + MESSAGE(STATUS "Compile with Address Sanitizer!") + ELSEIF(${BUILD_RELEASE}) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS_REL}") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL}") - ELSE () + ELSE() SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Werror -Werror=return-type -fPIC -g3 -gdwarf-2 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-reserved-user-defined-literal -g3 -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k") - ENDIF () - -ENDIF () + ENDIF() +ENDIF() diff --git a/cmake/taosadapter_CMakeLists.txt.in b/cmake/taosadapter_CMakeLists.txt.in index ef6ed4af1d..13826a1a74 100644 --- a/cmake/taosadapter_CMakeLists.txt.in +++ b/cmake/taosadapter_CMakeLists.txt.in @@ -2,7 +2,7 @@ # taosadapter ExternalProject_Add(taosadapter GIT_REPOSITORY https://github.com/taosdata/taosadapter.git - GIT_TAG 3.0 + GIT_TAG main SOURCE_DIR "${TD_SOURCE_DIR}/tools/taosadapter" BINARY_DIR "" #BUILD_IN_SOURCE TRUE diff --git a/cmake/taostools_CMakeLists.txt.in b/cmake/taostools_CMakeLists.txt.in index 9a6a5329ae..9bbda8309f 100644 --- a/cmake/taostools_CMakeLists.txt.in +++ b/cmake/taostools_CMakeLists.txt.in @@ -2,7 +2,7 @@ # taos-tools ExternalProject_Add(taos-tools GIT_REPOSITORY https://github.com/taosdata/taos-tools.git - GIT_TAG 3.0 + GIT_TAG main SOURCE_DIR "${TD_SOURCE_DIR}/tools/taos-tools" BINARY_DIR "" #BUILD_IN_SOURCE TRUE diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt index 
2a7bf84245..eae697560b 100644 --- a/contrib/CMakeLists.txt +++ b/contrib/CMakeLists.txt @@ -10,39 +10,36 @@ if(${BUILD_WITH_S3}) file(MAKE_DIRECTORY $ENV{HOME}/.cos-local.2/) elseif(${BUILD_WITH_COS}) + set(CONTRIB_TMP_FILE3 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in3") + configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) -set(CONTRIB_TMP_FILE3 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in3") -configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) + if(${BUILD_WITH_COS}) + file(MAKE_DIRECTORY $ENV{HOME}/.cos-local.1/) + cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) + cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) + cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) + endif(${BUILD_WITH_COS}) -if(${BUILD_WITH_COS}) - file(MAKE_DIRECTORY $ENV{HOME}/.cos-local.1/) - cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) - cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) - cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3}) -endif(${BUILD_WITH_COS}) - -configure_file(${CONTRIB_TMP_FILE3} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt") -execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . + configure_file(${CONTRIB_TMP_FILE3} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt") + execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") -execute_process(COMMAND "${CMAKE_COMMAND}" --build . + execute_process(COMMAND "${CMAKE_COMMAND}" --build . WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") -set(CONTRIB_TMP_FILE2 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in2") -configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2}) + set(CONTRIB_TMP_FILE2 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in2") + configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2}) -if(${BUILD_WITH_COS}) - cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2}) -endif(${BUILD_WITH_COS}) + if(${BUILD_WITH_COS}) + cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2}) + endif(${BUILD_WITH_COS}) -configure_file(${CONTRIB_TMP_FILE2} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt") -execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . + configure_file(${CONTRIB_TMP_FILE2} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt") + execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") -execute_process(COMMAND "${CMAKE_COMMAND}" --build . + execute_process(COMMAND "${CMAKE_COMMAND}" --build . 
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") - endif() - set(CONTRIB_TMP_FILE "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in") configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) @@ -59,7 +56,7 @@ endif() # taosadapter if(${BUILD_HTTP}) MESSAGE("BUILD_HTTP is on") -else () +else() MESSAGE("BUILD_HTTP is off, use taosAdapter") cat("${TD_SUPPORT_DIR}/taosadapter_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) endif() @@ -110,19 +107,18 @@ cat("${TD_SUPPORT_DIR}/zlib_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) cat("${TD_SUPPORT_DIR}/cjson_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) # xz -#cat("${TD_SUPPORT_DIR}/xz_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) +# cat("${TD_SUPPORT_DIR}/xz_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) -#lzma2 +# lzma2 cat("${TD_SUPPORT_DIR}/lzma_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) - -if (${BUILD_CONTRIB}) +if(${BUILD_CONTRIB}) if(${BUILD_WITH_ROCKSDB}) cat("${TD_SUPPORT_DIR}/rocksdb_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) add_definitions(-DUSE_ROCKSDB) endif() else() - if (NOT ${TD_LINUX}) + if(NOT ${TD_LINUX}) if(${BUILD_WITH_ROCKSDB}) cat("${TD_SUPPORT_DIR}/rocksdb_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) add_definitions(-DUSE_ROCKSDB) @@ -134,9 +130,9 @@ else() endif() endif() -#cat("${TD_SUPPORT_DIR}/zstd_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) +# cat("${TD_SUPPORT_DIR}/zstd_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) -#libuv +# libuv if(${BUILD_WITH_UV}) cat("${TD_SUPPORT_DIR}/libuv_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) endif(${BUILD_WITH_UV}) @@ -157,13 +153,12 @@ if(${BUILD_WITH_S3}) # cos elseif(${BUILD_WITH_COS}) - #cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) - #cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) - #cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) - #cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) + # cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) + # cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) + # cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) + # cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) cat("${TD_SUPPORT_DIR}/cos_CMakeLists.txt.in" ${CONTRIB_TMP_FILE}) add_definitions(-DUSE_COS) - endif() # crashdump @@ -192,9 +187,9 @@ endif() # download dependencies configure_file(${CONTRIB_TMP_FILE} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt") execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" . - WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") + WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") execute_process(COMMAND "${CMAKE_COMMAND}" --build . 
- WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") + WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download") # ================================================================================================ # Build @@ -207,25 +202,27 @@ if(${BUILD_TEST}) gtest PUBLIC $ ) + if(${TD_WINDOWS}) target_include_directories( gtest PUBLIC $ ) endif(${TD_WINDOWS}) + if(${TD_LINUX}) target_include_directories( gtest PUBLIC $ ) endif(${TD_LINUX}) + if(${TD_DARWIN}) target_include_directories( gtest PUBLIC $ ) endif(${TD_DARWIN}) - endif(${BUILD_TEST}) # cJson @@ -237,15 +234,16 @@ option(CJSON_BUILD_SHARED_LIBS "Overrides BUILD_SHARED_LIBS if CJSON_OVERRIDE_BU add_subdirectory(cJson EXCLUDE_FROM_ALL) target_include_directories( cjson + # see https://stackoverflow.com/questions/25676277/cmake-target-include-directories-prints-an-error-when-i-try-to-add-the-source PUBLIC $ ) unset(CMAKE_PROJECT_INCLUDE_BEFORE) # xml2 -#if(${BUILD_WITH_S3}) -# add_subdirectory(xml2 EXCLUDE_FROM_ALL) -#endif() +# if(${BUILD_WITH_S3}) +# add_subdirectory(xml2 EXCLUDE_FROM_ALL) +# endif() # lz4 add_subdirectory(lz4/build/cmake EXCLUDE_FROM_ALL) @@ -256,10 +254,12 @@ target_include_directories( # zlib set(CMAKE_PROJECT_INCLUDE_BEFORE "${TD_SUPPORT_DIR}/EnableCMP0048.txt.in") + if(${TD_DARWIN}) SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=deprecated-non-prototype") SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=deprecated-non-prototype") endif(${TD_DARWIN}) + add_subdirectory(zlib EXCLUDE_FROM_ALL) target_include_directories( zlibstatic @@ -275,9 +275,9 @@ unset(CMAKE_PROJECT_INCLUDE_BEFORE) # add_subdirectory(xz EXCLUDE_FROM_ALL) # target_include_directories( -# xz -# PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/xz -# PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/xz +# xz +# PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/xz +# PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/xz # ) # leveldb @@ -292,24 +292,27 @@ endif(${BUILD_WITH_LEVELDB}) # rocksdb # To support rocksdb build on ubuntu: sudo apt-get install libgflags-dev -if (${BUILD_WITH_UV}) +if(${BUILD_WITH_UV}) if(${TD_LINUX}) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS_REL}") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL}") - IF ("${CMAKE_BUILD_TYPE}" STREQUAL "") + + if("${CMAKE_BUILD_TYPE}" STREQUAL "") SET(CMAKE_BUILD_TYPE Release) endif() endif(${TD_LINUX}) -endif (${BUILD_WITH_UV}) +endif(${BUILD_WITH_UV}) -if (${BUILD_WITH_ROCKSDB}) - if (${BUILD_CONTRIB}) +if(${BUILD_WITH_ROCKSDB}) + if(${BUILD_CONTRIB}) if(${TD_LINUX}) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL} -Wno-error=maybe-uninitialized -Wno-error=unused-but-set-variable -Wno-error=unused-variable -Wno-error=unused-function -Wno-errno=unused-private-field -Wno-error=unused-result") - if ("${CMAKE_BUILD_TYPE}" STREQUAL "") + + if("${CMAKE_BUILD_TYPE}" STREQUAL "") SET(CMAKE_BUILD_TYPE Release) endif() endif(${TD_LINUX}) + MESSAGE(STATUS "ROCKSDB CXXXX STATUS CONFIG: " ${CMAKE_CXX_FLAGS}) MESSAGE(STATUS "ROCKSDB C STATUS CONFIG: " ${CMAKE_C_FLAGS}) @@ -317,22 +320,23 @@ if (${BUILD_WITH_ROCKSDB}) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=maybe-uninitialized") endif(${TD_DARWIN}) - if (${TD_DARWIN_ARM64}) + if(${TD_DARWIN_ARM64}) set(HAS_ARMV8_CRC true) endif(${TD_DARWIN_ARM64}) - if (${TD_WINDOWS}) + if(${TD_WINDOWS}) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4244 /wd4819 /std:c++17") SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4244 /wd4819") option(WITH_JNI "" OFF) + if(CMAKE_C_FLAGS MATCHES "/MT" OR CMAKE_C_FLAGS MATCHES "/MTd") - message("Rocksdb build runtime lib use /MT or /MTd") - option(WITH_MD_LIBRARY "build with MD" OFF) + message("Rocksdb 
build runtime lib use /MT or /MTd") + option(WITH_MD_LIBRARY "build with MD" OFF) endif() + set(SYSTEM_LIBS ${SYSTEM_LIBS} shlwapi.lib rpcrt4.lib) endif(${TD_WINDOWS}) - if(${TD_DARWIN}) option(HAVE_THREAD_LOCAL "" OFF) option(WITH_IOSTATS_CONTEXT "" OFF) @@ -358,30 +362,32 @@ if (${BUILD_WITH_ROCKSDB}) PUBLIC $ ) else() - if (NOT ${TD_LINUX}) + if(NOT ${TD_LINUX}) MESSAGE(STATUS "ROCKSDB CXX STATUS CONFIG: " ${CMAKE_CXX_FLAGS}) MESSAGE(STATUS "ROCKSDB C STATUS CONFIG: " ${CMAKE_C_FLAGS}) + if(${TD_DARWIN}) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=maybe-uninitialized") SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=maybe-uninitialized") endif(${TD_DARWIN}) - if (${TD_DARWIN_ARM64}) + if(${TD_DARWIN_ARM64}) set(HAS_ARMV8_CRC true) endif(${TD_DARWIN_ARM64}) - if (${TD_WINDOWS}) + if(${TD_WINDOWS}) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4244 /wd4819 /std:c++17") SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4244 /wd4819") option(WITH_JNI "" OFF) + if(CMAKE_C_FLAGS MATCHES "/MT" OR CMAKE_C_FLAGS MATCHES "/MTd") message("Rocksdb build runtime lib use /MT or /MTd") option(WITH_MD_LIBRARY "build with MD" OFF) endif() + set(SYSTEM_LIBS ${SYSTEM_LIBS} shlwapi.lib rpcrt4.lib) endif(${TD_WINDOWS}) - if(${TD_DARWIN}) option(HAVE_THREAD_LOCAL "" OFF) option(WITH_IOSTATS_CONTEXT "" OFF) @@ -407,44 +413,44 @@ if (${BUILD_WITH_ROCKSDB}) PUBLIC $ ) endif() - endif() endif() if(${BUILD_WITH_S3}) INCLUDE_DIRECTORIES($ENV{HOME}/.cos-local.2/include) MESSAGE("build with s3: ${BUILD_WITH_S3}") + # cos elseif(${BUILD_WITH_COS}) - if(${TD_LINUX}) - set(CMAKE_PREFIX_PATH $ENV{HOME}/.cos-local.1) - #ADD_DEFINITIONS(-DMINIXML_LIBRARY=${CMAKE_BINARY_DIR}/build/lib/libxml.a) - option(ENABLE_TEST "Enable the tests" OFF) - INCLUDE_DIRECTORIES($ENV{HOME}/.cos-local.1/include) - #MESSAGE("$ENV{HOME}/.cos-local.1/include") + if(${TD_LINUX}) + set(CMAKE_PREFIX_PATH $ENV{HOME}/.cos-local.1) - set(CMAKE_BUILD_TYPE Release) - set(ORIG_CMAKE_PROJECT_NAME ${CMAKE_PROJECT_NAME}) - set(CMAKE_PROJECT_NAME cos_c_sdk) + # ADD_DEFINITIONS(-DMINIXML_LIBRARY=${CMAKE_BINARY_DIR}/build/lib/libxml.a) + option(ENABLE_TEST "Enable the tests" OFF) + INCLUDE_DIRECTORIES($ENV{HOME}/.cos-local.1/include) - add_subdirectory(cos-c-sdk-v5 EXCLUDE_FROM_ALL) - target_include_directories( - cos_c_sdk - PUBLIC $ - ) + # MESSAGE("$ENV{HOME}/.cos-local.1/include") + set(CMAKE_BUILD_TYPE Release) + set(ORIG_CMAKE_PROJECT_NAME ${CMAKE_PROJECT_NAME}) + set(CMAKE_PROJECT_NAME cos_c_sdk) - set(CMAKE_PROJECT_NAME ${ORIG_CMAKE_PROJECT_NAME}) - else() - - endif(${TD_LINUX}) + add_subdirectory(cos-c-sdk-v5 EXCLUDE_FROM_ALL) + target_include_directories( + cos_c_sdk + PUBLIC $ + ) + set(CMAKE_PROJECT_NAME ${ORIG_CMAKE_PROJECT_NAME}) + else() + endif(${TD_LINUX}) endif() # pthread if(${BUILD_PTHREAD}) - if ("${CMAKE_BUILD_TYPE}" STREQUAL "") - SET(CMAKE_BUILD_TYPE Release) + if("${CMAKE_BUILD_TYPE}" STREQUAL "") + SET(CMAKE_BUILD_TYPE Release) endif() + add_definitions(-DPTW32_STATIC_LIB) add_subdirectory(pthread EXCLUDE_FROM_ALL) set_target_properties(libpthreadVC3 PROPERTIES OUTPUT_NAME pthread) @@ -452,16 +458,15 @@ if(${BUILD_PTHREAD}) target_link_libraries(pthread INTERFACE libpthreadVC3) endif() - # jemalloc if(${JEMALLOC_ENABLED}) include(ExternalProject) ExternalProject_Add(jemalloc - PREFIX "jemalloc" - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jemalloc - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ./autogen.sh COMMAND ./configure --prefix=${CMAKE_BINARY_DIR}/build/ --disable-initial-exec-tls - BUILD_COMMAND ${MAKE} + PREFIX "jemalloc" + SOURCE_DIR 
${CMAKE_CURRENT_SOURCE_DIR}/jemalloc + BUILD_IN_SOURCE 1 + CONFIGURE_COMMAND ./autogen.sh COMMAND ./configure --prefix=${CMAKE_BINARY_DIR}/build/ --disable-initial-exec-tls + BUILD_COMMAND ${MAKE} ) INCLUDE_DIRECTORIES(${CMAKE_BINARY_DIR}/build/include) endif() @@ -515,12 +520,13 @@ endif(${BUILD_WCWIDTH}) # LIBUV if(${BUILD_WITH_UV}) - if (TD_WINDOWS) + if(TD_WINDOWS) # There is no GetHostNameW function on win7. file(READ "libuv/src/win/util.c" LIBUV_WIN_UTIL_CONTENT) string(REPLACE "if (GetHostNameW(buf, UV_MAXHOSTNAMESIZE" "DWORD nSize = UV_MAXHOSTNAMESIZE;\n if (GetComputerNameW(buf, &nSize" LIBUV_WIN_UTIL_CONTENT "${LIBUV_WIN_UTIL_CONTENT}") file(WRITE "libuv/src/win/util.c" "${LIBUV_WIN_UTIL_CONTENT}") - endif () + endif() + add_subdirectory(libuv EXCLUDE_FROM_ALL) endif(${BUILD_WITH_UV}) @@ -536,6 +542,7 @@ if(${BUILD_WITH_SQLITE}) INTERFACE m INTERFACE pthread ) + if(NOT TD_WINDOWS) target_link_libraries(sqlite INTERFACE dl @@ -546,36 +553,38 @@ endif(${BUILD_WITH_SQLITE}) # addr2line if(${BUILD_ADDR2LINE}) if(NOT ${TD_WINDOWS}) - check_include_file( "sys/types.h" HAVE_SYS_TYPES_H) - check_include_file( "sys/stat.h" HAVE_SYS_STAT_H ) - check_include_file( "inttypes.h" HAVE_INTTYPES_H ) - check_include_file( "stddef.h" HAVE_STDDEF_H ) - check_include_file( "stdlib.h" HAVE_STDLIB_H ) - check_include_file( "string.h" HAVE_STRING_H ) - check_include_file( "memory.h" HAVE_MEMORY_H ) - check_include_file( "strings.h" HAVE_STRINGS_H ) - check_include_file( "stdint.h" HAVE_STDINT_H ) - check_include_file( "unistd.h" HAVE_UNISTD_H ) - check_include_file( "sgidefs.h" HAVE_SGIDEFS_H ) - check_include_file( "stdafx.h" HAVE_STDAFX_H ) - check_include_file( "elf.h" HAVE_ELF_H ) - check_include_file( "libelf.h" HAVE_LIBELF_H ) - check_include_file( "libelf/libelf.h" HAVE_LIBELF_LIBELF_H) - check_include_file( "alloca.h" HAVE_ALLOCA_H ) - check_include_file( "elfaccess.h" HAVE_ELFACCESS_H) - check_include_file( "sys/elf_386.h" HAVE_SYS_ELF_386_H ) - check_include_file( "sys/elf_amd64.h" HAVE_SYS_ELF_AMD64_H) - check_include_file( "sys/elf_sparc.h" HAVE_SYS_ELF_SPARC_H) - check_include_file( "sys/ia64/elf.h" HAVE_SYS_IA64_ELF_H ) + check_include_file("sys/types.h" HAVE_SYS_TYPES_H) + check_include_file("sys/stat.h" HAVE_SYS_STAT_H) + check_include_file("inttypes.h" HAVE_INTTYPES_H) + check_include_file("stddef.h" HAVE_STDDEF_H) + check_include_file("stdlib.h" HAVE_STDLIB_H) + check_include_file("string.h" HAVE_STRING_H) + check_include_file("memory.h" HAVE_MEMORY_H) + check_include_file("strings.h" HAVE_STRINGS_H) + check_include_file("stdint.h" HAVE_STDINT_H) + check_include_file("unistd.h" HAVE_UNISTD_H) + check_include_file("sgidefs.h" HAVE_SGIDEFS_H) + check_include_file("stdafx.h" HAVE_STDAFX_H) + check_include_file("elf.h" HAVE_ELF_H) + check_include_file("libelf.h" HAVE_LIBELF_H) + check_include_file("libelf/libelf.h" HAVE_LIBELF_LIBELF_H) + check_include_file("alloca.h" HAVE_ALLOCA_H) + check_include_file("elfaccess.h" HAVE_ELFACCESS_H) + check_include_file("sys/elf_386.h" HAVE_SYS_ELF_386_H) + check_include_file("sys/elf_amd64.h" HAVE_SYS_ELF_AMD64_H) + check_include_file("sys/elf_sparc.h" HAVE_SYS_ELF_SPARC_H) + check_include_file("sys/ia64/elf.h" HAVE_SYS_IA64_ELF_H) set(VERSION 0.3.1) set(PACKAGE_VERSION "\"${VERSION}\"") configure_file(libdwarf/cmake/config.h.cmake config.h) file(GLOB_RECURSE LIBDWARF_SOURCES "libdwarf/src/lib/libdwarf/*.c") add_library(libdwarf STATIC ${LIBDWARF_SOURCES}) set_target_properties(libdwarf PROPERTIES OUTPUT_NAME "libdwarf") + if(HAVE_LIBELF_H OR 
HAVE_LIBELF_LIBELF_H) target_link_libraries(libdwarf PUBLIC libelf) endif() + target_include_directories(libdwarf SYSTEM PUBLIC "libdwarf/src/lib/libdwarf" ${CMAKE_CURRENT_BINARY_DIR}) file(READ "addr2line/addr2line.c" ADDR2LINE_CONTENT) string(REPLACE "static int" "int" ADDR2LINE_CONTENT "${ADDR2LINE_CONTENT}") @@ -584,7 +593,7 @@ if(${BUILD_ADDR2LINE}) file(WRITE "addr2line/addr2line.c" "${ADDR2LINE_CONTENT}") add_library(addr2line STATIC "addr2line/addr2line.c") target_link_libraries(addr2line PUBLIC libdwarf dl z) - target_include_directories(addr2line PUBLIC "libdwarf/src/lib/libdwarf" ) + target_include_directories(addr2line PUBLIC "libdwarf/src/lib/libdwarf") endif(NOT ${TD_WINDOWS}) endif(${BUILD_ADDR2LINE}) @@ -593,36 +602,39 @@ if(${BUILD_GEOS}) if(${TD_LINUX}) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS_REL}") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL}") - if ("${CMAKE_BUILD_TYPE}" STREQUAL "") + + if("${CMAKE_BUILD_TYPE}" STREQUAL "") SET(CMAKE_BUILD_TYPE Release) endif() endif(${TD_LINUX}) + option(BUILD_SHARED_LIBS "Build GEOS with shared libraries" OFF) add_subdirectory(geos EXCLUDE_FROM_ALL) - if (${TD_WINDOWS}) + + if(${TD_WINDOWS}) SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") - else () + else() unset(CMAKE_CXX_STANDARD CACHE) # undo libgeos's setting of global CMAKE_CXX_STANDARD endif(${TD_WINDOWS}) + target_include_directories( geos_c PUBLIC $ ) endif(${BUILD_GEOS}) -if (${BUILD_PCRE2}) +if(${BUILD_PCRE2}) add_subdirectory(pcre2 EXCLUDE_FROM_ALL) endif(${BUILD_PCRE2}) if(${TD_LINUX} AND ${BUILD_WITH_S3}) - add_subdirectory(azure-cmake EXCLUDE_FROM_ALL) + add_subdirectory(azure-cmake EXCLUDE_FROM_ALL) endif() # ================================================================================================ # Build test # ================================================================================================ - MESSAGE("build with dependency tests: ${BUILD_DEPENDENCY_TESTS}") if(${BUILD_DEPENDENCY_TESTS}) diff --git a/contrib/azure-cmake/CMakeLists.txt b/contrib/azure-cmake/CMakeLists.txt index e4624361ed..aaa5617860 100644 --- a/contrib/azure-cmake/CMakeLists.txt +++ b/contrib/azure-cmake/CMakeLists.txt @@ -30,39 +30,42 @@ set(AZURE_SDK_INCLUDES add_library(_azure_sdk STATIC ${AZURE_SDK_UNIFIED_SRC}) target_compile_definitions(_azure_sdk PRIVATE BUILD_CURL_HTTP_TRANSPORT_ADAPTER) - target_include_directories( - _azure_sdk - PUBLIC "$ENV{HOME}/.cos-local.2/include" - ) +target_include_directories( + _azure_sdk + PUBLIC "$ENV{HOME}/.cos-local.2/include" +) find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) find_library(XML2_LIBRARY xml2 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) -#find_library(CURL_LIBRARY curl) -#find_library(XML2_LIBRARY xml2) + +# find_library(CURL_LIBRARY curl) +# find_library(XML2_LIBRARY xml2) find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) -#find_library(CoreFoundation_Library CoreFoundation) -#find_library(SystemConfiguration_Library SystemConfiguration) +# find_library(CoreFoundation_Library CoreFoundation) +# find_library(SystemConfiguration_Library SystemConfiguration) target_link_libraries( - _azure_sdk - PRIVATE ${CURL_LIBRARY} - PRIVATE ${SSL_LIBRARY} - PRIVATE ${CRYPTO_LIBRARY} - PRIVATE ${XML2_LIBRARY} - #PRIVATE xml2 - PRIVATE zlib -# PRIVATE ${CoreFoundation_Library} -# PRIVATE ${SystemConfiguration_Library} + 
_azure_sdk
+ PRIVATE ${CURL_LIBRARY}
+ PRIVATE ${SSL_LIBRARY}
+ PRIVATE ${CRYPTO_LIBRARY}
+ PRIVATE ${XML2_LIBRARY}
+
+ # PRIVATE xml2
+ PRIVATE zlib
+
+ # PRIVATE ${CoreFoundation_Library}
+ # PRIVATE ${SystemConfiguration_Library}
 )

 # Originally, on Windows azure-core is built with bcrypt and crypt32 by default
-if (TARGET OpenSSL::SSL)
+if(TARGET OpenSSL::SSL)
   target_link_libraries(_azure_sdk PRIVATE OpenSSL::Crypto OpenSSL::SSL)
 endif()

 # Originally, on Windows azure-core is built with winhttp by default
-if (TARGET td_contrib::curl)
+if(TARGET td_contrib::curl)
   target_link_libraries(_azure_sdk PRIVATE td_contrib::curl)
 endif()
diff --git a/contrib/test/azure/CMakeLists.txt b/contrib/test/azure/CMakeLists.txt
index 68571dce46..fade8c9ef6 100644
--- a/contrib/test/azure/CMakeLists.txt
+++ b/contrib/test/azure/CMakeLists.txt
@@ -1,19 +1,20 @@
 set(CMAKE_CXX_STANDARD 14)
 set(CMAKE_CXX_STANDARD_REQUIRED True)
-add_executable (
- azure-test
- main.cpp
+add_executable(
+ azure-test
+ main.cpp
 )
 find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
 find_library(XML2_LIBRARY xml2 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
-#find_library(XML2_LIBRARY xml2)
+
+# find_library(XML2_LIBRARY xml2)
 find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
 find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
-#find_library(CoreFoundation_Library CoreFoundation)
-#find_library(SystemConfiguration_Library SystemConfiguration)
+# find_library(CoreFoundation_Library CoreFoundation)
+# find_library(SystemConfiguration_Library SystemConfiguration)
 target_link_libraries(
 azure-test
 PRIVATE _azure_sdk
diff --git a/docs/zh/14-reference/01-components/03-taosadapter.md b/docs/zh/14-reference/01-components/03-taosadapter.md
index 7d69cab598..b74ee77862 100644
--- a/docs/zh/14-reference/01-components/03-taosadapter.md
+++ b/docs/zh/14-reference/01-components/03-taosadapter.md
@@ -330,7 +330,7 @@ taosAdapter 采集 REST/WebSocket 相关请求的监控指标。将监控指标
 | other\_fail | INT UNSIGNED | | 其他失败请求数 |
 | query\_in\_process | INT UNSIGNED | | 正在处理查询请求数 |
 | write\_in\_process | INT UNSIGNED | | 正在处理写入请求数 |
-| endpoint | VARCHAR | | 请求端点 |
+| endpoint | VARCHAR | | 请求端点 |
 | req\_type | NCHAR | TAG | 请求类型:0 为 REST,1 为 WebSocket |

 ## 结果返回条数限制
diff --git a/docs/zh/14-reference/03-taos-sql/14-stream.md b/docs/zh/14-reference/03-taos-sql/14-stream.md
index 3af8fa6921..cd5c76a4ad 100644
--- a/docs/zh/14-reference/03-taos-sql/14-stream.md
+++ b/docs/zh/14-reference/03-taos-sql/14-stream.md
@@ -153,7 +153,7 @@ SELECT * from information_schema.`ins_streams`;
 由于窗口关闭是由事件时间决定的,如事件流中断、或持续延迟,则事件时间无法更新,可能导致无法得到最新的计算结果。

-因此,流式计算提供了以事件时间结合处理时间计算的 MAX_DELAY 触发模式。MAX_DELAY最小时间是5s,如果低于5s,创建流计算时会报错。
+因此,流式计算提供了以事件时间结合处理时间计算的 MAX_DELAY 触发模式。MAX_DELAY 最小时间是 5s,如果低于 5s,创建流计算时会报错。

 MAX_DELAY 模式在窗口关闭时会立即触发计算。此外,当数据写入后,计算触发的时间超过 max delay 指定的时间,则立即触发计算

From b2b74d180772458195f0201c528703f8e45ccb91 Mon Sep 17 00:00:00 2001
From: Alex Duan <51781608+DuanKuanJun@users.noreply.github.com>
Date: Thu, 24 Oct 2024 09:57:38 +0800
Subject: [PATCH 084/102] Update 08-taos-cli.md

---
 docs/zh/14-reference/02-tools/08-taos-cli.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/zh/14-reference/02-tools/08-taos-cli.md b/docs/zh/14-reference/02-tools/08-taos-cli.md
index d0b4c0ca0f..a6f2f7ae05 100644
--- a/docs/zh/14-reference/02-tools/08-taos-cli.md
+++ b/docs/zh/14-reference/02-tools/08-taos-cli.md
@@ -71,7 +71,7 @@ taos> SET MAX_BINARY_DISPLAY_WIDTH ;
 - -l PKTLEN: 网络测试时使用的测试包大小
 - -n NETROLE: 网络连接测试时的测试范围,默认为 `client`, 可选值为 `client`、`server`
 - -N PKTNUM: 网络测试时使用的测试包数量
-- -r: 将时间列转化为无符号 64 位整数类型输出(即 C 语音中 uint64_t)
+- -r: 将时间列转化为无符号 64 位整数类型输出(即 C 语言中 uint64_t)
 - -R: 使用 RESTful 模式连接服务端
 - -s COMMAND: 以非交互模式执行的 SQL 命令
 - -t: 测试服务端启动状态,状态同-k

From cd33915492c20c8349a0391c335d1acb7ba0c0f7 Mon Sep 17 00:00:00 2001
From: wangjiaming0909 <604227650@qq.com>
Date: Thu, 24 Oct 2024 10:08:44 +0800
Subject: [PATCH 085/102] fix tsma test case

---
 tests/system-test/2-query/tsma.py | 23 +++++++++++++++++++----
 tests/system-test/test.py | 4 +++-
 2 files changed, 22 insertions(+), 5 deletions(-)

diff --git a/tests/system-test/2-query/tsma.py b/tests/system-test/2-query/tsma.py
index 1e115cff79..acde26d5ca 100644
--- a/tests/system-test/2-query/tsma.py
+++ b/tests/system-test/2-query/tsma.py
@@ -693,7 +693,7 @@ class TDTestCase:
             "======== prepare test env include database, stable, ctables, and insert data: ")
         paraDict = {'dbName': db,
                     'dropFlag': 1,
-                    'vgroups': 2,
+                    'vgroups': 4,
                     'stbName': 'meters',
                     'colPrefix': 'c',
                     'tagPrefix': 't',
@@ -1273,6 +1273,21 @@ class TDTestCase:
         else:
             tdLog.debug(f'wait query succeed: {sql} to return {expected_row_num}, got: {tdSql.getRows()}')

+    def wait_query_err(self, sql: str, timeout_in_seconds: float, err):
+        timeout = timeout_in_seconds
+        while timeout > 0:
+            try:
+                tdSql.query(sql, queryTimes=1)
+                time.sleep(1)
+                timeout = timeout - 1
+            except:
+                tdSql.error(sql, err);
+                break
+        if timeout <= 0:
+            tdLog.exit(f'failed to wait query: {sql} to return error timeout: {timeout_in_seconds}s')
+        else:
+            tdLog.debug(f'wait query error succeed: {sql}')
+
     def test_drop_tsma(self):
         function_name = sys._getframe().f_code.co_name
         tdLog.debug(f'-----{function_name}------')
@@ -1339,14 +1354,14 @@ class TDTestCase:
         tdSql.execute('alter table test.t0 ttl 2', queryTimes=1)
         tdSql.execute('flush database test')
         res_tb = TSMAQCBuilder().md5('1.test.tsma1_t0')
-        self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds)
+        self.wait_query_err(f'desc `{res_tb}`', wait_query_seconds, -2147473917)

         # test drop multi tables
         tdSql.execute('drop table test.t3, test.t4')
         res_tb = TSMAQCBuilder().md5('1.test.tsma1_t3')
-        self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds)
+        self.wait_query_err(f'desc `{res_tb}`', wait_query_seconds, -2147473917)
         res_tb = TSMAQCBuilder().md5('1.test.tsma1_t4')
-        self.wait_query(f'select * from information_schema.ins_tables where table_name = "{res_tb}"', 0, wait_query_seconds)
+        self.wait_query_err(f'desc `{res_tb}`', wait_query_seconds, -2147473917)

         # test drop stream
         tdSql.error('drop stream tsma1', -2147471088) ## TSMA must be dropped first
diff --git a/tests/system-test/test.py b/tests/system-test/test.py
index 9defcd083a..57a4789f2e 100644
--- a/tests/system-test/test.py
+++ b/tests/system-test/test.py
@@ -24,6 +24,8 @@ import platform
 import socket
 import threading
 import importlib
+print(f"Python version: {sys.version}")
+print(f"Version info: {sys.version_info}")
 import toml

 sys.path.append("../pytest")
@@ -687,6 +689,6 @@ if __name__ == "__main__":
     if conn is not None:
         conn.close()
     if asan:
-        # tdDnodes.StopAllSigint()
+        tdDnodes.StopAllSigint()
         tdLog.info("Address sanitizer mode finished")
     sys.exit(0)

From 81c30ab7c04740a1e9f60af03f4c2b8d7a85891d Mon Sep 17 00:00:00 2001
From: Shengliang Guan
Date: Thu, 24 Oct 2024 10:09:54 +0800
Subject: [PATCH 086/102] fix: format cmake file

---
 CMakeLists.txt | 18 +-
 source/client/CMakeLists.txt | 6 +-
 source/common/CMakeLists.txt | 173 ++++++++--------
 source/dnode/mgmt/CMakeLists.txt | 29 ++-
 source/dnode/mgmt/mgmt_dnode/CMakeLists.txt | 19 +-
 source/dnode/mgmt/node_mgmt/CMakeLists.txt | 14 +-
 source/dnode/mnode/impl/CMakeLists.txt | 36 ++--
 source/dnode/vnode/CMakeLists.txt | 133 ++++++-------
 source/libs/audit/CMakeLists.txt | 5 +-
 source/libs/azure/CMakeLists.txt | 23 +--
 source/libs/azure/test/CMakeLists.txt | 36 ++--
 source/libs/catalog/CMakeLists.txt | 6 +-
 source/libs/command/CMakeLists.txt | 2 +-
 source/libs/crypt/CMakeLists.txt | 4 +-
 source/libs/executor/CMakeLists.txt | 19 +-
 source/libs/function/CMakeLists.txt | 178 +++++++++--------
 source/libs/index/CMakeLists.txt | 35 ++--
 source/libs/monitor/CMakeLists.txt | 2 +-
 source/libs/monitorfw/CMakeLists.txt | 4 +-
 source/libs/parser/CMakeLists.txt | 8 +-
 source/libs/qworker/CMakeLists.txt | 4 +-
 source/libs/scheduler/CMakeLists.txt | 2 +-
 source/libs/stream/CMakeLists.txt | 60 +++---
 source/libs/stream/test/CMakeLists.txt | 136 +++++++------
 source/libs/tcs/CMakeLists.txt | 1 +
 source/libs/tcs/test/CMakeLists.txt | 36 ++--
 source/libs/tfs/CMakeLists.txt | 2 +-
 source/libs/transport/CMakeLists.txt | 44 ++---
 source/util/CMakeLists.txt | 23 +--
 tools/CMakeLists.txt | 208 ++++++++++----------
 tools/shell/CMakeLists.txt | 24 +--
 utils/CMakeLists.txt | 6 +-
 utils/TSZ/CMakeLists.txt | 20 +-
 utils/test/c/CMakeLists.txt | 17 +-
 34 files changed, 665 insertions(+), 668 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index ac368c29fe..db5b89db3d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,13 +1,13 @@
 cmake_minimum_required(VERSION 3.0)

 project(
- TDengine
- VERSION 3.0
- DESCRIPTION "An open-source big data platform designed and optimized for the Internet of Things(IOT)"
+ TDengine
+ VERSION 3.0
+ DESCRIPTION "An open-source big data platform designed and optimized for the Internet of Things(IOT)"
 )

-if (NOT DEFINED TD_SOURCE_DIR)
- set( TD_SOURCE_DIR ${PROJECT_SOURCE_DIR} )
+if(NOT DEFINED TD_SOURCE_DIR)
+ set(TD_SOURCE_DIR ${PROJECT_SOURCE_DIR})
 endif()

 SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
@@ -15,13 +15,11 @@ SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
 set(TD_SUPPORT_DIR "${TD_SOURCE_DIR}/cmake")
 set(TD_CONTRIB_DIR "${TD_SOURCE_DIR}/contrib")

-
 include(${TD_SUPPORT_DIR}/cmake.platform)
 include(${TD_SUPPORT_DIR}/cmake.define)
 include(${TD_SUPPORT_DIR}/cmake.options)
 include(${TD_SUPPORT_DIR}/cmake.version)

-
 # contrib
 add_subdirectory(contrib)
@@ -33,8 +31,8 @@ target_include_directories(api INTERFACE "include/client")

 # src
 if(${BUILD_TEST})
- include(CTest)
- enable_testing()
+ include(CTest)
+ enable_testing()
 endif(${BUILD_TEST})

 add_subdirectory(source)
@@ -44,5 +42,5 @@ add_subdirectory(examples/c)
 add_subdirectory(tests)
 include(${TD_SUPPORT_DIR}/cmake.install)

-# docs
+# docs
 add_subdirectory(docs/doxgen)
diff --git a/source/client/CMakeLists.txt b/source/client/CMakeLists.txt
index 84747860e9..bbd18892ab 100644
--- a/source/client/CMakeLists.txt
+++ b/source/client/CMakeLists.txt
@@ -1,8 +1,8 @@
 aux_source_directory(src CLIENT_SRC)

-IF (TD_ENTERPRISE)
- LIST(APPEND CLIENT_SRC ${TD_ENTERPRISE_DIR}/src/plugins/view/src/clientView.c)
-ENDIF ()
+if(TD_ENTERPRISE)
+ LIST(APPEND CLIENT_SRC ${TD_ENTERPRISE_DIR}/src/plugins/view/src/clientView.c)
+endif()

 if(TD_WINDOWS)
 add_library(taos SHARED ${CLIENT_SRC} ${CMAKE_CURRENT_SOURCE_DIR}/src/taos.rc.in)
diff --git a/source/common/CMakeLists.txt b/source/common/CMakeLists.txt
index
eb3dd95e95..42a7c2c615 100644 --- a/source/common/CMakeLists.txt +++ b/source/common/CMakeLists.txt @@ -1,121 +1,122 @@ aux_source_directory(src COMMON_SRC) -IF (TD_ENTERPRISE) -LIST(APPEND COMMON_SRC ${TD_ENTERPRISE_DIR}/src/plugins/common/src/tglobal.c) -ENDIF() + +if(TD_ENTERPRISE) + LIST(APPEND COMMON_SRC ${TD_ENTERPRISE_DIR}/src/plugins/common/src/tglobal.c) +endif() add_library(common STATIC ${COMMON_SRC}) -if (DEFINED GRANT_CFG_INCLUDE_DIR) +if(DEFINED GRANT_CFG_INCLUDE_DIR) add_definitions(-DGRANTS_CFG) endif() -IF (TD_GRANT) +if(TD_GRANT) ADD_DEFINITIONS(-D_GRANT) -ENDIF () +endif() -IF (TD_STORAGE) +if(TD_STORAGE) ADD_DEFINITIONS(-D_STORAGE) TARGET_LINK_LIBRARIES(common PRIVATE storage) -ENDIF () +endif() -IF (TD_ENTERPRISE) - IF(${BUILD_WITH_S3}) +if(TD_ENTERPRISE) + if(${BUILD_WITH_S3}) add_definitions(-DUSE_S3) ELSEIF(${BUILD_WITH_COS}) add_definitions(-DUSE_COS) - ENDIF() -ENDIF() + endif() +endif() target_include_directories( - common - PUBLIC "${TD_SOURCE_DIR}/include/common" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" - PRIVATE "${GRANT_CFG_INCLUDE_DIR}" + common + PUBLIC "${TD_SOURCE_DIR}/include/common" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + PRIVATE "${GRANT_CFG_INCLUDE_DIR}" ) -IF(${TD_WINDOWS}) - target_include_directories( - common - PRIVATE "${TD_SOURCE_DIR}/contrib/pthread" - PRIVATE "${TD_SOURCE_DIR}/contrib/msvcregex" - ) -ENDIF () + +if(${TD_WINDOWS}) + target_include_directories( + common + PRIVATE "${TD_SOURCE_DIR}/contrib/pthread" + PRIVATE "${TD_SOURCE_DIR}/contrib/msvcregex" + ) +endif() target_link_libraries( - common - PUBLIC os - PUBLIC util - INTERFACE api + common + PUBLIC os + PUBLIC util + INTERFACE api ) if(${BUILD_S3}) + if(${BUILD_WITH_S3}) + target_include_directories( + common - if(${BUILD_WITH_S3}) - target_include_directories( - common + PUBLIC "$ENV{HOME}/.cos-local.2/include" + ) - PUBLIC "$ENV{HOME}/.cos-local.2/include" - ) + set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") + set(CMAKE_PREFIX_PATH $ENV{HOME}/.cos-local.2) + find_library(S3_LIBRARY s3) + find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) + find_library(XML2_LIBRARY xml2) + find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) + find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) + target_link_libraries( + common - set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") - set(CMAKE_PREFIX_PATH $ENV{HOME}/.cos-local.2) - find_library(S3_LIBRARY s3) - find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) - find_library(XML2_LIBRARY xml2) - find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) - find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH) - target_link_libraries( - common + # s3 + PUBLIC ${S3_LIBRARY} + PUBLIC ${CURL_LIBRARY} + PUBLIC ${SSL_LIBRARY} + PUBLIC ${CRYPTO_LIBRARY} + PUBLIC ${XML2_LIBRARY} + ) - # s3 - PUBLIC ${S3_LIBRARY} - PUBLIC ${CURL_LIBRARY} - PUBLIC ${SSL_LIBRARY} - PUBLIC ${CRYPTO_LIBRARY} - PUBLIC ${XML2_LIBRARY} - ) + add_definitions(-DUSE_S3) + endif() - add_definitions(-DUSE_S3) + if(${BUILD_WITH_COS}) + set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") + find_library(APR_LIBRARY apr-1 PATHS /usr/local/apr/lib/) + find_library(APR_UTIL_LIBRARY aprutil-1 PATHS /usr/local/apr/lib/) + find_library(MINIXML_LIBRARY mxml) + find_library(CURL_LIBRARY curl) + target_link_libraries( + common + + # s3 + PUBLIC cos_c_sdk_static + 
PUBLIC ${APR_UTIL_LIBRARY} + PUBLIC ${APR_LIBRARY} + PUBLIC ${MINIXML_LIBRARY} + PUBLIC ${CURL_LIBRARY} + ) + + # s3 + FIND_PROGRAM(APR_CONFIG_BIN NAMES apr-config apr-1-config PATHS /usr/bin /usr/local/bin /usr/local/apr/bin/) + + if(APR_CONFIG_BIN) + EXECUTE_PROCESS( + COMMAND ${APR_CONFIG_BIN} --includedir + OUTPUT_VARIABLE APR_INCLUDE_DIR + OUTPUT_STRIP_TRAILING_WHITESPACE + ) endif() - if(${BUILD_WITH_COS}) - - set(CMAKE_FIND_LIBRARY_SUFFIXES ".a") - find_library(APR_LIBRARY apr-1 PATHS /usr/local/apr/lib/) - find_library(APR_UTIL_LIBRARY aprutil-1 PATHS /usr/local/apr/lib/) - find_library(MINIXML_LIBRARY mxml) - find_library(CURL_LIBRARY curl) - target_link_libraries( - common - - # s3 - PUBLIC cos_c_sdk_static - PUBLIC ${APR_UTIL_LIBRARY} - PUBLIC ${APR_LIBRARY} - PUBLIC ${MINIXML_LIBRARY} - PUBLIC ${CURL_LIBRARY} - ) - - # s3 - FIND_PROGRAM(APR_CONFIG_BIN NAMES apr-config apr-1-config PATHS /usr/bin /usr/local/bin /usr/local/apr/bin/) - IF (APR_CONFIG_BIN) - EXECUTE_PROCESS( - COMMAND ${APR_CONFIG_BIN} --includedir - OUTPUT_VARIABLE APR_INCLUDE_DIR - OUTPUT_STRIP_TRAILING_WHITESPACE - ) - ENDIF() - include_directories (${APR_INCLUDE_DIR}) - target_include_directories( - common - PUBLIC "${TD_SOURCE_DIR}/contrib/cos-c-sdk-v5/cos_c_sdk" - PUBLIC "$ENV{HOME}/.cos-local.1/include" - ) - - add_definitions(-DUSE_COS) - endif(${BUILD_WITH_COS}) + include_directories(${APR_INCLUDE_DIR}) + target_include_directories( + common + PUBLIC "${TD_SOURCE_DIR}/contrib/cos-c-sdk-v5/cos_c_sdk" + PUBLIC "$ENV{HOME}/.cos-local.1/include" + ) + add_definitions(-DUSE_COS) + endif(${BUILD_WITH_COS}) endif() if(${BUILD_TEST}) - ADD_SUBDIRECTORY(test) + ADD_SUBDIRECTORY(test) endif(${BUILD_TEST}) diff --git a/source/dnode/mgmt/CMakeLists.txt b/source/dnode/mgmt/CMakeLists.txt index d72301279e..5d356e06b1 100644 --- a/source/dnode/mgmt/CMakeLists.txt +++ b/source/dnode/mgmt/CMakeLists.txt @@ -10,29 +10,28 @@ add_subdirectory(test) aux_source_directory(exe EXEC_SRC) add_executable(taosd ${EXEC_SRC}) target_include_directories( - taosd - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/node_mgmt/inc" + taosd + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/node_mgmt/inc" ) -IF (TD_ENTERPRISE) +IF(TD_ENTERPRISE) IF(${BUILD_WITH_S3}) - add_definitions(-DUSE_S3) + add_definitions(-DUSE_S3) ELSEIF(${BUILD_WITH_COS}) - add_definitions(-DUSE_COS) + add_definitions(-DUSE_COS) ENDIF() ENDIF() -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib -ljemalloc) SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc") -ELSE () +ELSE() SET(LINK_JEMALLOC "") -ENDIF () - -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) - ADD_DEPENDENCIES(taosd jemalloc) - target_link_libraries(taosd dnode crypt ${LINK_JEMALLOC}) -ELSE () - target_link_libraries(taosd dnode crypt) -ENDIF () +ENDIF() +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) + ADD_DEPENDENCIES(taosd jemalloc) + target_link_libraries(taosd dnode crypt ${LINK_JEMALLOC}) +ELSE() + target_link_libraries(taosd dnode crypt) +ENDIF() diff --git a/source/dnode/mgmt/mgmt_dnode/CMakeLists.txt b/source/dnode/mgmt/mgmt_dnode/CMakeLists.txt index f7920d3d8e..76e51ac44f 100644 --- a/source/dnode/mgmt/mgmt_dnode/CMakeLists.txt +++ b/source/dnode/mgmt/mgmt_dnode/CMakeLists.txt @@ -1,24 +1,25 @@ aux_source_directory(src MGMT_DNODE) add_library(mgmt_dnode STATIC ${MGMT_DNODE}) -if (DEFINED GRANT_CFG_INCLUDE_DIR) + +if(DEFINED GRANT_CFG_INCLUDE_DIR) 
add_definitions(-DGRANTS_CFG) endif() -IF (NOT BUILD_DM_MODULE) +if(NOT BUILD_DM_MODULE) MESSAGE(STATUS "NOT BUILD_DM_MODULE") target_link_directories( mgmt_dnode PUBLIC "${TD_SOURCE_DIR}/deps/${TD_DEPS_DIR}/dm_static" ) -ELSE() - MESSAGE(STATUS "BUILD_DM_MODULE") -ENDIF() +else() + MESSAGE(STATUS "BUILD_DM_MODULE") +endif() target_include_directories( - mgmt_dnode - PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/inc" - PUBLIC "${GRANT_CFG_INCLUDE_DIR}" + mgmt_dnode + PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/inc" + PUBLIC "${GRANT_CFG_INCLUDE_DIR}" ) target_link_libraries( - mgmt_dnode node_util dmodule + mgmt_dnode node_util dmodule ) \ No newline at end of file diff --git a/source/dnode/mgmt/node_mgmt/CMakeLists.txt b/source/dnode/mgmt/node_mgmt/CMakeLists.txt index 98de62eee1..f5198681bc 100644 --- a/source/dnode/mgmt/node_mgmt/CMakeLists.txt +++ b/source/dnode/mgmt/node_mgmt/CMakeLists.txt @@ -1,22 +1,22 @@ aux_source_directory(src IMPLEMENT_SRC) add_library(dnode STATIC ${IMPLEMENT_SRC}) target_link_libraries( - dnode mgmt_mnode mgmt_qnode mgmt_snode mgmt_vnode mgmt_dnode monitorfw tcs + dnode mgmt_mnode mgmt_qnode mgmt_snode mgmt_vnode mgmt_dnode monitorfw tcs ) -IF (TD_ENTERPRISE) +IF(TD_ENTERPRISE) IF(${BUILD_WITH_S3}) - add_definitions(-DUSE_S3) + add_definitions(-DUSE_S3) ELSEIF(${BUILD_WITH_COS}) - add_definitions(-DUSE_COS) + add_definitions(-DUSE_COS) ENDIF() ENDIF() -IF (DEFINED GRANT_CFG_INCLUDE_DIR) +IF(DEFINED GRANT_CFG_INCLUDE_DIR) add_definitions(-DGRANTS_CFG) ENDIF() target_include_directories( - dnode - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + dnode + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) diff --git a/source/dnode/mnode/impl/CMakeLists.txt b/source/dnode/mnode/impl/CMakeLists.txt index 1a74b6fa9f..8a390948ae 100644 --- a/source/dnode/mnode/impl/CMakeLists.txt +++ b/source/dnode/mnode/impl/CMakeLists.txt @@ -1,44 +1,46 @@ aux_source_directory(src MNODE_SRC) -IF (TD_PRIVILEGE) + +if(TD_PRIVILEGE) ADD_DEFINITIONS(-D_PRIVILEGE) -ENDIF () -IF (TD_ENTERPRISE) +endif() + +if(TD_ENTERPRISE) LIST(APPEND MNODE_SRC ${TD_ENTERPRISE_DIR}/src/plugins/privilege/src/privilege.c) LIST(APPEND MNODE_SRC ${TD_ENTERPRISE_DIR}/src/plugins/mnode/src/mndDb.c) LIST(APPEND MNODE_SRC ${TD_ENTERPRISE_DIR}/src/plugins/mnode/src/mndVgroup.c) LIST(APPEND MNODE_SRC ${TD_ENTERPRISE_DIR}/src/plugins/mnode/src/mndDnode.c) LIST(APPEND MNODE_SRC ${TD_ENTERPRISE_DIR}/src/plugins/view/src/mndView.c) - IF(${BUILD_WITH_S3}) + if(${BUILD_WITH_S3}) add_definitions(-DUSE_S3) ELSEIF(${BUILD_WITH_COS}) add_definitions(-DUSE_COS) - ENDIF() + endif() - IF(${BUILD_WITH_ANALYSIS}) + if(${BUILD_WITH_ANALYSIS}) add_definitions(-DUSE_ANAL) - ENDIF() -ENDIF () + endif() +endif() add_library(mnode STATIC ${MNODE_SRC}) target_include_directories( - mnode - PUBLIC "${TD_SOURCE_DIR}/include/dnode/mnode" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + mnode + PUBLIC "${TD_SOURCE_DIR}/include/dnode/mnode" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) target_link_libraries( - mnode scheduler sdb wal transport cjson sync monitor executor qworker stream parser audit monitorfw + mnode scheduler sdb wal transport cjson sync monitor executor qworker stream parser audit monitorfw ) -IF (DEFINED GRANT_CFG_INCLUDE_DIR) +if(DEFINED GRANT_CFG_INCLUDE_DIR) add_definitions(-DGRANTS_CFG) -ENDIF() +endif() -IF (TD_GRANT) +if(TD_GRANT) TARGET_LINK_LIBRARIES(mnode grant) ADD_DEFINITIONS(-D_GRANT) -ENDIF () +endif() if(${BUILD_TEST}) - add_subdirectory(test) + add_subdirectory(test) endif(${BUILD_TEST}) diff --git a/source/dnode/vnode/CMakeLists.txt 
b/source/dnode/vnode/CMakeLists.txt index 83ed98d7b7..c377e69f03 100644 --- a/source/dnode/vnode/CMakeLists.txt +++ b/source/dnode/vnode/CMakeLists.txt @@ -1,24 +1,26 @@ # vnode add_subdirectory(src/tqCommon) add_library(vnode STATIC "") + if(${TD_DARWIN}) target_compile_options(vnode PRIVATE -Wno-error=single-bit-bitfield-constant-conversion) endif(${TD_DARWIN}) + set( - VNODE_SOURCE_FILES - "src/vnd/vnodeOpen.c" - "src/vnd/vnodeBufPool.c" - "src/vnd/vnodeCfg.c" - "src/vnd/vnodeCommit.c" - "src/vnd/vnodeQuery.c" - "src/vnd/vnodeModule.c" - "src/vnd/vnodeSvr.c" - "src/vnd/vnodeSync.c" - "src/vnd/vnodeSnapshot.c" - "src/vnd/vnodeRetention.c" - "src/vnd/vnodeInitApi.c" - "src/vnd/vnodeAsync.c" - "src/vnd/vnodeHash.c" + VNODE_SOURCE_FILES + "src/vnd/vnodeOpen.c" + "src/vnd/vnodeBufPool.c" + "src/vnd/vnodeCfg.c" + "src/vnd/vnodeCommit.c" + "src/vnd/vnodeQuery.c" + "src/vnd/vnodeModule.c" + "src/vnd/vnodeSvr.c" + "src/vnd/vnodeSync.c" + "src/vnd/vnodeSnapshot.c" + "src/vnd/vnodeRetention.c" + "src/vnd/vnodeInitApi.c" + "src/vnd/vnodeAsync.c" + "src/vnd/vnodeHash.c" # meta "src/meta/metaOpen.c" @@ -40,23 +42,23 @@ set( "src/sma/smaSnapshot.c" "src/sma/smaTimeRange.c" - # # tsdb - # "src/tsdb/tsdbCommit.c" - # "src/tsdb/tsdbFile.c" - # "src/tsdb/tsdbFS.c" - # "src/tsdb/tsdbOpen.c" - # "src/tsdb/tsdbMemTable.c" - # "src/tsdb/tsdbRead.c" - # "src/tsdb/tsdbCache.c" - # "src/tsdb/tsdbWrite.c" - # "src/tsdb/tsdbReaderWriter.c" - # "src/tsdb/tsdbUtil.c" - # "src/tsdb/tsdbSnapshot.c" - # "src/tsdb/tsdbCacheRead.c" - # "src/tsdb/tsdbRetention.c" - # "src/tsdb/tsdbDiskData.c" - # "src/tsdb/tsdbMergeTree.c" - # "src/tsdb/tsdbDataIter.c" + # # tsdb + # "src/tsdb/tsdbCommit.c" + # "src/tsdb/tsdbFile.c" + # "src/tsdb/tsdbFS.c" + # "src/tsdb/tsdbOpen.c" + # "src/tsdb/tsdbMemTable.c" + # "src/tsdb/tsdbRead.c" + # "src/tsdb/tsdbCache.c" + # "src/tsdb/tsdbWrite.c" + # "src/tsdb/tsdbReaderWriter.c" + # "src/tsdb/tsdbUtil.c" + # "src/tsdb/tsdbSnapshot.c" + # "src/tsdb/tsdbCacheRead.c" + # "src/tsdb/tsdbRetention.c" + # "src/tsdb/tsdbDiskData.c" + # "src/tsdb/tsdbMergeTree.c" + # "src/tsdb/tsdbDataIter.c" # tq "src/tq/tq.c" @@ -71,14 +73,13 @@ set( "src/tq/tqSnapshot.c" "src/tq/tqStreamStateSnap.c" "src/tq/tqStreamTaskSnap.c" - ) aux_source_directory("src/tsdb/" TSDB_SOURCE_FILES) list( - APPEND - VNODE_SOURCE_FILES - ${TSDB_SOURCE_FILES} + APPEND + VNODE_SOURCE_FILES + ${TSDB_SOURCE_FILES} ) target_sources( @@ -87,34 +88,33 @@ target_sources( ${VNODE_SOURCE_FILES} ) -IF (TD_VNODE_PLUGINS) - target_sources( - vnode - PRIVATE - ${TD_ENTERPRISE_DIR}/src/plugins/vnode/src/tsdbCompact.c - ${TD_ENTERPRISE_DIR}/src/plugins/vnode/src/tsdbCompactMonitor.c - ${TD_ENTERPRISE_DIR}/src/plugins/vnode/src/vnodeCompact.c - ) -ENDIF () +if(TD_VNODE_PLUGINS) + target_sources( + vnode + PRIVATE + ${TD_ENTERPRISE_DIR}/src/plugins/vnode/src/tsdbCompact.c + ${TD_ENTERPRISE_DIR}/src/plugins/vnode/src/tsdbCompactMonitor.c + ${TD_ENTERPRISE_DIR}/src/plugins/vnode/src/vnodeCompact.c + ) +endif() -# IF (NOT ${TD_LINUX}) +# if (NOT ${TD_LINUX}) # target_include_directories( -# vnode -# PUBLIC "inc" -# PUBLIC "src/inc" -# PUBLIC "${TD_SOURCE_DIR}/include/libs/scalar" -# PUBLIC "${TD_SOURCE_DIR}/contrib/rocksdb/include" +# vnode +# PUBLIC "inc" +# PUBLIC "src/inc" +# PUBLIC "${TD_SOURCE_DIR}/include/libs/scalar" +# PUBLIC "${TD_SOURCE_DIR}/contrib/rocksdb/include" # ) # ELSE() # target_include_directories( -# vnode -# PUBLIC "inc" -# PUBLIC "src/inc" -# PUBLIC "${TD_SOURCE_DIR}/include/libs/scalar" +# vnode +# PUBLIC "inc" +# PUBLIC "src/inc" 
+# PUBLIC "${TD_SOURCE_DIR}/include/libs/scalar" # ) -#ENDIF(NOT ${TD_LINUX}) - -if (${BUILD_CONTRIB}) +# endif(NOT ${TD_LINUX}) +if(${BUILD_CONTRIB}) target_include_directories( vnode PUBLIC "inc" @@ -135,20 +135,21 @@ else() PUBLIC "${TD_SOURCE_DIR}/include/libs/crypt" PUBLIC "${TD_SOURCE_DIR}/include/dnode/vnode" ) - if (${TD_LINUX}) - target_include_directories( - vnode + + if(${TD_LINUX}) + target_include_directories( + vnode PUBLIC "${TD_SOURCE_DIR}/deps/${TD_DEPS_DIR}/rocksdb_static" ) target_link_directories( - vnode + vnode PUBLIC "${TD_SOURCE_DIR}/deps/${TD_DEPS_DIR}/rocksdb_static" ) endif() endif() target_link_directories( - vnode + vnode PUBLIC "${CMAKE_BINARY_DIR}/build/lib" ) @@ -170,7 +171,7 @@ target_link_libraries( # PUBLIC bdb # PUBLIC scalar - #PUBLIC zstd + # PUBLIC zstd PUBLIC rocksdb PUBLIC transport PUBLIC stream @@ -178,9 +179,9 @@ target_link_libraries( PUBLIC tqCommon ) -IF (TD_GRANT) - TARGET_LINK_LIBRARIES(vnode PUBLIC grant) -ENDIF () +if(TD_GRANT) + TARGET_LINK_LIBRARIES(vnode PUBLIC grant) +endif() target_compile_definitions(vnode PUBLIC -DMETA_REFACT) diff --git a/source/libs/audit/CMakeLists.txt b/source/libs/audit/CMakeLists.txt index 2a04f084f1..14648cc1a2 100644 --- a/source/libs/audit/CMakeLists.txt +++ b/source/libs/audit/CMakeLists.txt @@ -1,7 +1,8 @@ aux_source_directory(src AUDIT_SRC) -IF (TD_ENTERPRISE) + +IF(TD_ENTERPRISE) LIST(APPEND AUDIT_SRC ${TD_ENTERPRISE_DIR}/src/plugins/audit/src/audit.c) -ENDIF () +ENDIF() add_library(audit STATIC ${AUDIT_SRC}) target_include_directories( diff --git a/source/libs/azure/CMakeLists.txt b/source/libs/azure/CMakeLists.txt index 1d46a2924b..1516a35c4d 100644 --- a/source/libs/azure/CMakeLists.txt +++ b/source/libs/azure/CMakeLists.txt @@ -1,4 +1,4 @@ -#if(${TD_LINUX}) +# if(${TD_LINUX}) aux_source_directory(src AZ_SRC) add_library(az STATIC ${AZ_SRC}) @@ -13,20 +13,21 @@ if(${BUILD_S3}) endif() target_include_directories( - az - PUBLIC "${TD_SOURCE_DIR}/include/libs/azure" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + az + PUBLIC "${TD_SOURCE_DIR}/include/libs/azure" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) target_link_libraries( - az - PUBLIC cjson - PUBLIC os - PUBLIC util - PUBLIC common + az + PUBLIC cjson + PUBLIC os + PUBLIC util + PUBLIC common ) if(${BUILD_TEST}) - add_subdirectory(test) + add_subdirectory(test) endif(${BUILD_TEST}) -#endif(${TD_LINUX}) + +# endif(${TD_LINUX}) diff --git a/source/libs/azure/test/CMakeLists.txt b/source/libs/azure/test/CMakeLists.txt index ea91dbd2fc..f00257f228 100644 --- a/source/libs/azure/test/CMakeLists.txt +++ b/source/libs/azure/test/CMakeLists.txt @@ -1,22 +1,20 @@ -if (TD_LINUX) +if(TD_LINUX) + aux_source_directory(. AZ_TEST_SRC) -aux_source_directory(. 
AZ_TEST_SRC) - -add_executable(azTest ${AZ_TEST_SRC}) -target_include_directories(azTest - PUBLIC - "${TD_SOURCE_DIR}/include/libs/azure" - "${CMAKE_CURRENT_SOURCE_DIR}/../inc" -) - -target_link_libraries(azTest - az - gtest_main -) -enable_testing() -add_test( - NAME az_test - COMMAND azTest -) + add_executable(azTest ${AZ_TEST_SRC}) + target_include_directories(azTest + PUBLIC + "${TD_SOURCE_DIR}/include/libs/azure" + "${CMAKE_CURRENT_SOURCE_DIR}/../inc" + ) + target_link_libraries(azTest + az + gtest_main + ) + enable_testing() + add_test( + NAME az_test + COMMAND azTest + ) endif(TD_LINUX) diff --git a/source/libs/catalog/CMakeLists.txt b/source/libs/catalog/CMakeLists.txt index 6f09af8a8f..179781c2c9 100644 --- a/source/libs/catalog/CMakeLists.txt +++ b/source/libs/catalog/CMakeLists.txt @@ -7,10 +7,10 @@ target_include_directories( ) target_link_libraries( - catalog - PRIVATE os util transport qcom nodes + catalog + PRIVATE os util transport qcom nodes ) # if(${BUILD_TEST}) -# ADD_SUBDIRECTORY(test) +# ADD_SUBDIRECTORY(test) # endif(${BUILD_TEST}) diff --git a/source/libs/command/CMakeLists.txt b/source/libs/command/CMakeLists.txt index a890972d14..308f652861 100644 --- a/source/libs/command/CMakeLists.txt +++ b/source/libs/command/CMakeLists.txt @@ -12,5 +12,5 @@ target_link_libraries( ) if(${BUILD_TEST}) - ADD_SUBDIRECTORY(test) + ADD_SUBDIRECTORY(test) endif(${BUILD_TEST}) diff --git a/source/libs/crypt/CMakeLists.txt b/source/libs/crypt/CMakeLists.txt index e6d73b1882..c29c9a4a29 100644 --- a/source/libs/crypt/CMakeLists.txt +++ b/source/libs/crypt/CMakeLists.txt @@ -1,8 +1,8 @@ aux_source_directory(src CRYPT_SRC) -IF (TD_ENTERPRISE) +IF(TD_ENTERPRISE) LIST(APPEND CRYPT_SRC ${TD_ENTERPRISE_DIR}/src/plugins/crypt/cryptImpl.c) -ENDIF () +ENDIF() add_library(crypt STATIC ${CRYPT_SRC}) target_include_directories( diff --git a/source/libs/executor/CMakeLists.txt b/source/libs/executor/CMakeLists.txt index af2c3986aa..014b538375 100644 --- a/source/libs/executor/CMakeLists.txt +++ b/source/libs/executor/CMakeLists.txt @@ -1,24 +1,25 @@ aux_source_directory(src EXECUTOR_SRC) add_library(executor STATIC ${EXECUTOR_SRC}) + if(${TD_DARWIN}) - target_compile_options(executor PRIVATE -Wno-error=deprecated-non-prototype) + target_compile_options(executor PRIVATE -Wno-error=deprecated-non-prototype) endif(${TD_DARWIN}) -IF(${BUILD_WITH_ANALYSIS}) +if(${BUILD_WITH_ANALYSIS}) add_definitions(-DUSE_ANAL) -ENDIF() +endif() target_link_libraries(executor - PRIVATE os util common function parser planner qcom scalar nodes index wal tdb geometry - ) + PRIVATE os util common function parser planner qcom scalar nodes index wal tdb geometry +) target_include_directories( - executor - PUBLIC "${TD_SOURCE_DIR}/include/libs/executor" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + executor + PUBLIC "${TD_SOURCE_DIR}/include/libs/executor" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) if(${BUILD_TEST}) - ADD_SUBDIRECTORY(test) + ADD_SUBDIRECTORY(test) endif(${BUILD_TEST}) diff --git a/source/libs/function/CMakeLists.txt b/source/libs/function/CMakeLists.txt index 3a68648d49..4164852111 100644 --- a/source/libs/function/CMakeLists.txt +++ b/source/libs/function/CMakeLists.txt @@ -5,115 +5,114 @@ add_library(function STATIC ${FUNCTION_SRC} ${FUNCTION_SRC_DETAIL}) target_include_directories( function PUBLIC - "${TD_SOURCE_DIR}/include/libs/function" - "${TD_SOURCE_DIR}/include/util" - "${TD_SOURCE_DIR}/include/common" - "${TD_SOURCE_DIR}/include/client" - "${TD_SOURCE_DIR}/contrib/libuv/include" + 
"${TD_SOURCE_DIR}/include/libs/function" + "${TD_SOURCE_DIR}/include/util" + "${TD_SOURCE_DIR}/include/common" + "${TD_SOURCE_DIR}/include/client" + "${TD_SOURCE_DIR}/contrib/libuv/include" PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) - ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib -ljemalloc) - SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc") -ELSE () - SET(LINK_JEMALLOC "") -ENDIF () +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) + ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib -ljemalloc) + SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc") +ELSE() + SET(LINK_JEMALLOC "") +ENDIF() -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEPENDENCIES(function jemalloc) -ENDIF () +ENDIF() target_link_libraries( function - PRIVATE os - PRIVATE util - PRIVATE common - PRIVATE nodes - PRIVATE qcom - PRIVATE scalar - PRIVATE geometry - PRIVATE transport + PRIVATE os + PRIVATE util + PRIVATE common + PRIVATE nodes + PRIVATE qcom + PRIVATE scalar + PRIVATE geometry + PRIVATE transport PUBLIC uv_a ) add_executable(runUdf test/runUdf.c) target_include_directories( - runUdf - PUBLIC - "${TD_SOURCE_DIR}/include/libs/function" - "${TD_SOURCE_DIR}/contrib/libuv/include" - "${TD_SOURCE_DIR}/include/util" - "${TD_SOURCE_DIR}/include/common" - "${TD_SOURCE_DIR}/include/client" - "${TD_SOURCE_DIR}/include/os" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + runUdf + PUBLIC + "${TD_SOURCE_DIR}/include/libs/function" + "${TD_SOURCE_DIR}/contrib/libuv/include" + "${TD_SOURCE_DIR}/include/util" + "${TD_SOURCE_DIR}/include/common" + "${TD_SOURCE_DIR}/include/client" + "${TD_SOURCE_DIR}/include/os" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEPENDENCIES(runUdf jemalloc) -ENDIF () +ENDIF() target_link_libraries( - runUdf - PUBLIC uv_a - PRIVATE os util common nodes function ${LINK_JEMALLOC} + runUdf + PUBLIC uv_a + PRIVATE os util common nodes function ${LINK_JEMALLOC} ) add_library(udf1 STATIC MODULE test/udf1.c) target_include_directories( - udf1 - PUBLIC - "${TD_SOURCE_DIR}/include/libs/function" - "${TD_SOURCE_DIR}/include/util" - "${TD_SOURCE_DIR}/include/common" - "${TD_SOURCE_DIR}/include/client" - "${TD_SOURCE_DIR}/include/os" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + udf1 + PUBLIC + "${TD_SOURCE_DIR}/include/libs/function" + "${TD_SOURCE_DIR}/include/util" + "${TD_SOURCE_DIR}/include/common" + "${TD_SOURCE_DIR}/include/client" + "${TD_SOURCE_DIR}/include/os" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEPENDENCIES(udf1 jemalloc) -ENDIF () +ENDIF() target_link_libraries( udf1 PUBLIC os ${LINK_JEMALLOC}) - add_library(udf1_dup STATIC MODULE test/udf1_dup.c) target_include_directories( - udf1_dup - PUBLIC - "${TD_SOURCE_DIR}/include/libs/function" - "${TD_SOURCE_DIR}/include/util" - "${TD_SOURCE_DIR}/include/common" - "${TD_SOURCE_DIR}/include/client" - "${TD_SOURCE_DIR}/include/os" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + udf1_dup + PUBLIC + "${TD_SOURCE_DIR}/include/libs/function" + "${TD_SOURCE_DIR}/include/util" + "${TD_SOURCE_DIR}/include/common" + "${TD_SOURCE_DIR}/include/client" + "${TD_SOURCE_DIR}/include/os" + PRIVATE 
"${CMAKE_CURRENT_SOURCE_DIR}/inc" ) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEPENDENCIES(udf1_dup jemalloc) -ENDIF () +ENDIF() target_link_libraries( udf1_dup PUBLIC os ${LINK_JEMALLOC}) add_library(udf2 STATIC MODULE test/udf2.c) target_include_directories( - udf2 - PUBLIC - "${TD_SOURCE_DIR}/include/libs/function" - "${TD_SOURCE_DIR}/include/util" - "${TD_SOURCE_DIR}/include/common" - "${TD_SOURCE_DIR}/include/client" - "${TD_SOURCE_DIR}/include/os" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + udf2 + PUBLIC + "${TD_SOURCE_DIR}/include/libs/function" + "${TD_SOURCE_DIR}/include/util" + "${TD_SOURCE_DIR}/include/common" + "${TD_SOURCE_DIR}/include/client" + "${TD_SOURCE_DIR}/include/os" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEPENDENCIES(udf2 jemalloc) -ENDIF () +ENDIF() target_link_libraries( udf2 PUBLIC os ${LINK_JEMALLOC} @@ -121,45 +120,44 @@ target_link_libraries( add_library(udf2_dup STATIC MODULE test/udf2_dup.c) target_include_directories( - udf2_dup - PUBLIC - "${TD_SOURCE_DIR}/include/libs/function" - "${TD_SOURCE_DIR}/include/util" - "${TD_SOURCE_DIR}/include/common" - "${TD_SOURCE_DIR}/include/client" - "${TD_SOURCE_DIR}/include/os" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + udf2_dup + PUBLIC + "${TD_SOURCE_DIR}/include/libs/function" + "${TD_SOURCE_DIR}/include/util" + "${TD_SOURCE_DIR}/include/common" + "${TD_SOURCE_DIR}/include/client" + "${TD_SOURCE_DIR}/include/os" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEPENDENCIES(udf2_dup jemalloc) -ENDIF () +ENDIF() target_link_libraries( udf2_dup PUBLIC os ${LINK_JEMALLOC} ) -#SET(EXECUTABLE_OUTPUT_PATH ${CMAKE_BINARY_DIR}/build/bin) +# SET(EXECUTABLE_OUTPUT_PATH ${CMAKE_BINARY_DIR}/build/bin) add_executable(udfd src/udfd.c) target_include_directories( - udfd - PUBLIC - "${TD_SOURCE_DIR}/include/libs/function" - "${TD_SOURCE_DIR}/contrib/libuv/include" - "${TD_SOURCE_DIR}/include/util" - "${TD_SOURCE_DIR}/include/common" - "${TD_SOURCE_DIR}/include/libs/transport" - "${TD_SOURCE_DIR}/include/client" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + udfd + PUBLIC + "${TD_SOURCE_DIR}/include/libs/function" + "${TD_SOURCE_DIR}/contrib/libuv/include" + "${TD_SOURCE_DIR}/include/util" + "${TD_SOURCE_DIR}/include/common" + "${TD_SOURCE_DIR}/include/libs/transport" + "${TD_SOURCE_DIR}/include/client" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEPENDENCIES(udfd jemalloc) -ENDIF () +ENDIF() target_link_libraries( udfd PUBLIC uv_a PRIVATE os util common nodes function ${LINK_JEMALLOC} - ) - +) diff --git a/source/libs/index/CMakeLists.txt b/source/libs/index/CMakeLists.txt index 6f3f48610c..246708926a 100644 --- a/source/libs/index/CMakeLists.txt +++ b/source/libs/index/CMakeLists.txt @@ -1,23 +1,22 @@ aux_source_directory(src INDEX_SRC) add_library(index STATIC ${INDEX_SRC}) target_include_directories( - index - PUBLIC "${TD_SOURCE_DIR}/include/libs/index" - PUBLIC "${TD_SOURCE_DIR}/include/os" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" - + index + PUBLIC "${TD_SOURCE_DIR}/include/libs/index" + PUBLIC "${TD_SOURCE_DIR}/include/os" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) target_link_libraries( - index - PUBLIC os - PUBLIC util - PUBLIC common - PUBLIC nodes - PUBLIC scalar - PUBLIC function + index + PUBLIC os + PUBLIC util + PUBLIC common + 
PUBLIC nodes + PUBLIC scalar + PUBLIC function ) -if (${BUILD_WITH_LUCENE}) +if(${BUILD_WITH_LUCENE}) target_include_directories( index PUBLIC "${TD_SOURCE_DIR}/deps/lucene/include" @@ -29,12 +28,10 @@ if (${BUILD_WITH_LUCENE}) ) endif(${BUILD_WITH_LUCENE}) -if (${BUILD_WITH_INVERTEDINDEX}) - add_definitions(-DUSE_INVERTED_INDEX) +if(${BUILD_WITH_INVERTEDINDEX}) + add_definitions(-DUSE_INVERTED_INDEX) endif(${BUILD_WITH_INVERTEDINDEX}) - -if (${BUILD_TEST}) - add_subdirectory(test) +if(${BUILD_TEST}) + add_subdirectory(test) endif(${BUILD_TEST}) - diff --git a/source/libs/monitor/CMakeLists.txt b/source/libs/monitor/CMakeLists.txt index cc8f40fa4c..23597718bf 100644 --- a/source/libs/monitor/CMakeLists.txt +++ b/source/libs/monitor/CMakeLists.txt @@ -9,5 +9,5 @@ target_include_directories( target_link_libraries(monitor os util common qcom transport monitorfw) if(${BUILD_TEST}) - add_subdirectory(test) + add_subdirectory(test) endif(${BUILD_TEST}) \ No newline at end of file diff --git a/source/libs/monitorfw/CMakeLists.txt b/source/libs/monitorfw/CMakeLists.txt index f08b2d6c2b..339a97fb94 100644 --- a/source/libs/monitorfw/CMakeLists.txt +++ b/source/libs/monitorfw/CMakeLists.txt @@ -5,7 +5,9 @@ target_include_directories( PUBLIC "${TD_SOURCE_DIR}/include/libs/monitorfw" PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) + if(${TD_DARWIN}) - target_compile_options(monitorfw PRIVATE -Wno-error=deprecated-pragma) + target_compile_options(monitorfw PRIVATE -Wno-error=deprecated-pragma) endif(${TD_DARWIN}) + target_link_libraries(monitorfw os util common transport) diff --git a/source/libs/parser/CMakeLists.txt b/source/libs/parser/CMakeLists.txt index c5ee1a00c4..f1b801c563 100644 --- a/source/libs/parser/CMakeLists.txt +++ b/source/libs/parser/CMakeLists.txt @@ -1,8 +1,8 @@ aux_source_directory(src PARSER_SRC) -IF (TD_ENTERPRISE) - LIST(APPEND PARSER_SRC ${TD_ENTERPRISE_DIR}/src/plugins/view/src/parserView.c) -ENDIF () +IF(TD_ENTERPRISE) + LIST(APPEND PARSER_SRC ${TD_ENTERPRISE_DIR}/src/plugins/view/src/parserView.c) +ENDIF() add_library(parser STATIC ${PARSER_SRC}) target_include_directories( @@ -17,5 +17,5 @@ target_link_libraries( ) if(${BUILD_TEST}) - ADD_SUBDIRECTORY(test) + ADD_SUBDIRECTORY(test) endif(${BUILD_TEST}) diff --git a/source/libs/qworker/CMakeLists.txt b/source/libs/qworker/CMakeLists.txt index 7a984cd000..5c5eafdbc5 100644 --- a/source/libs/qworker/CMakeLists.txt +++ b/source/libs/qworker/CMakeLists.txt @@ -8,8 +8,8 @@ target_include_directories( ) TARGET_LINK_LIBRARIES(qworker - PRIVATE os util transport nodes planner qcom executor index - ) + PRIVATE os util transport nodes planner qcom executor index +) if(${BUILD_TEST}) ADD_SUBDIRECTORY(test) diff --git a/source/libs/scheduler/CMakeLists.txt b/source/libs/scheduler/CMakeLists.txt index fafc2a27e0..c07d267f97 100644 --- a/source/libs/scheduler/CMakeLists.txt +++ b/source/libs/scheduler/CMakeLists.txt @@ -13,5 +13,5 @@ target_link_libraries( ) if(${BUILD_TEST}) - ADD_SUBDIRECTORY(test) + ADD_SUBDIRECTORY(test) endif(${BUILD_TEST}) diff --git a/source/libs/stream/CMakeLists.txt b/source/libs/stream/CMakeLists.txt index 27f5c46004..bb2a23b106 100644 --- a/source/libs/stream/CMakeLists.txt +++ b/source/libs/stream/CMakeLists.txt @@ -1,61 +1,59 @@ aux_source_directory(src STREAM_SRC) add_library(stream STATIC ${STREAM_SRC}) target_include_directories( - stream - PUBLIC "${TD_SOURCE_DIR}/include/libs/stream" - PUBLIC "${TD_SOURCE_DIR}/include/libs/tcs" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + stream + PUBLIC 
"${TD_SOURCE_DIR}/include/libs/stream" + PUBLIC "${TD_SOURCE_DIR}/include/libs/tcs" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) - if(${BUILD_WITH_ROCKSDB}) - if (${BUILD_CONTRIB}) + if(${BUILD_CONTRIB}) target_link_libraries( stream PUBLIC rocksdb tdb tcs - PRIVATE os util transport qcom executor wal index + PRIVATE os util transport qcom executor wal index ) target_include_directories( stream PUBLIC "${TD_SOURCE_DIR}/contrib/rocksdb/include" ) else() - if (TD_LINUX) + if(TD_LINUX) target_include_directories( - stream - PUBLIC "${TD_SOURCE_DIR}/deps/${TD_DEPS_DIR}/rocksdb_static" - ) - target_link_directories( - stream - PUBLIC "${TD_SOURCE_DIR}/deps/${TD_DEPS_DIR}/rocksdb_static" - ) - target_link_libraries( - stream - PUBLIC rocksdb tdb tcs - PRIVATE os util transport qcom executor wal index - ) - else() + stream + PUBLIC "${TD_SOURCE_DIR}/deps/${TD_DEPS_DIR}/rocksdb_static" + ) + target_link_directories( + stream + PUBLIC "${TD_SOURCE_DIR}/deps/${TD_DEPS_DIR}/rocksdb_static" + ) target_link_libraries( stream PUBLIC rocksdb tdb tcs - PRIVATE os util transport qcom executor wal index + PRIVATE os util transport qcom executor wal index + ) + else() + target_link_libraries( + stream + PUBLIC rocksdb tdb tcs + PRIVATE os util transport qcom executor wal index ) target_include_directories( stream PUBLIC "${TD_SOURCE_DIR}/contrib/rocksdb/include" ) endif() - endif() + endif() + add_definitions(-DUSE_ROCKSDB) endif(${BUILD_WITH_ROCKSDB}) - -#target_link_libraries( -# stream -# PUBLIC tdb -# PRIVATE os util transport qcom executor -#) - +# target_link_libraries( +# stream +# PUBLIC tdb +# PRIVATE os util transport qcom executor +# ) if(${BUILD_TEST}) - ADD_SUBDIRECTORY(test) + ADD_SUBDIRECTORY(test) endif(${BUILD_TEST}) diff --git a/source/libs/stream/test/CMakeLists.txt b/source/libs/stream/test/CMakeLists.txt index f2c985964d..ed66563225 100644 --- a/source/libs/stream/test/CMakeLists.txt +++ b/source/libs/stream/test/CMakeLists.txt @@ -1,81 +1,77 @@ - - # bloomFilterTest -#TARGET_LINK_LIBRARIES(streamUpdateTest - #PUBLIC os util common gtest gtest_main stream executor index - #) +# TARGET_LINK_LIBRARIES(streamUpdateTest +# PUBLIC os util common gtest gtest_main stream executor index +# ) -#TARGET_INCLUDE_DIRECTORIES( - #streamUpdateTest - #PUBLIC "${TD_SOURCE_DIR}/include/libs/stream/" - #PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" -#) +# TARGET_INCLUDE_DIRECTORIES( +# streamUpdateTest +# PUBLIC "${TD_SOURCE_DIR}/include/libs/stream/" +# PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" +# ) -#ADD_EXECUTABLE(checkpointTest checkpointTest.cpp) -#TARGET_LINK_LIBRARIES( - #checkpointTest - #PUBLIC os common gtest stream executor qcom index transport util -#) +# ADD_EXECUTABLE(checkpointTest checkpointTest.cpp) +# TARGET_LINK_LIBRARIES( +# checkpointTest +# PUBLIC os common gtest stream executor qcom index transport util +# ) -#TARGET_INCLUDE_DIRECTORIES( - #checkpointTest - #PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" -#) +# TARGET_INCLUDE_DIRECTORIES( +# checkpointTest +# PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" +# ) -#add_executable(backendTest "") +# add_executable(backendTest "") -#target_sources(backendTest - #PRIVATE - #"backendTest.cpp" -#) +# target_sources(backendTest +# PRIVATE +# "backendTest.cpp" +# ) -#TARGET_LINK_LIBRARIES( - #backendTest - #PUBLIC rocksdb - #PUBLIC os common gtest stream executor qcom index transport util -#) +# TARGET_LINK_LIBRARIES( +# backendTest +# PUBLIC rocksdb +# PUBLIC os common gtest stream executor qcom index transport util +# ) 
-#TARGET_INCLUDE_DIRECTORIES( - #backendTest - #PUBLIC "${TD_SOURCE_DIR}/include/libs/stream/" - #PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" -#) +# TARGET_INCLUDE_DIRECTORIES( +# backendTest +# PUBLIC "${TD_SOURCE_DIR}/include/libs/stream/" +# PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" +# ) -#add_test( - #NAME streamUpdateTest - #COMMAND streamUpdateTest -#) +# add_test( +# NAME streamUpdateTest +# COMMAND streamUpdateTest +# ) -#add_test( - #NAME checkpointTest - #COMMAND checkpointTest -#) -#add_test( - #NAME backendTest - #COMMAND backendTest -#) +# add_test( +# NAME checkpointTest +# COMMAND checkpointTest +# ) +# add_test( +# NAME backendTest +# COMMAND backendTest +# ) +# add_executable(backendTest "") -#add_executable(backendTest "") +# target_sources(backendTest +# PUBLIC +# "backendTest.cpp" +# ) -#target_sources(backendTest - #PUBLIC - #"backendTest.cpp" -#) - -#target_include_directories( - #backendTest - #PUBLIC "${TD_SOURCE_DIR}/include/libs/stream/" - #PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" -#) - -#target_link_libraries( - #backendTest - #PUBLIC rocksdb - #PUBLIC os common gtest stream executor qcom index transport util -#) +# target_include_directories( +# backendTest +# PUBLIC "${TD_SOURCE_DIR}/include/libs/stream/" +# PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" +# ) +# target_link_libraries( +# backendTest +# PUBLIC rocksdb +# PUBLIC os common gtest stream executor qcom index transport util +# ) MESSAGE(STATUS "build parser unit test") @@ -86,19 +82,19 @@ IF(NOT TD_DARWIN) ADD_EXECUTABLE(backendTest ${SOURCE_LIST}) TARGET_LINK_LIBRARIES( - backendTest - PUBLIC rocksdb - PUBLIC os common gtest stream executor qcom index transport util vnode + backendTest + PUBLIC rocksdb + PUBLIC os common gtest stream executor qcom index transport util vnode ) TARGET_INCLUDE_DIRECTORIES( - backendTest + backendTest PUBLIC "${TD_SOURCE_DIR}/include/libs/stream/" PRIVATE "${TD_SOURCE_DIR}/source/libs/stream/inc" ) ADD_TEST( - NAME backendTest - COMMAND backendTest + NAME backendTest + COMMAND backendTest ) -ENDIF () +ENDIF() diff --git a/source/libs/tcs/CMakeLists.txt b/source/libs/tcs/CMakeLists.txt index e0de823c7a..95c167d737 100644 --- a/source/libs/tcs/CMakeLists.txt +++ b/source/libs/tcs/CMakeLists.txt @@ -11,6 +11,7 @@ target_link_libraries( tcs PUBLIC az PUBLIC common + # PUBLIC cjson # PUBLIC os # PUBLIC util diff --git a/source/libs/tcs/test/CMakeLists.txt b/source/libs/tcs/test/CMakeLists.txt index 1252736b33..909128db37 100644 --- a/source/libs/tcs/test/CMakeLists.txt +++ b/source/libs/tcs/test/CMakeLists.txt @@ -1,22 +1,20 @@ -if (TD_LINUX) +if(TD_LINUX) + aux_source_directory(. TCS_TEST_SRC) -aux_source_directory(. 
TCS_TEST_SRC) - -add_executable(tcsTest ${TCS_TEST_SRC}) -target_include_directories(tcsTest - PUBLIC - "${TD_SOURCE_DIR}/include/libs/tcs" - "${CMAKE_CURRENT_SOURCE_DIR}/../inc" -) - -target_link_libraries(tcsTest - tcs - gtest_main -) -enable_testing() -add_test( - NAME tcs_test - COMMAND tcsTest -) + add_executable(tcsTest ${TCS_TEST_SRC}) + target_include_directories(tcsTest + PUBLIC + "${TD_SOURCE_DIR}/include/libs/tcs" + "${CMAKE_CURRENT_SOURCE_DIR}/../inc" + ) + target_link_libraries(tcsTest + tcs + gtest_main + ) + enable_testing() + add_test( + NAME tcs_test + COMMAND tcsTest + ) endif() diff --git a/source/libs/tfs/CMakeLists.txt b/source/libs/tfs/CMakeLists.txt index ef1afa01a1..98572f94d8 100644 --- a/source/libs/tfs/CMakeLists.txt +++ b/source/libs/tfs/CMakeLists.txt @@ -9,5 +9,5 @@ target_include_directories( target_link_libraries(tfs os util common monitor) if(${BUILD_TEST}) - add_subdirectory(test) + add_subdirectory(test) endif(${BUILD_TEST}) \ No newline at end of file diff --git a/source/libs/transport/CMakeLists.txt b/source/libs/transport/CMakeLists.txt index a48926d2d4..6ad130017a 100644 --- a/source/libs/transport/CMakeLists.txt +++ b/source/libs/transport/CMakeLists.txt @@ -1,34 +1,30 @@ aux_source_directory(src TRANSPORT_SRC) add_library(transport STATIC ${TRANSPORT_SRC}) target_include_directories( - transport - PUBLIC "${TD_SOURCE_DIR}/include/libs/transport" - PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" + transport + PUBLIC "${TD_SOURCE_DIR}/include/libs/transport" + PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}/inc" ) target_link_libraries( - transport - PUBLIC lz4_static - PUBLIC os - PUBLIC util - PUBLIC common - PUBLIC zlibstatic + transport + PUBLIC lz4_static + PUBLIC os + PUBLIC util + PUBLIC common + PUBLIC zlibstatic ) -if (${BUILD_WITH_UV_TRANS}) -if (${BUILD_WITH_UV}) - target_link_libraries( - transport - PUBLIC uv_a - ) - add_definitions(-DUSE_UV) -endif(${BUILD_WITH_UV}) -endif(${BUILD_WITH_UV_TRANS}) -if (${BUILD_TEST}) +if(${BUILD_WITH_UV_TRANS}) + if(${BUILD_WITH_UV}) + target_link_libraries( + transport + PUBLIC uv_a + ) + add_definitions(-DUSE_UV) + endif(${BUILD_WITH_UV}) +endif(${BUILD_WITH_UV_TRANS}) + +if(${BUILD_TEST}) add_subdirectory(test) endif(${BUILD_TEST}) - - - - - diff --git a/source/util/CMakeLists.txt b/source/util/CMakeLists.txt index 4972e9f50b..063988ea00 100644 --- a/source/util/CMakeLists.txt +++ b/source/util/CMakeLists.txt @@ -1,19 +1,20 @@ configure_file("${CMAKE_CURRENT_SOURCE_DIR}/src/version.c.in" "${CMAKE_CURRENT_SOURCE_DIR}/src/version.c") aux_source_directory(src UTIL_SRC) add_library(util STATIC ${UTIL_SRC}) -if (DEFINED GRANT_CFG_INCLUDE_DIR) - add_definitions(-DGRANTS_CFG) + +if(DEFINED GRANT_CFG_INCLUDE_DIR) + add_definitions(-DGRANTS_CFG) endif() -IF (${ASSERT_NOT_CORE}) +if(${ASSERT_NOT_CORE}) ADD_DEFINITIONS(-DASSERT_NOT_CORE) MESSAGE(STATUS "disable assert core") -ELSE () +else() MESSAGE(STATUS "enable assert core") -ENDIF (${ASSERT_NOT_CORE}) +endif(${ASSERT_NOT_CORE}) if(${BUILD_WITH_ANALYSIS}) - add_definitions(-DUSE_ANAL) + add_definitions(-DUSE_ANAL) endif() target_include_directories( @@ -34,7 +35,7 @@ target_link_directories( PUBLIC "${TD_SOURCE_DIR}/contrib/pcre2" ) -if (TD_LINUX) +if(TD_LINUX) target_link_libraries( util PUBLIC os common @@ -43,10 +44,10 @@ if (TD_LINUX) ) else() target_link_libraries( - util - PUBLIC os common - PUBLIC lz4_static pcre2-8 - PUBLIC api cjson geos_c TSZ + util + PUBLIC os common + PUBLIC lz4_static pcre2-8 + PUBLIC api cjson geos_c TSZ ) endif() diff --git 
a/tools/CMakeLists.txt b/tools/CMakeLists.txt index a16a03d30a..87630b773b 100644 --- a/tools/CMakeLists.txt +++ b/tools/CMakeLists.txt @@ -1,21 +1,23 @@ -IF (TD_WEBSOCKET) - IF (TD_LINUX) +IF(TD_WEBSOCKET) + IF(TD_LINUX) SET(websocket_lib_file "libtaosws.so") - ELSEIF (TD_DARWIN) + ELSEIF(TD_DARWIN) SET(websocket_lib_file "libtaosws.dylib") - ENDIF () + ENDIF() + MESSAGE("${Green} use libtaos-ws${ColourReset}") - IF (TD_ALPINE) + + IF(TD_ALPINE) include(ExternalProject) ExternalProject_Add(taosws-rs - PREFIX "taosws-rs" - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosws-rs - BUILD_ALWAYS off - DEPENDS taos - BUILD_IN_SOURCE 1 + PREFIX "taosws-rs" + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosws-rs + BUILD_ALWAYS off + DEPENDS taos + BUILD_IN_SOURCE 1 CONFIGURE_COMMAND cmake -E echo "taosws-rs no need cmake to config" PATCH_COMMAND - COMMAND git clean -f -d + COMMAND git clean -f -d BUILD_COMMAND COMMAND cargo update COMMAND RUSTFLAGS=-Ctarget-feature=-crt-static cargo build --release -p taos-ws-sys --features rustls @@ -23,18 +25,18 @@ IF (TD_WEBSOCKET) COMMAND cp target/release/${websocket_lib_file} ${CMAKE_BINARY_DIR}/build/lib COMMAND cmake -E make_directory ${CMAKE_BINARY_DIR}/build/include COMMAND cmake -E copy target/release/taosws.h ${CMAKE_BINARY_DIR}/build/include - ) - ELSEIF (TD_WINDOWS) + ) + ELSEIF(TD_WINDOWS) include(ExternalProject) ExternalProject_Add(taosws-rs - PREFIX "taosws-rs" - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosws-rs - BUILD_ALWAYS off - DEPENDS taos - BUILD_IN_SOURCE 1 + PREFIX "taosws-rs" + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosws-rs + BUILD_ALWAYS off + DEPENDS taos + BUILD_IN_SOURCE 1 CONFIGURE_COMMAND cmake -E echo "taosws-rs no need cmake to config" PATCH_COMMAND - COMMAND git clean -f -d + COMMAND git clean -f -d BUILD_COMMAND COMMAND cargo update COMMAND cargo build --release -p taos-ws-sys --features rustls @@ -43,18 +45,18 @@ IF (TD_WEBSOCKET) COMMAND cp target/release/taosws.dll.lib ${CMAKE_BINARY_DIR}/build/lib/taosws.lib COMMAND cmake -E make_directory ${CMAKE_BINARY_DIR}/build/include COMMAND cmake -E copy target/release/taosws.h ${CMAKE_BINARY_DIR}/build/include - ) + ) ELSE() include(ExternalProject) ExternalProject_Add(taosws-rs - PREFIX "taosws-rs" - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosws-rs - BUILD_ALWAYS off - DEPENDS taos - BUILD_IN_SOURCE 1 + PREFIX "taosws-rs" + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosws-rs + BUILD_ALWAYS off + DEPENDS taos + BUILD_IN_SOURCE 1 CONFIGURE_COMMAND cmake -E echo "taosws-rs no need cmake to config" PATCH_COMMAND - COMMAND git clean -f -d + COMMAND git clean -f -d BUILD_COMMAND COMMAND cargo update COMMAND cargo build --release -p taos-ws-sys --features rustls @@ -62,11 +64,11 @@ IF (TD_WEBSOCKET) COMMAND cp target/release/${websocket_lib_file} ${CMAKE_BINARY_DIR}/build/lib COMMAND cmake -E make_directory ${CMAKE_BINARY_DIR}/build/include COMMAND cmake -E copy target/release/taosws.h ${CMAKE_BINARY_DIR}/build/include - ) - ENDIF () -ENDIF () + ) + ENDIF() +ENDIF() -IF (TD_TAOS_TOOLS) +IF(TD_TAOS_TOOLS) INCLUDE_DIRECTORIES(${TD_SOURCE_DIR}/tools/taos_tools/deps/avro/lang/c/src) INCLUDE_DIRECTORIES(${TD_SOURCE_DIR}/include/client) INCLUDE_DIRECTORIES(${TD_SOURCE_DIR}/include/common) @@ -74,69 +76,74 @@ IF (TD_TAOS_TOOLS) INCLUDE_DIRECTORIES(${TD_SOURCE_DIR}/include/os) INCLUDE_DIRECTORIES(${TD_SOURCE_DIR}/include/libs/transport) ADD_SUBDIRECTORY(taos-tools) -ENDIF () +ENDIF() add_subdirectory(shell) -IF (TD_BUILD_HTTP) + +IF(TD_BUILD_HTTP) MESSAGE("") MESSAGE("${Yellow} use original embedded httpd 
${ColourReset}") MESSAGE("") - # ADD_SUBDIRECTORY(http) + +# ADD_SUBDIRECTORY(http) ELSEIF(TD_BUILD_TAOSA_INTERNAL) MESSAGE("${Yellow} use taosa internal as httpd ${ColourReset}") -ELSE () +ELSE() MESSAGE("") MESSAGE("${Green} use taosadapter as httpd, platform is ${PLATFORM_ARCH_STR} ${ColourReset}") EXECUTE_PROCESS( - COMMAND git rev-parse --abbrev-ref HEAD - RESULT_VARIABLE result_taos_version - OUTPUT_VARIABLE taos_version + COMMAND git rev-parse --abbrev-ref HEAD + RESULT_VARIABLE result_taos_version + OUTPUT_VARIABLE taos_version ) STRING(FIND ${taos_version} release is_release_branch) - IF ("${is_release_branch}" STREQUAL "0") + IF("${is_release_branch}" STREQUAL "0") STRING(SUBSTRING "${taos_version}" 12 -1 taos_version) STRING(STRIP "${taos_version}" taos_version) - ELSE () + ELSE() STRING(CONCAT taos_version "_branch_" "${taos_version}") STRING(STRIP "${taos_version}" taos_version) - ENDIF () + ENDIF() + EXECUTE_PROCESS( - COMMAND cd ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter + COMMAND cd ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter ) EXECUTE_PROCESS( - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter - COMMAND git rev-parse HEAD - RESULT_VARIABLE commit_sha1 - OUTPUT_VARIABLE taosadapter_commit_sha1 + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter + COMMAND git rev-parse HEAD + RESULT_VARIABLE commit_sha1 + OUTPUT_VARIABLE taosadapter_commit_sha1 ) - IF ("${taosadapter_commit_sha1}" STREQUAL "") + + IF("${taosadapter_commit_sha1}" STREQUAL "") SET(taosadapter_commit_sha1 "unknown") - ELSE () -# STRING(SUBSTRING "${taosadapter_commit_sha1}" 0 7 taosadapter_commit_sha1) + ELSE() + # STRING(SUBSTRING "${taosadapter_commit_sha1}" 0 7 taosadapter_commit_sha1) STRING(STRIP "${taosadapter_commit_sha1}" taosadapter_commit_sha1) - ENDIF () + ENDIF() + SET(taos_version ${TD_VER_NUMBER}) MESSAGE("${Green} taosAdapter will use ${taos_version} and commit ${taosadapter_commit_sha1} as version ${ColourReset}") EXECUTE_PROCESS( - COMMAND cd .. + COMMAND cd .. 
) MESSAGE("CURRENT SOURCE DIR ${CMAKE_CURRENT_SOURCE_DIR}") - IF (TD_WINDOWS) + IF(TD_WINDOWS) MESSAGE("Building taosAdapter on Windows") INCLUDE(ExternalProject) ExternalProject_Add(taosadapter - PREFIX "taosadapter" - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter - BUILD_ALWAYS off - DEPENDS taos - BUILD_IN_SOURCE 1 + PREFIX "taosadapter" + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter + BUILD_ALWAYS off + DEPENDS taos + BUILD_IN_SOURCE 1 CONFIGURE_COMMAND cmake -E echo "taosadapter no need cmake to config" PATCH_COMMAND - COMMAND git clean -f -d + COMMAND git clean -f -d BUILD_COMMAND COMMAND set CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client COMMAND set CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib @@ -153,58 +160,61 @@ ELSE () COMMAND cmake -E copy ./example/config/taosadapter.toml ${CMAKE_BINARY_DIR}/test/cfg/ COMMAND cmake -E echo "Copy taosadapter-debug.exe" COMMAND cmake -E copy taosadapter-debug.exe ${CMAKE_BINARY_DIR}/build/bin - ) - ELSEIF (TD_DARWIN) + ) + ELSEIF(TD_DARWIN) MESSAGE("Building taosAdapter on MACOS") INCLUDE(ExternalProject) ExternalProject_Add(taosadapter - PREFIX "taosadapter" - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter - BUILD_ALWAYS off - DEPENDS taos - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND cmake -E echo "taosadapter no need cmake to config" - PATCH_COMMAND - COMMAND git clean -f -d - BUILD_COMMAND - COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -ldflags "-s -w -X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" - COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -o taosadapter-debug -ldflags "-X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" - INSTALL_COMMAND - COMMAND cmake -E echo "Copy taosadapter" - COMMAND cmake -E copy taosadapter ${CMAKE_BINARY_DIR}/build/bin - COMMAND cmake -E make_directory ${CMAKE_BINARY_DIR}/test/cfg/ - COMMAND cmake -E echo "Copy taosadapter.toml" - COMMAND cmake -E copy ./example/config/taosadapter.toml ${CMAKE_BINARY_DIR}/test/cfg/ - COMMAND cmake -E copy ./taosadapter.service ${CMAKE_BINARY_DIR}/test/cfg/ - COMMAND cmake -E echo "Copy taosadapter-debug" - COMMAND cmake -E copy taosadapter-debug ${CMAKE_BINARY_DIR}/build/bin - ) - ELSE () - MESSAGE("Building taosAdapter on non-Windows") - INCLUDE(ExternalProject) - ExternalProject_Add(taosadapter - PREFIX "taosadapter" - SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter - BUILD_ALWAYS off - DEPENDS taos - BUILD_IN_SOURCE 1 + PREFIX "taosadapter" + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter + BUILD_ALWAYS off + DEPENDS taos + BUILD_IN_SOURCE 1 CONFIGURE_COMMAND cmake -E echo "taosadapter no need cmake to config" PATCH_COMMAND - COMMAND git clean -f -d + COMMAND git clean -f -d BUILD_COMMAND - COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -ldflags "-X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 
'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" -# COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -o taosadapter-debug -ldflags "-X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" + COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -ldflags "-s -w -X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" + COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -o taosadapter-debug -ldflags "-X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" INSTALL_COMMAND -# COMMAND cmake -E echo "Comparessing taosadapter.exe" -# COMMAND upx taosadapter || : COMMAND cmake -E echo "Copy taosadapter" COMMAND cmake -E copy taosadapter ${CMAKE_BINARY_DIR}/build/bin COMMAND cmake -E make_directory ${CMAKE_BINARY_DIR}/test/cfg/ COMMAND cmake -E echo "Copy taosadapter.toml" COMMAND cmake -E copy ./example/config/taosadapter.toml ${CMAKE_BINARY_DIR}/test/cfg/ COMMAND cmake -E copy ./taosadapter.service ${CMAKE_BINARY_DIR}/test/cfg/ -# COMMAND cmake -E echo "Copy taosadapter-debug" -# COMMAND cmake -E copy taosadapter-debug ${CMAKE_BINARY_DIR}/build/bin - ) - ENDIF () -ENDIF () + COMMAND cmake -E echo "Copy taosadapter-debug" + COMMAND cmake -E copy taosadapter-debug ${CMAKE_BINARY_DIR}/build/bin + ) + ELSE() + MESSAGE("Building taosAdapter on non-Windows") + INCLUDE(ExternalProject) + ExternalProject_Add(taosadapter + PREFIX "taosadapter" + SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/taosadapter + BUILD_ALWAYS off + DEPENDS taos + BUILD_IN_SOURCE 1 + CONFIGURE_COMMAND cmake -E echo "taosadapter no need cmake to config" + PATCH_COMMAND + COMMAND git clean -f -d + BUILD_COMMAND + COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -ldflags "-X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" + + # COMMAND CGO_CFLAGS=-I${CMAKE_CURRENT_SOURCE_DIR}/../include/client CGO_LDFLAGS=-L${CMAKE_BINARY_DIR}/build/lib go build -a -o taosadapter-debug -ldflags "-X 'github.com/taosdata/taosadapter/v3/version.Version=${taos_version}' -X 'github.com/taosdata/taosadapter/v3/version.CommitID=${taosadapter_commit_sha1}' -X 'github.com/taosdata/taosadapter/v3/version.BuildInfo=${TD_VER_OSTYPE}-${TD_VER_CPUTYPE} ${TD_VER_DATE}'" + INSTALL_COMMAND + + # COMMAND cmake -E echo "Comparessing taosadapter.exe" + # COMMAND upx taosadapter || : + COMMAND cmake -E echo "Copy taosadapter" + 
COMMAND cmake -E copy taosadapter ${CMAKE_BINARY_DIR}/build/bin + COMMAND cmake -E make_directory ${CMAKE_BINARY_DIR}/test/cfg/ + COMMAND cmake -E echo "Copy taosadapter.toml" + COMMAND cmake -E copy ./example/config/taosadapter.toml ${CMAKE_BINARY_DIR}/test/cfg/ + COMMAND cmake -E copy ./taosadapter.service ${CMAKE_BINARY_DIR}/test/cfg/ + + # COMMAND cmake -E echo "Copy taosadapter-debug" + # COMMAND cmake -E copy taosadapter-debug ${CMAKE_BINARY_DIR}/build/bin + ) + ENDIF() +ENDIF() diff --git a/tools/shell/CMakeLists.txt b/tools/shell/CMakeLists.txt index 0ce181808f..fd46870ac5 100644 --- a/tools/shell/CMakeLists.txt +++ b/tools/shell/CMakeLists.txt @@ -2,41 +2,41 @@ aux_source_directory(src SHELL_SRC) add_executable(shell ${SHELL_SRC}) -IF (TD_LINUX_64 AND JEMALLOC_ENABLED) +IF(TD_LINUX_64 AND JEMALLOC_ENABLED) ADD_DEFINITIONS(-DTD_JEMALLOC_ENABLED -I${CMAKE_BINARY_DIR}/build/include -L${CMAKE_BINARY_DIR}/build/lib -Wl,-rpath,${CMAKE_BINARY_DIR}/build/lib -ljemalloc) SET(LINK_JEMALLOC "-L${CMAKE_BINARY_DIR}/build/lib -ljemalloc") ADD_DEPENDENCIES(shell jemalloc) -ELSE () +ELSE() SET(LINK_JEMALLOC "") -ENDIF () +ENDIF() -IF (TD_LINUX AND TD_WEBSOCKET) +IF(TD_LINUX AND TD_WEBSOCKET) ADD_DEFINITIONS(-DWEBSOCKET -I${CMAKE_BINARY_DIR}/build/include -ltaosws) SET(LINK_WEBSOCKET "-L${CMAKE_BINARY_DIR}/build/lib -ltaosws") ADD_DEPENDENCIES(shell taosws-rs) -ELSEIF (TD_DARWIN AND TD_WEBSOCKET) +ELSEIF(TD_DARWIN AND TD_WEBSOCKET) ADD_DEFINITIONS(-DWEBSOCKET -I${CMAKE_BINARY_DIR}/build/include) SET(LINK_WEBSOCKET "${CMAKE_BINARY_DIR}/build/lib/libtaosws.dylib") ADD_DEPENDENCIES(shell taosws-rs) -ELSEIF (TD_WINDOWS AND TD_WEBSOCKET) +ELSEIF(TD_WINDOWS AND TD_WEBSOCKET) ADD_DEFINITIONS(-DWEBSOCKET -I${CMAKE_BINARY_DIR}/build/include) SET(LINK_WEBSOCKET "${CMAKE_BINARY_DIR}/build/lib/taosws.lib") ADD_DEPENDENCIES(shell taosws-rs) -ELSE () +ELSE() SET(LINK_WEBSOCKET "") -ENDIF () +ENDIF() -IF (TD_LINUX AND TD_ALPINE) +IF(TD_LINUX AND TD_ALPINE) SET(LINK_ARGP "/usr/lib/libargp.a") -ELSE () +ELSE() SET(LINK_ARGP "") -ENDIF () +ENDIF() if(TD_WINDOWS) target_link_libraries(shell PUBLIC taos_static ${LINK_WEBSOCKET}) else() target_link_libraries(shell PUBLIC taos ${LINK_WEBSOCKET} ${LINK_JEMALLOC} ${LINK_ARGP}) -endif () +endif() target_link_libraries( shell diff --git a/utils/CMakeLists.txt b/utils/CMakeLists.txt index 6f0c3b5247..9872a9dc55 100644 --- a/utils/CMakeLists.txt +++ b/utils/CMakeLists.txt @@ -1,8 +1,8 @@ -#ADD_SUBDIRECTORY(examples/c) +# ADD_SUBDIRECTORY(examples/c) ADD_SUBDIRECTORY(tsim) ADD_SUBDIRECTORY(test/c) -#ADD_SUBDIRECTORY(comparisonTest/tdengine) -IF (NOT "${TSZ_ENABLED}" MATCHES "false") +# ADD_SUBDIRECTORY(comparisonTest/tdengine) +IF(NOT "${TSZ_ENABLED}" MATCHES "false") ADD_SUBDIRECTORY(TSZ) ENDIF() \ No newline at end of file diff --git a/utils/TSZ/CMakeLists.txt b/utils/TSZ/CMakeLists.txt index e3f4dce3c9..ba335fe769 100644 --- a/utils/TSZ/CMakeLists.txt +++ b/utils/TSZ/CMakeLists.txt @@ -6,22 +6,20 @@ INCLUDE_DIRECTORIES(sz/inc) INCLUDE_DIRECTORIES(zstd/) INCLUDE_DIRECTORIES(zstd/common/) - # source -AUX_SOURCE_DIRECTORY(sz/src SRC1) +AUX_SOURCE_DIRECTORY(sz/src SRC1) AUX_SOURCE_DIRECTORY(zstd/dictBuilder SRC2) -AUX_SOURCE_DIRECTORY(zstd/common SRC3) -AUX_SOURCE_DIRECTORY(zstd/compress SRC4) -AUX_SOURCE_DIRECTORY(zstd/decompress SRC5) -AUX_SOURCE_DIRECTORY(zstd/deprecated SRC6) -AUX_SOURCE_DIRECTORY(zstd/legacy SRC7) - +AUX_SOURCE_DIRECTORY(zstd/common SRC3) +AUX_SOURCE_DIRECTORY(zstd/compress SRC4) +AUX_SOURCE_DIRECTORY(zstd/decompress SRC5) 
+AUX_SOURCE_DIRECTORY(zstd/deprecated SRC6) +AUX_SOURCE_DIRECTORY(zstd/legacy SRC7) # archive ADD_LIBRARY(TSZ STATIC ${SRC1} ${SRC2} ${SRC3} ${SRC4} ${SRC5} ${SRC6} ${SRC7}) TARGET_INCLUDE_DIRECTORIES(TSZ PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/sz/inc ${TD_SOURCE_DIR}/include) # windows ignore warning -IF (TD_WINDOWS) - SET_TARGET_PROPERTIES(TSZ PROPERTIES COMPILE_FLAGS -w) -ENDIF () +IF(TD_WINDOWS) + SET_TARGET_PROPERTIES(TSZ PROPERTIES COMPILE_FLAGS -w) +ENDIF() diff --git a/utils/test/c/CMakeLists.txt b/utils/test/c/CMakeLists.txt index 8701f208bb..7589d11840 100644 --- a/utils/test/c/CMakeLists.txt +++ b/utils/test/c/CMakeLists.txt @@ -17,7 +17,7 @@ add_executable(varbinary_test varbinary_test.c) add_executable(replay_test replay_test.c) if(${TD_LINUX}) -add_executable(tsz_test tsz_test.c) + add_executable(tsz_test tsz_test.c) endif(${TD_LINUX}) target_link_libraries( @@ -124,7 +124,6 @@ target_link_libraries( PUBLIC common PUBLIC os PUBLIC geometry - ) target_link_libraries( @@ -144,11 +143,11 @@ target_link_libraries( ) if(${TD_LINUX}) -target_link_libraries( - tsz_test - PUBLIC taos - PUBLIC util - PUBLIC common - PUBLIC os -) + target_link_libraries( + tsz_test + PUBLIC taos + PUBLIC util + PUBLIC common + PUBLIC os + ) endif(${TD_LINUX}) \ No newline at end of file From afb71f1536d0388efb70df38417067f5f91051be Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 24 Oct 2024 10:42:39 +0800 Subject: [PATCH 087/102] test/duration: fix duration values --- tests/develop-test/2-query/show_create_db.py | 12 ++++++------ tests/script/tsim/insert/commit-merge0.sim | 2 +- tests/script/tsim/query/interval-offset.sim | 2 +- tests/script/tsim/sma/drop_sma.sim | 8 ++++---- tests/script/tsim/sma/sma_leak.sim | 8 ++++---- tests/system-test/2-query/distribute_agg_count.py | 2 +- tests/system-test/2-query/join.py | 2 +- tests/system-test/2-query/max.py | 2 +- 8 files changed, 19 insertions(+), 19 deletions(-) diff --git a/tests/develop-test/2-query/show_create_db.py b/tests/develop-test/2-query/show_create_db.py index d7d093aa78..d6dcffcbb4 100644 --- a/tests/develop-test/2-query/show_create_db.py +++ b/tests/develop-test/2-query/show_create_db.py @@ -42,17 +42,17 @@ class TDTestCase: tdSql.query('show create database scd;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd') - tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 0") + tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") tdSql.query('show create database scd2;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd2') - tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 
'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 0") + tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") tdSql.query('show create database scd4') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd4') - tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 0") + tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") self.restartTaosd(1, dbname='scd') @@ -60,16 +60,16 @@ class TDTestCase: tdSql.query('show create database scd;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd') - tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 0") + tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") tdSql.query('show create database scd2;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd2') - tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 0") + tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 
4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") tdSql.query('show create database scd4') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd4') - tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 0") + tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") tdSql.execute('drop database scd') diff --git a/tests/script/tsim/insert/commit-merge0.sim b/tests/script/tsim/insert/commit-merge0.sim index da66560cbd..78dbb44d61 100644 --- a/tests/script/tsim/insert/commit-merge0.sim +++ b/tests/script/tsim/insert/commit-merge0.sim @@ -4,7 +4,7 @@ system sh/exec.sh -n dnode1 -s start sql connect print =============== create database -sql create database db duration 300 keep 365000d,365000d,365000d +sql create database db duration 120 keep 365000d,365000d,365000d sql select * from information_schema.ins_databases if $rows != 3 then return -1 diff --git a/tests/script/tsim/query/interval-offset.sim b/tests/script/tsim/query/interval-offset.sim index fe3e4c9844..50b3efdc39 100644 --- a/tests/script/tsim/query/interval-offset.sim +++ b/tests/script/tsim/query/interval-offset.sim @@ -4,7 +4,7 @@ system sh/exec.sh -n dnode1 -s start sql connect print =============== create database -sql create database d0 duration 300 +sql create database d0 duration 120 sql use d0 print =============== create super table and child table diff --git a/tests/script/tsim/sma/drop_sma.sim b/tests/script/tsim/sma/drop_sma.sim index 7121f402fa..b88fdc8955 100644 --- a/tests/script/tsim/sma/drop_sma.sim +++ b/tests/script/tsim/sma/drop_sma.sim @@ -126,21 +126,21 @@ sql drop table stb; print ========== step5 sql drop database if exists db; -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint, c_sint smallint, c_tint tinyint, c_float float, c_double double, c_bool bool, c_binary binary(16), c_nchar nchar(32), c_ts timestamp, c_tint_un tinyint unsigned, c_sint_un smallint unsigned, c_int_un int unsigned, c_bint_un bigint unsigned) tags (t_int int); sql CREATE SMA INDEX sma_index_1 ON stb1 function(min(c_int), max(c_int)) interval(6m, 10s) sliding(6m) watermark 5s; print ========== step6 repeat sql drop database if exists db; -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint ) tags (t_int int); sql CREATE SMA INDEX sma_index_1 ON stb1 function(min(c_int), 
max(c_int)) interval(6m, 10s) sliding(6m) watermark 5s; print ========== step7 sql drop database if exists db; -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint, c_sint smallint, c_tint tinyint,c_float float, c_double double, c_bool bool,c_binary binary(16), c_nchar nchar(32), c_ts timestamp,c_tint_un tinyint unsigned, c_sint_un smallint unsigned,c_int_un int unsigned, c_bint_un bigint unsigned) tags (t_int int); @@ -160,7 +160,7 @@ sql DROP INDEX sma_index_3 ; print ========== step8 sql drop database if exists db; sleep 2000 -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint, c_sint smallint, c_tint tinyint,c_float float, c_double double, c_bool bool,c_binary binary(16), c_nchar nchar(32), c_ts timestamp,c_tint_un tinyint unsigned, c_sint_un smallint unsigned,c_int_un int unsigned, c_bint_un bigint unsigned) tags (t_int int); diff --git a/tests/script/tsim/sma/sma_leak.sim b/tests/script/tsim/sma/sma_leak.sim index 4f2d1ebeb0..14f03541b5 100644 --- a/tests/script/tsim/sma/sma_leak.sim +++ b/tests/script/tsim/sma/sma_leak.sim @@ -98,21 +98,21 @@ sql drop table stb; print ========== step5 sql drop database if exists db; -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint, c_sint smallint, c_tint tinyint, c_float float, c_double double, c_bool bool, c_binary binary(16), c_nchar nchar(32), c_ts timestamp, c_tint_un tinyint unsigned, c_sint_un smallint unsigned, c_int_un int unsigned, c_bint_un bigint unsigned) tags (t_int int); sql CREATE SMA INDEX sma_index_1 ON stb1 function(min(c_int), max(c_int)) interval(6m, 10s) sliding(6m) watermark 5s; print ========== step6 repeat sql drop database if exists db; -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint ) tags (t_int int); sql CREATE SMA INDEX sma_index_1 ON stb1 function(min(c_int), max(c_int)) interval(6m, 10s) sliding(6m) watermark 5s; print ========== step7 sql drop database if exists db; -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint, c_sint smallint, c_tint tinyint,c_float float, c_double double, c_bool bool,c_binary binary(16), c_nchar nchar(32), c_ts timestamp,c_tint_un tinyint unsigned, c_sint_un smallint unsigned,c_int_un int unsigned, c_bint_un bigint unsigned) tags (t_int int); @@ -131,7 +131,7 @@ sql DROP INDEX sma_index_3 ; print ========== step8 sql drop database if exists db; -sql create database db duration 300; +sql create database db duration 120; sql use db; sql create table stb1(ts timestamp, c_int int, c_bint bigint, c_sint smallint, c_tint tinyint,c_float float, c_double double, c_bool bool,c_binary binary(16), c_nchar nchar(32), c_ts timestamp,c_tint_un tinyint unsigned, c_sint_un smallint unsigned,c_int_un int unsigned, c_bint_un bigint unsigned) tags (t_int int); diff --git a/tests/system-test/2-query/distribute_agg_count.py b/tests/system-test/2-query/distribute_agg_count.py index 7d131cd77d..fdcf270402 100644 --- a/tests/system-test/2-query/distribute_agg_count.py +++ b/tests/system-test/2-query/distribute_agg_count.py @@ -36,7 +36,7 @@ class TDTestCase: def prepare_datas_of_distribute(self, dbname="testdb"): # prepate datas for 
20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f" use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 diff --git a/tests/system-test/2-query/join.py b/tests/system-test/2-query/join.py index c5c8f6c730..1c303b6d96 100644 --- a/tests/system-test/2-query/join.py +++ b/tests/system-test/2-query/join.py @@ -370,7 +370,7 @@ class TDTestCase: tdLog.printNoPrefix("==========step4:cross db check") dbname1 = "db1" - tdSql.execute(f"create database {dbname1} duration 432000m") + tdSql.execute(f"create database {dbname1} duration 172800m") tdSql.execute(f"use {dbname1}") self.__create_tb(dbname=dbname1) self.__insert_data(dbname=dbname1) diff --git a/tests/system-test/2-query/max.py b/tests/system-test/2-query/max.py index ba6ab53fc7..5649055838 100644 --- a/tests/system-test/2-query/max.py +++ b/tests/system-test/2-query/max.py @@ -117,7 +117,7 @@ class TDTestCase: def support_distributed_aggregate(self, dbname="testdb"): # prepate datas for 20 tables distributed at different vgroups - tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 1000 vgroups 5") + tdSql.execute(f"create database if not exists {dbname} keep 3650 duration 100 vgroups 5") tdSql.execute(f"use {dbname} ") tdSql.execute( f'''create table {dbname}.stb1 From 20c54a24f0ef025c71bdd450f0672b1bcf594933 Mon Sep 17 00:00:00 2001 From: 54liuyao <54liuyao@163.com> Date: Thu, 24 Oct 2024 11:22:55 +0800 Subject: [PATCH 088/102] avoid free memory incorrectly --- .../executor/src/streamtimewindowoperator.c | 3 +- tests/script/tsim/stream/basic4.sim | 196 +++++++++++++++++- 2 files changed, 195 insertions(+), 4 deletions(-) diff --git a/source/libs/executor/src/streamtimewindowoperator.c b/source/libs/executor/src/streamtimewindowoperator.c index be27f277c0..83b9862ba6 100644 --- a/source/libs/executor/src/streamtimewindowoperator.c +++ b/source/libs/executor/src/streamtimewindowoperator.c @@ -497,6 +497,7 @@ void destroyStreamFinalIntervalOperatorInfo(void* param) { blockDataDestroy(pInfo->pMidRetriveRes); blockDataDestroy(pInfo->pMidPulloverRes); if (pInfo->pUpdatedMap != NULL) { + // free flushed pos tSimpleHashSetFreeFp(pInfo->pUpdatedMap, destroyFlusedppPos); tSimpleHashCleanup(pInfo->pUpdatedMap); pInfo->pUpdatedMap = NULL; @@ -1967,7 +1968,6 @@ int32_t createStreamFinalIntervalOperatorInfo(SOperatorInfo* downstream, SPhysiN code = initAggSup(&pOperator->exprSupp, &pInfo->aggSup, pExprInfo, numOfCols, keyBufSize, pTaskInfo->id.str, pInfo->pState, &pTaskInfo->storageAPI.functionStore); QUERY_CHECK_CODE(code, lino, _error); - tSimpleHashSetFreeFp(pInfo->aggSup.pResultRowHashTable, destroyFlusedppPos); code = initExecTimeWindowInfo(&pInfo->twAggSup.timeWindowData, &pTaskInfo->window); QUERY_CHECK_CODE(code, lino, _error); @@ -5346,7 +5346,6 @@ int32_t createStreamIntervalOperatorInfo(SOperatorInfo* downstream, SPhysiNode* code = initAggSup(pSup, &pInfo->aggSup, pExprInfo, numOfCols, keyBufSize, pTaskInfo->id.str, pInfo->pState, &pTaskInfo->storageAPI.functionStore); QUERY_CHECK_CODE(code, lino, _error); - tSimpleHashSetFreeFp(pInfo->aggSup.pResultRowHashTable, destroyFlusedppPos); if (pIntervalPhyNode->window.pExprs != NULL) { int32_t numOfScalar = 0; diff --git a/tests/script/tsim/stream/basic4.sim b/tests/script/tsim/stream/basic4.sim index 8868c3fd60..cadce94ef4 100644 --- a/tests/script/tsim/stream/basic4.sim +++ 
b/tests/script/tsim/stream/basic4.sim @@ -189,10 +189,10 @@ $loop_count = 0 loop4: -sleep 200 +sleep 500 $loop_count = $loop_count + 1 -if $loop_count == 10 then +if $loop_count == 20 then return -1 endi @@ -324,5 +324,197 @@ if $data[29][1] != 2 then goto loop7 endi +print step4==== + +sql create database test4 vgroups 1; +sql use test4; + +sql create stable st(ts timestamp,a int,b int,c int, d double) tags(ta int,tb int,tc int); +sql create table t1 using st tags(1,1,1); +sql create table t2 using st tags(2,2,2); +sql create table t3 using st tags(2,2,2); +sql create table t4 using st tags(2,2,2); +sql create table t5 using st tags(2,2,2); +sql create table t6 using st tags(2,2,2); + +sql create stream streams4 trigger window_close IGNORE EXPIRED 0 into streamt as select _wstart, count(*), now from st partition by tbname interval(1s); +sql create stream streams5 trigger window_close IGNORE EXPIRED 0 into streamt1 as select _wstart, count(*), now from st partition by b interval(1s); + +run tsim/stream/checkTaskStatus.sim + +sql insert into t1 values(1648791211000,1,1,1,1.1) t2 values (1648791211000,2,2,2,2.1) t3 values(1648791211000,3,3,3,3.1) t4 values(1648791211000,4,4,4,4.1) t5 values (1648791211000,5,5,5,5.1) t6 values(1648791211000,6,6,6,6.1); + +sql insert into t1 values(now,1,1,1,1.1) t2 values (now,2,2,2,2.1) t3 values(now,3,3,3,3.1) t4 values(now,4,4,4,4.1) t5 values (now,5,5,5,5.1) t6 values(now,6,6,6,6.1); + + +$loop_count = 0 + +loop8: + +sleep 200 + +$loop_count = $loop_count + 1 +if $loop_count == 20 then + return -1 +endi + +print sql select * from streamt; +sql select * from streamt; + +if $rows != 6 then + print ======rows=$rows + goto loop8 +endi + +if $data01 != 1 then + print ======data01=$data01 + return -1 +endi + +if $data11 != 1 then + print ======data11=$data11 + return -1 +endi + +if $data21 != 1 then + print ======data21=$data21 + return -1 +endi + +$loop_count = 0 + +loop8_1: + +sleep 200 + +$loop_count = $loop_count + 1 +if $loop_count == 20 then + return -1 +endi + +print sql select * from streamt1; +sql select * from streamt1; + +if $rows != 6 then + print ======rows=$rows + goto loop8_1 +endi + +if $data01 != 1 then + print ======data01=$data01 + return -1 +endi + +if $data11 != 1 then + print ======data11=$data11 + return -1 +endi + +if $data21 != 1 then + print ======data21=$data21 + return -1 +endi + +sleep 2000 + +sql insert into t1 values(now,1,1,1,1.1) t2 values (now,2,2,2,2.1) t3 values(now,3,3,3,3.1) t4 values(now,4,4,4,4.1) t5 values (now,5,5,5,5.1) t6 values(now,6,6,6,6.1); + +sleep 2000 + +sql insert into t1 values(now,1,1,1,1.1) t2 values (now,2,2,2,2.1) t3 values(now,3,3,3,3.1) t4 values(now,4,4,4,4.1) t5 values (now,5,5,5,5.1) t6 values(now,6,6,6,6.1); + +$loop_count = 0 + +loop8_1: + +sleep 200 + +$loop_count = $loop_count + 1 +if $loop_count == 20 then + return -1 +endi + +print sql select * from streamt order by 1 desc; +sql select * from streamt order by 1 desc; + +if $data01 != 1 then + print ======data01=$data01 + goto loop8_1 +endi + +print sql select * from streamt1 order by 1 desc; +sql select * from streamt1 order by 1 desc; + +if $data01 != 1 then + print ======data01=$data01 + goto loop8_1 +endi + +sleep 2000 + +sql insert into t1 values(now,1,1,1,1.1) +sql insert into t2 values(now,2,2,2,2.1); +sql insert into t3 values(now,3,3,3,3.1); +sql insert into t4 values(now,4,4,4,4.1); +sql insert into t5 values(now,5,5,5,5.1); +sql insert into t6 values(now,6,6,6,6.1); + +$loop_count = 0 + +loop9: + +sleep 200 + +$loop_count = 
$loop_count + 1 +if $loop_count == 10 then + return -1 +endi + +print sql select * from streamt order by 1 desc; +sql select * from streamt order by 1 desc; + +if $data01 != 1 then + print ======data01=$data01 + goto loop9 +endi + +if $data11 != 1 then + print ======data11=$data11 + goto loop9 +endi + +if $data21 != 1 then + print ======data21=$data21 + goto loop9 +endi + +$loop_count = 0 + +loop10: + +sleep 200 + +$loop_count = $loop_count + 1 +if $loop_count == 10 then + return -1 +endi + +print sql select * from streamt1 order by 1 desc; +sql select * from streamt1 order by 1 desc; + +if $data01 != 1 then + print ======data01=$data01 + goto loop10 +endi + +if $data11 != 1 then + print ======data11=$data11 + goto loop10 +endi + +if $data21 != 1 then + print ======data21=$data21 + goto loop10 +endi system sh/exec.sh -n dnode1 -s stop -x SIGINT \ No newline at end of file From b5aec92c2c8e6e0f495910670cd5539e41206ce0 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Thu, 24 Oct 2024 12:54:50 +0800 Subject: [PATCH 089/102] db/param: fix s3_keeplocal defualt value --- tests/develop-test/2-query/show_create_db.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/develop-test/2-query/show_create_db.py b/tests/develop-test/2-query/show_create_db.py index d6dcffcbb4..b77e744df2 100644 --- a/tests/develop-test/2-query/show_create_db.py +++ b/tests/develop-test/2-query/show_create_db.py @@ -42,17 +42,17 @@ class TDTestCase: tdSql.query('show create database scd;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd') - tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") + tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 525600m S3_COMPACT 1") tdSql.query('show create database scd2;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd2') - tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") + tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 525600m S3_COMPACT 1") tdSql.query('show 
create database scd4') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd4') - tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") + tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 525600m S3_COMPACT 1") self.restartTaosd(1, dbname='scd') @@ -60,16 +60,16 @@ class TDTestCase: tdSql.query('show create database scd;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd') - tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") + tdSql.checkData(0, 1, "CREATE DATABASE `scd` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 2 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 525600m S3_COMPACT 1") tdSql.query('show create database scd2;') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd2') - tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") + tdSql.checkData(0, 1, "CREATE DATABASE `scd2` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 3 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 525600m S3_COMPACT 1") tdSql.query('show create database scd4') tdSql.checkRows(1) tdSql.checkData(0, 0, 'scd4') - tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 
TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 5256000m S3_COMPACT 1") + tdSql.checkData(0, 1, "CREATE DATABASE `scd4` BUFFER 256 CACHESIZE 1 CACHEMODEL 'none' COMP 2 DURATION 10d WAL_FSYNC_PERIOD 3000 MAXROWS 4096 MINROWS 100 STT_TRIGGER 13 KEEP 3650d,3650d,3650d PAGES 256 PAGESIZE 4 PRECISION 'ms' REPLICA 1 WAL_LEVEL 1 VGROUPS 2 SINGLE_STABLE 0 TABLE_PREFIX 0 TABLE_SUFFIX 0 TSDB_PAGESIZE 4 WAL_RETENTION_PERIOD 3600 WAL_RETENTION_SIZE 0 KEEP_TIME_OFFSET 0 ENCRYPT_ALGORITHM 'none' S3_CHUNKSIZE 262144 S3_KEEPLOCAL 525600m S3_COMPACT 1") tdSql.execute('drop database scd') From 4313b3ce40d7f547c9f199d6557c2517f42a45eb Mon Sep 17 00:00:00 2001 From: Alex Duan <51781608+DuanKuanJun@users.noreply.github.com> Date: Thu, 24 Oct 2024 13:54:44 +0800 Subject: [PATCH 090/102] Update 17-json.md modifi is not nul --- docs/zh/14-reference/03-taos-sql/17-json.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/zh/14-reference/03-taos-sql/17-json.md b/docs/zh/14-reference/03-taos-sql/17-json.md index 18c25cfe23..6e1f568bac 100644 --- a/docs/zh/14-reference/03-taos-sql/17-json.md +++ b/docs/zh/14-reference/03-taos-sql/17-json.md @@ -33,7 +33,7 @@ description: 对 JSON 类型如何使用的详细说明 ## 支持的操作 -1. 在 where 条件中时,支持函数 match/nmatch/between and/like/and/or/is null/is no null,不支持 in +1. 在 where 条件中时,支持函数 match/nmatch/between and/like/and/or/is null/is not null,不支持 in ``` select * from s1 where info->'k1' match 'v*'; From 515b93ce2274ed4425450ecef1d54ea5ace5ea39 Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Thu, 24 Oct 2024 14:08:55 +0800 Subject: [PATCH 091/102] add tests for fill --- tests/system-test/2-query/fill_with_group.py | 45 ++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/tests/system-test/2-query/fill_with_group.py b/tests/system-test/2-query/fill_with_group.py index 49c3b5dcf8..3b98ec30ce 100644 --- a/tests/system-test/2-query/fill_with_group.py +++ b/tests/system-test/2-query/fill_with_group.py @@ -303,6 +303,51 @@ class TDTestCase: tdSql.query(sql, queryTimes=1) tdSql.checkRows(0) + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(value, 0, 0) HAVING(timediff(last(ts), _wstart) + t1 >= 1) ORDER BY timediff(last(ts), _wstart), tbname" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(48) + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(value, 0, 0) HAVING(count(*) >= 0) ORDER BY timediff(last(ts), _wstart), tbname" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(120) + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname, t1 INTERVAL(5m) FILL(value, 0, 0) HAVING(count(*) > 0) ORDER BY timediff(last(ts), _wstart), tbname" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(60) + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, 
concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname INTERVAL(5m) FILL(linear) HAVING(count(*) >= 0 and t1 <= 1) ORDER BY timediff(last(ts), _wstart), tbname, t1" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(44) + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname INTERVAL(5m) FILL(prev) HAVING(count(*) >= 0 and t1 > 1) ORDER BY timediff(last(ts), _wstart), tbname, t1" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(72) + + sql = "SELECT 1+1, count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname INTERVAL(5m) FILL(linear) ORDER BY tbname, _wstart;" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(120) + for i in range(11, 120, 12): + tdSql.checkData(i, 1, None) + for i in range(0, 120): + tdSql.checkData(i, 0, 2) + + sql = "SELECT count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1, concat(to_char(_wstart, 'HH:MI:SS__'), tbname) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY tbname INTERVAL(5m) FILL(linear) HAVING(count(*) >= 0) ORDER BY tbname;" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(110) + for i in range(0, 110, 11): + lastCol = tdSql.getData(i, 3) + tdLog.debug(f"lastCol: {lastCol}") + if lastCol[-1:] != str(i//11): + tdLog.exit(f"query error: lastCol: {lastCol}") + + sql = "SELECT 1+1, count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1,t1 FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY t1 INTERVAL(5m) FILL(linear) ORDER BY t1, _wstart;" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(60) + + sql = "SELECT 1+1, count(*), timediff(_wend, last(ts)) + t1, timediff('2018-09-20 01:00:00', _wstart) + t1,t1 FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY t1 INTERVAL(5m) FILL(linear) HAVING(count(*) > 0) ORDER BY t1, _wstart;" + tdSql.query(sql, queryTimes=1) + tdSql.checkRows(55) + + # TODO Fix Me! 
+ sql = "explain SELECT count(*), timediff(_wend, last(ts)), timediff('2018-09-20 01:00:00', _wstart) FROM meters WHERE ts >= '2018-09-20 00:00:00.000' AND ts < '2018-09-20 01:00:00.000' PARTITION BY concat(tbname, 'asd') INTERVAL(5m) having(concat(tbname, 'asd') like '%asd');" + tdSql.error(sql, -2147473664) # Error: Planner internal error + def run(self): self.prepareTestEnv() self.test_partition_by_with_interval_fill_prev_new_group_fill_error() From 421ea0def2473d5d878628252d7cb7a408075392 Mon Sep 17 00:00:00 2001 From: Yu Chen <74105241+yu285@users.noreply.github.com> Date: Thu, 24 Oct 2024 14:39:29 +0800 Subject: [PATCH 092/102] docs : 2nd time Update firstEP description in 02-taosc.md --- docs/zh/14-reference/01-components/02-taosc.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/zh/14-reference/01-components/02-taosc.md b/docs/zh/14-reference/01-components/02-taosc.md index 5f22ebe8d5..3967924951 100755 --- a/docs/zh/14-reference/01-components/02-taosc.md +++ b/docs/zh/14-reference/01-components/02-taosc.md @@ -10,7 +10,7 @@ TDengine 客户端驱动提供了应用编程所需要的全部 API,并且在 | 参数名称 | 参数含义 | |:-----------:|:----------------------------------------------------------:| -|firstEp | taos 启动时,主动连接的集群中首个 dnode 的 endpoint,缺省值:${hostname}:6030,若无法获取 ${hostname},则赋值为 localhost | +|firstEp | taos 启动时,主动连接的集群中首个 dnode 的 endpoint,缺省值:hostname:6030,若无法获取该服务器的 hostname,则赋值为 localhost | |secondEp | 启动时,如果 firstEp 连接不上,尝试连接集群中第二个 dnode 的 endpoint,没有缺省值 | |numOfRpcSessions | 一个客户端能创建的最大连接数,取值范围:10-50000000(单位为毫秒);缺省值:500000 | |telemetryReporting | 是否上传 telemetry,0: 不上传,1: 上传;缺省值:1 | From a37fb974bd23e84492398973611b852381e6b148 Mon Sep 17 00:00:00 2001 From: Shengliang Guan Date: Thu, 24 Oct 2024 14:48:08 +0800 Subject: [PATCH 093/102] minor changes --- docs/zh/14-reference/01-components/02-taosc.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/zh/14-reference/01-components/02-taosc.md b/docs/zh/14-reference/01-components/02-taosc.md index 3967924951..d9f36a2df7 100755 --- a/docs/zh/14-reference/01-components/02-taosc.md +++ b/docs/zh/14-reference/01-components/02-taosc.md @@ -10,7 +10,7 @@ TDengine 客户端驱动提供了应用编程所需要的全部 API,并且在 | 参数名称 | 参数含义 | |:-----------:|:----------------------------------------------------------:| -|firstEp | taos 启动时,主动连接的集群中首个 dnode 的 endpoint,缺省值:hostname:6030,若无法获取该服务器的 hostname,则赋值为 localhost | +|firstEp | taos 启动时,主动连接的集群中首个 dnode 的 endpoint,缺省值:hostname:6030,若无法获取该服务器的 hostname,则赋值为 localhost | |secondEp | 启动时,如果 firstEp 连接不上,尝试连接集群中第二个 dnode 的 endpoint,没有缺省值 | |numOfRpcSessions | 一个客户端能创建的最大连接数,取值范围:10-50000000(单位为毫秒);缺省值:500000 | |telemetryReporting | 是否上传 telemetry,0: 不上传,1: 上传;缺省值:1 | From c26ab17f19eabe0e1a951edec7764747ed1523a0 Mon Sep 17 00:00:00 2001 From: Jing Sima Date: Wed, 16 Oct 2024 17:34:32 +0800 Subject: [PATCH 094/102] enh:[TD-32459] Abstract function properties into a struct. 
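To make the new descriptor concrete, below is a minimal illustrative sketch (an assumption, not code taken from this patch) of how a single-argument numeric scalar function could declare its parameter constraints with the SParamInfo/SFunctionParaInfo fields and the FUNC_PARAM_SUPPORT_* flags added here; the descriptor name kOneNumericArg and the particular flag combination are hypothetical, only the struct field and macro names come from this change.

    #include "builtins.h"  /* SFunctionParaInfo, SParamInfo; pulls in the
                              FUNC_PARAM_SUPPORT_* flags via functionMgtInt.h */

    /* Hypothetical descriptor: exactly one input parameter, any numeric
     * (or NULL) expression node; output type declared as double. */
    static const SFunctionParaInfo kOneNumericArg = {
        .minParamNum = 1,
        .maxParamNum = 1,
        .paramInfoPattern = 1, /* one accepted parameter pattern */
        .inputParaInfo[0][0] = {
            .isLastParam   = true,
            .startParam    = 1,
            .endParam      = 1,
            .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE,
            .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE,
            .hasRange      = false,
        },
        .outputParaInfo = {
            .validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE,
        },
    };

A table-driven descriptor of this shape is intended to let the shared translate/validation code check parameter count, node kind and data type generically, in place of the hand-written per-function checks (e.g. translateInOutNum) that this change removes.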
--- include/util/taoserror.h | 5 + source/libs/function/inc/builtins.h | 38 + source/libs/function/inc/functionMgtInt.h | 65 + source/libs/function/src/builtins.c | 5311 ++++++++++++-------- source/util/src/terror.c | 5 + tests/army/query/function/test_function.py | 2 +- 6 files changed, 3255 insertions(+), 2171 deletions(-) diff --git a/include/util/taoserror.h b/include/util/taoserror.h index fd8970a50f..a53923b904 100644 --- a/include/util/taoserror.h +++ b/include/util/taoserror.h @@ -917,6 +917,11 @@ int32_t taosGetErrSize(); #define TSDB_CODE_FUNC_INVALID_RES_LENGTH TAOS_DEF_ERROR_CODE(0, 0x280E) #define TSDB_CODE_FUNC_HISTOGRAM_ERROR TAOS_DEF_ERROR_CODE(0, 0x280F) #define TSDB_CODE_FUNC_PERCENTILE_ERROR TAOS_DEF_ERROR_CODE(0, 0x2810) +#define TSDB_CODE_FUNC_FUNTION_PARA_RANGE TAOS_DEF_ERROR_CODE(0, 0x2811) +#define TSDB_CODE_FUNC_FUNTION_PARA_PRIMTS TAOS_DEF_ERROR_CODE(0, 0x2812) +#define TSDB_CODE_FUNC_FUNTION_PARA_PK TAOS_DEF_ERROR_CODE(0, 0x2813) +#define TSDB_CODE_FUNC_FUNTION_PARA_HAS_COL TAOS_DEF_ERROR_CODE(0, 0x2814) +#define TSDB_CODE_FUNC_FUNCTION_HISTO_TYPE TAOS_DEF_ERROR_CODE(0, 0x2815) //udf diff --git a/source/libs/function/inc/builtins.h b/source/libs/function/inc/builtins.h index 5707ee76f4..c76d32efee 100644 --- a/source/libs/function/inc/builtins.h +++ b/source/libs/function/inc/builtins.h @@ -22,16 +22,54 @@ extern "C" { #include "functionMgtInt.h" +struct SFunctionParaInfo; + typedef int32_t (*FTranslateFunc)(SFunctionNode* pFunc, char* pErrBuf, int32_t len); typedef EFuncDataRequired (*FFuncDataRequired)(SFunctionNode* pFunc, STimeWindow* pTimeWindow); typedef int32_t (*FCreateMergeFuncParameters)(SNodeList* pRawParameters, SNode* pPartialRes, SNodeList** pParameters); typedef EFuncDataRequired (*FFuncDynDataRequired)(void* pRes, SDataBlockInfo* pBlocInfo); typedef EFuncReturnRows (*FEstimateReturnRows)(SFunctionNode* pFunc); +#define MAX_FUNC_PARA_NUM 16 + +typedef struct SParamRange { + double dMinVal; + double dMaxVal; +} SParamRange; + +typedef struct SParamInfo { + bool isLastParam; + int8_t startParam; + int8_t endParam; + uint64_t validDataType; + uint64_t validNodeType; + bool hasRange; + bool isTs; // used for input parameter + bool isPK; // used for input parameter + bool isFixedValue; // used for input parameter + bool hasColumn; // used for input parameter, parameter must contain columns + bool isFirstLast; // special check for first and last + bool isTimeUnit; // used for input parameter, need check whether time unit is valid + bool isHistogramBin; // used for input parameter, need check whether histogram bin is valid + uint8_t fixedValueSize; + char fixedStrValue[MAX_FUNC_PARA_NUM][16]; // used for input parameter + int32_t fixedNumValue[MAX_FUNC_PARA_NUM]; // used for input parameter + SParamRange range; +} SParamInfo; + +typedef struct SFunctionParaInfo { + int8_t minParamNum; + int8_t maxParamNum; + uint8_t paramInfoPattern; + SParamInfo inputParaInfo[MAX_FUNC_PARA_NUM][MAX_FUNC_PARA_NUM]; + SParamInfo outputParaInfo; +} SFunctionParaInfo; + typedef struct SBuiltinFuncDefinition { const char* name; EFunctionType type; uint64_t classification; + SFunctionParaInfo parameters; FTranslateFunc translateFunc; FFuncDataRequired dataRequiredFunc; FFuncDynDataRequired dynDataRequiredFunc; diff --git a/source/libs/function/inc/functionMgtInt.h b/source/libs/function/inc/functionMgtInt.h index 3112245de9..924ec6d40a 100644 --- a/source/libs/function/inc/functionMgtInt.h +++ b/source/libs/function/inc/functionMgtInt.h @@ -64,6 +64,71 @@ extern "C" { #define 
FUNC_UDF_ID_START 5000 +#define FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(n) ((uint64_t)1 << n) +#define FUNC_PARAM_SUPPORT_ALL_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(0) +#define FUNC_PARAM_SUPPORT_NUMERIC_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(1) +#define FUNC_PARAM_SUPPORT_VAR_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(2) +#define FUNC_PARAM_SUPPORT_STRING_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(3) +#define FUNC_PARAM_SUPPORT_BOOL_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(4) +#define FUNC_PARAM_SUPPORT_TINYINT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(5) +#define FUNC_PARAM_SUPPORT_SMALLINT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(6) +#define FUNC_PARAM_SUPPORT_INT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(7) +#define FUNC_PARAM_SUPPORT_BIGINT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(8) +#define FUNC_PARAM_SUPPORT_FLOAT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(9) +#define FUNC_PARAM_SUPPORT_DOUBLE_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(10) +#define FUNC_PARAM_SUPPORT_VARCHAR_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(11) +#define FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(12) +#define FUNC_PARAM_SUPPORT_NCHAR_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(13) +#define FUNC_PARAM_SUPPORT_UTINYINT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(14) +#define FUNC_PARAM_SUPPORT_USMALLINT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(15) +#define FUNC_PARAM_SUPPORT_UINT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(16) +#define FUNC_PARAM_SUPPORT_UBIGINT_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(17) +#define FUNC_PARAM_SUPPORT_JSON_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(18) +#define FUNC_PARAM_SUPPORT_VARB_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(19) +#define FUNC_PARAM_SUPPORT_GEOMETRY_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(20) +#define FUNC_PARAM_SUPPORT_INTEGER_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(21) +#define FUNC_PARAM_SUPPORT_NULL_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(22) +#define FUNC_PARAM_SUPPORT_UNIX_TS_TYPE FUNC_MGT_FUNC_PARAM_SUPPORT_TYPE(23) + + + +#define FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(n) ((uint64_t)1 << n) +#define FUNC_PARAM_SUPPORT_EXPR_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(0) +#define FUNC_PARAM_SUPPORT_VALUE_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(1) +#define FUNC_PARAM_SUPPORT_OPERATOR_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(2) +#define FUNC_PARAM_SUPPORT_FUNCTION_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(3) +#define FUNC_PARAM_SUPPORT_LOGIC_CONDITION_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(4) +#define FUNC_PARAM_SUPPORT_CASE_WHEN_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(5) +#define FUNC_PARAM_SUPPORT_COLUMN_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(6) +#define FUNC_PARAM_SUPPORT_NOT_VALUE_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(7) + +#define FUNC_PARAM_SUPPORT_NODE_MAX 7 + +#define FUNC_ERR_RET(c) \ + do { \ + int32_t _code = c; \ + if (_code != TSDB_CODE_SUCCESS) { \ + terrno = _code; \ + return _code; \ + } \ + } while (0) +#define FUNC_RET(c) \ + do { \ + int32_t _code = c; \ + if (_code != TSDB_CODE_SUCCESS) { \ + terrno = _code; \ + } \ + return _code; \ + } while (0) +#define FUNC_ERR_JRET(c) \ + do { \ + code = c; \ + if (code != TSDB_CODE_SUCCESS) { \ + terrno = code; \ + goto _return; \ + } \ + } while (0) + #ifdef __cplusplus } #endif diff --git a/source/libs/function/src/builtins.c b/source/libs/function/src/builtins.c index 1fd99125a0..276c77f567 100644 --- a/source/libs/function/src/builtins.c +++ b/source/libs/function/src/builtins.c @@ -90,28 +90,6 @@ static bool validateMinuteRange(int8_t hour, int8_t minute, char sign) { return false; } -static bool validateTimestampDigits(const SValueNode* pVal) { - if 
(!IS_INTEGER_TYPE(pVal->node.resType.type)) { - return false; - } - - int64_t tsVal = pVal->datum.i; - char fraction[20] = {0}; - NUM_TO_STRING(pVal->node.resType.type, &tsVal, sizeof(fraction), fraction); - int32_t tsDigits = (int32_t)strlen(fraction); - - if (tsDigits > TSDB_TIME_PRECISION_SEC_DIGITS) { - if (tsDigits == TSDB_TIME_PRECISION_MILLI_DIGITS || tsDigits == TSDB_TIME_PRECISION_MICRO_DIGITS || - tsDigits == TSDB_TIME_PRECISION_NANO_DIGITS) { - return true; - } else { - return false; - } - } - - return true; -} - static bool validateTimezoneFormat(const SValueNode* pVal) { if (TSDB_DATA_TYPE_BINARY != pVal->node.resType.type) { return false; @@ -298,879 +276,228 @@ static SDataType* getSDataTypeFromNode(SNode* pNode) { } } -// There is only one parameter of numeric type, and the return type is parameter type -static int32_t translateInOutNum(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } else if (IS_NULL_TYPE(paraType)) { - paraType = TSDB_DATA_TYPE_BIGINT; - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[paraType].bytes, .type = paraType}; - return TSDB_CODE_SUCCESS; +static bool paramSupportNull(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NULL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE); } -// There is only one parameter of numeric type, and the return type is parameter type -static int32_t translateMinMax(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SDataType* dataType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); - uint8_t paraType = dataType->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType) && !IS_STR_DATA_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } else if (IS_NULL_TYPE(paraType)) { - paraType = TSDB_DATA_TYPE_BIGINT; - } - int32_t bytes = IS_STR_DATA_TYPE(paraType) ? 
dataType->bytes : tDataTypes[paraType].bytes; - pFunc->node.resType = (SDataType){.bytes = bytes, .type = paraType}; - return TSDB_CODE_SUCCESS; +static bool paramSupportBool(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_BOOL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE); } -// There is only one parameter of numeric type, and the return type is double type -static int32_t translateInNumOutDou(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; +static bool paramSupportTinyint(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_TINYINT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -// There are two parameters of numeric type, and the return type is double type -static int32_t translateIn2NumOutDou(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if ((!IS_NUMERIC_TYPE(para1Type) && !IS_NULL_TYPE(para1Type)) || - (!IS_NUMERIC_TYPE(para2Type) && !IS_NULL_TYPE(para2Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; +static bool paramSupportSmallint(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_SMALLINT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -// There is only one parameter of string type, and the return type is parameter type -static int32_t translateInOutStr(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SDataType* pRestType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); - if (TSDB_DATA_TYPE_VARBINARY == pRestType->type || !IS_STR_DATA_TYPE(pRestType->type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = pRestType->bytes, .type = pRestType->type}; - return TSDB_CODE_SUCCESS; +static bool paramSupportInt(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + 
FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateTrimStr(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isLtrim) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SDataType* pRestType1 = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); - if (TSDB_DATA_TYPE_VARBINARY == pRestType1->type || !IS_STR_DATA_TYPE(pRestType1->type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - int32_t numOfSpaces = 0; - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 0); - // for select trim functions with constant value from table, - // need to set the proper result result schema bytes to avoid - // trailing garbage characters - if (nodeType(pParamNode1) == QUERY_NODE_VALUE) { - SValueNode* pValue = (SValueNode*)pParamNode1; - numOfSpaces = countTrailingSpaces(pValue, isLtrim); - } - - int32_t resBytes = pRestType1->bytes - numOfSpaces; - pFunc->node.resType = (SDataType){.bytes = resBytes, .type = pRestType1->type}; - return TSDB_CODE_SUCCESS; +static bool paramSupportBigint(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_BIGINT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateLtrim(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateTrimStr(pFunc, pErrBuf, len, true); +static bool paramSupportFloat(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_FLOAT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE); } -static int32_t translateRtrim(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateTrimStr(pFunc, pErrBuf, len, false); +static bool paramSupportDouble(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_DOUBLE_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE); } -static int32_t translateLogarithm(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (1 != numOfParams && 2 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(para1Type) && !IS_NULL_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (2 == numOfParams) { - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_NUMERIC_TYPE(para2Type) && !IS_NULL_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; +static bool paramSupportVarchar(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VARCHAR_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_STRING_TYPE) || + 
FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VAR_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateCount(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; +static bool paramSupportTimestamp(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateSum(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t resType = 0; - if (IS_SIGNED_NUMERIC_TYPE(paraType) || TSDB_DATA_TYPE_BOOL == paraType || IS_NULL_TYPE(paraType)) { - resType = TSDB_DATA_TYPE_BIGINT; - } else if (IS_UNSIGNED_NUMERIC_TYPE(paraType)) { - resType = TSDB_DATA_TYPE_UBIGINT; - } else if (IS_FLOAT_TYPE(paraType)) { - resType = TSDB_DATA_TYPE_DOUBLE; - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[resType].bytes, .type = resType}; - return TSDB_CODE_SUCCESS; +static bool paramSupportNchar(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NCHAR_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_STRING_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VAR_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateAvgPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = getAvgInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; +static bool paramSupportUTinyInt(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UTINYINT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateAvgMiddle(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = ((SExprNode*)nodesListGetNode(pFunc->pParameterList, 0))->resType.type; - if (TSDB_DATA_TYPE_BINARY != paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = getAvgInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return 
TSDB_CODE_SUCCESS; +static bool paramSupportUSmallInt(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_USMALLINT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateAvgMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (TSDB_DATA_TYPE_BINARY != paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - - return TSDB_CODE_SUCCESS; +static bool paramSupportUInt(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UINT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateAvgState(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - - pFunc->node.resType = (SDataType){.bytes = getAvgInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; +static bool paramSupportUBigInt(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UBIGINT_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NUMERIC_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_INTEGER_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_UNIX_TS_TYPE); } -static int32_t translateAvgStateMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - - pFunc->node.resType = (SDataType){.bytes = getAvgInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; +static bool paramSupportJSON(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_JSON_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VAR_TYPE); } -static int32_t translateStdPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = getStdInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; +static bool 
paramSupportVarBinary(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VARB_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VAR_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_STRING_TYPE); } -static int32_t translateStdMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (TSDB_DATA_TYPE_BINARY != paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - - return TSDB_CODE_SUCCESS; +static bool paramSupportGeometry(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_GEOMETRY_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_ALL_TYPE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VAR_TYPE); } -static int32_t translateStdState(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = getStdInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; +static bool paramSupportValueNode(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_VALUE_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_EXPR_NODE); } -static int32_t translateStdStateMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (TSDB_DATA_TYPE_BINARY != paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = getStdInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - - return TSDB_CODE_SUCCESS; +static bool paramSupportOperatorNode(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_EXPR_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_OPERATOR_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NOT_VALUE_NODE); } -static int32_t translateWduration(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // pseudo column do not need to check parameters - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT, - .precision = pFunc->node.resType.precision}; - return TSDB_CODE_SUCCESS; +static bool paramSupportFunctionNode(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_EXPR_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_FUNCTION_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NOT_VALUE_NODE); } -static int32_t translateNowToday(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // pseudo column do not need to check parameters - - // add database precision as param - uint8_t dbPrec = 
pFunc->node.resType.precision; - int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); - if (code != TSDB_CODE_SUCCESS) { - return code; - } - - pFunc->node.resType = - (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_TIMESTAMP].bytes, .type = TSDB_DATA_TYPE_TIMESTAMP}; - return TSDB_CODE_SUCCESS; +static bool paramSupportLogicConNode(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_EXPR_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_LOGIC_CONDITION_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NOT_VALUE_NODE); } -static int32_t translatePi(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = - (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; +static bool paramSupportCaseWhenNode(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_EXPR_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_CASE_WHEN_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NOT_VALUE_NODE); } -static int32_t translateRand(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (0 != LIST_LENGTH(pFunc->pParameterList) && 1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (1 == LIST_LENGTH(pFunc->pParameterList)) { - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_INTEGER_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - if (!pFunc->dual) { - int32_t code = addPseudoParam(&pFunc->pParameterList); - if (code != TSDB_CODE_SUCCESS) { - return code; - } - } - - pFunc->node.resType = - (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; +static bool paramSupportColumnNode(uint64_t typeFlag) { + return FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_EXPR_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_COLUMN_NODE) || + FUNC_MGT_TEST_MASK(typeFlag, FUNC_PARAM_SUPPORT_NOT_VALUE_NODE); } -static int32_t translateRound(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList) && 1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); +static bool paramSupportNodeType(SNode* pNode, uint64_t typeFlag) { + switch (pNode->type) { + case QUERY_NODE_VALUE: + return paramSupportValueNode(typeFlag); + case QUERY_NODE_OPERATOR: + return paramSupportOperatorNode(typeFlag); + case QUERY_NODE_FUNCTION: + return paramSupportFunctionNode(typeFlag); + case QUERY_NODE_LOGIC_CONDITION: + return paramSupportLogicConNode(typeFlag); + case QUERY_NODE_CASE_WHEN: + return paramSupportCaseWhenNode(typeFlag); + case QUERY_NODE_COLUMN: + return paramSupportColumnNode(typeFlag); + default: + return false; } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } else if (IS_NULL_TYPE(paraType)) { - paraType = TSDB_DATA_TYPE_BIGINT; - } - - if (2 == LIST_LENGTH(pFunc->pParameterList)) { - uint8_t paraType2 = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_NUMERIC_TYPE(paraType2) && !IS_NULL_TYPE(paraType2)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); 
- } - } - pFunc->node.resType = (SDataType){.bytes = tDataTypes[paraType].bytes, .type = paraType}; - return TSDB_CODE_SUCCESS; } -static int32_t translateTrunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); +static bool paramSupportDataType(SDataType* pDataType, uint64_t typeFlag) { + switch (pDataType->type) { + case TSDB_DATA_TYPE_NULL: + return paramSupportNull(typeFlag); + case TSDB_DATA_TYPE_BOOL: + return paramSupportBool(typeFlag); + case TSDB_DATA_TYPE_TINYINT: + return paramSupportTinyint(typeFlag); + case TSDB_DATA_TYPE_SMALLINT: + return paramSupportSmallint(typeFlag); + case TSDB_DATA_TYPE_INT: + return paramSupportInt(typeFlag); + case TSDB_DATA_TYPE_BIGINT: + return paramSupportBigint(typeFlag); + case TSDB_DATA_TYPE_FLOAT: + return paramSupportFloat(typeFlag); + case TSDB_DATA_TYPE_DOUBLE: + return paramSupportDouble(typeFlag); + case TSDB_DATA_TYPE_VARCHAR: + return paramSupportVarchar(typeFlag); + case TSDB_DATA_TYPE_TIMESTAMP: + return paramSupportTimestamp(typeFlag); + case TSDB_DATA_TYPE_NCHAR: + return paramSupportNchar(typeFlag); + case TSDB_DATA_TYPE_UTINYINT: + return paramSupportUTinyInt(typeFlag); + case TSDB_DATA_TYPE_USMALLINT: + return paramSupportUSmallInt(typeFlag); + case TSDB_DATA_TYPE_UINT: + return paramSupportUInt(typeFlag); + case TSDB_DATA_TYPE_UBIGINT: + return paramSupportUBigInt(typeFlag); + case TSDB_DATA_TYPE_JSON: + return paramSupportJSON(typeFlag); + case TSDB_DATA_TYPE_VARBINARY: + return paramSupportVarBinary(typeFlag); + case TSDB_DATA_TYPE_GEOMETRY: + return paramSupportGeometry(typeFlag); + default: + return false; } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType2 = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_NUMERIC_TYPE(paraType2) && !IS_NULL_TYPE(paraType2)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[paraType].bytes, .type = paraType}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateTimePseudoColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // pseudo column do not need to check parameters - - pFunc->node.resType = - (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_TIMESTAMP].bytes, .type = TSDB_DATA_TYPE_TIMESTAMP, - .precision = pFunc->node.resType.precision}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateIsFilledPseudoColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // pseudo column do not need to check parameters - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BOOL].bytes, .type = TSDB_DATA_TYPE_BOOL}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateTimezone(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = TD_TIMEZONE_LEN, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translatePercentile(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (numOfParams < 2 || numOfParams > 11) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if 
(!IS_NUMERIC_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - for (int32_t i = 1; i < numOfParams; ++i) { - SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, i); - if (QUERY_NODE_VALUE != nodeType(pValue)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pValue->notReserved = true; - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, i))->type; - if (!IS_NUMERIC_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - double v = 0; - if (IS_INTEGER_TYPE(paraType)) { - v = (double)pValue->datum.i; - } else { - v = pValue->datum.d; - } - - if (v < 0 || v > 100) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - // set result type - if (numOfParams > 2) { - pFunc->node.resType = (SDataType){.bytes = 3200, .type = TSDB_DATA_TYPE_VARCHAR}; - } else { - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - } - return TSDB_CODE_SUCCESS; -} - -static bool validateApercentileAlgo(const SValueNode* pVal) { - if (TSDB_DATA_TYPE_BINARY != pVal->node.resType.type) { - return false; - } - return (0 == strcasecmp(varDataVal(pVal->datum.p), "default") || - 0 == strcasecmp(varDataVal(pVal->datum.p), "t-digest")); -} - -static int32_t translateApercentile(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (2 != numOfParams && 3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param1 - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - if (nodeType(pParamNode1) != QUERY_NODE_VALUE) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - if (pValue->datum.i < 0 || pValue->datum.i > 100) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - - pValue->notReserved = true; - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_NUMERIC_TYPE(para1Type) || !IS_INTEGER_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param2 - if (3 == numOfParams) { - uint8_t para3Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type; - if (!IS_STR_DATA_TYPE(para3Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SNode* pParamNode2 = nodesListGetNode(pFunc->pParameterList, 2); - if (QUERY_NODE_VALUE != nodeType(pParamNode2) || !validateApercentileAlgo((SValueNode*)pParamNode2)) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "Third parameter algorithm of apercentile must be 'default' or 't-digest'"); - } - - pValue = (SValueNode*)pParamNode2; - pValue->notReserved = true; - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateApercentileImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isPartial) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - - if (isPartial) { - if (2 != numOfParams && 3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - // param1 - SNode* 
pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - if (nodeType(pParamNode1) != QUERY_NODE_VALUE) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - if (pValue->datum.i < 0 || pValue->datum.i > 100) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - - pValue->notReserved = true; - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_NUMERIC_TYPE(para1Type) || !IS_INTEGER_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param2 - if (3 == numOfParams) { - uint8_t para3Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type; - if (!IS_STR_DATA_TYPE(para3Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SNode* pParamNode2 = nodesListGetNode(pFunc->pParameterList, 2); - if (QUERY_NODE_VALUE != nodeType(pParamNode2) || !validateApercentileAlgo((SValueNode*)pParamNode2)) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "Third parameter algorithm of apercentile must be 'default' or 't-digest'"); - } - - pValue = (SValueNode*)pParamNode2; - pValue->notReserved = true; - } - - pFunc->node.resType = - (SDataType){.bytes = getApercentileMaxSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - } else { - // original percent param is reserved - if (3 != numOfParams && 2 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (TSDB_DATA_TYPE_BINARY != para1Type || !IS_INTEGER_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (3 == numOfParams) { - uint8_t para3Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type; - if (!IS_STR_DATA_TYPE(para3Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SNode* pParamNode2 = nodesListGetNode(pFunc->pParameterList, 2); - if (QUERY_NODE_VALUE != nodeType(pParamNode2) || !validateApercentileAlgo((SValueNode*)pParamNode2)) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "Third parameter algorithm of apercentile must be 'default' or 't-digest'"); - } - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - } - - return TSDB_CODE_SUCCESS; -} - -static int32_t translateApercentilePartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateApercentileImpl(pFunc, pErrBuf, len, true); -} - -static int32_t translateApercentileMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateApercentileImpl(pFunc, pErrBuf, len, false); -} - -static int32_t translateTbnameColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // pseudo column do not need to check parameters - pFunc->node.resType = - (SDataType){.bytes = TSDB_TABLE_FNAME_LEN - 1 + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateTbUidColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // pseudo column do not need to check parameters - pFunc->node.resType = (SDataType){.bytes = 
tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateVgIdColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // pseudo column do not need to check parameters - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_INT].bytes, .type = TSDB_DATA_TYPE_INT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateVgVerColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateTopBot(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (2 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_NUMERIC_TYPE(para1Type) || !IS_INTEGER_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param1 - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - if (nodeType(pParamNode1) != QUERY_NODE_VALUE) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - if (!IS_INTEGER_TYPE(pValue->node.resType.type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (pValue->datum.i < 1 || pValue->datum.i > TOP_BOTTOM_QUERY_LIMIT) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - - pValue->notReserved = true; - - // set result type - SDataType* pType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); - pFunc->node.resType = (SDataType){.bytes = pType->bytes, .type = pType->type}; - return TSDB_CODE_SUCCESS; -} - -static int32_t reserveFirstMergeParam(SNodeList* pRawParameters, SNode* pPartialRes, SNodeList** pParameters) { - int32_t code = nodesListMakeAppend(pParameters, pPartialRes); - if (TSDB_CODE_SUCCESS == code) { - SNode* pNew = NULL; - code = nodesCloneNode(nodesListGetNode(pRawParameters, 1), &pNew); - if (TSDB_CODE_SUCCESS == code) { - code = nodesListStrictAppend(*pParameters, pNew); - } - } - return code; -} - -int32_t topBotCreateMergeParam(SNodeList* pRawParameters, SNode* pPartialRes, SNodeList** pParameters) { - return reserveFirstMergeParam(pRawParameters, pPartialRes, pParameters); -} - -int32_t apercentileCreateMergeParam(SNodeList* pRawParameters, SNode* pPartialRes, SNodeList** pParameters) { - int32_t code = reserveFirstMergeParam(pRawParameters, pPartialRes, pParameters); - if (TSDB_CODE_SUCCESS == code && pRawParameters->length >= 3) { - SNode* pNew = NULL; - code = nodesCloneNode(nodesListGetNode(pRawParameters, 2), &pNew); - if (TSDB_CODE_SUCCESS == code) { - code = nodesListStrictAppend(*pParameters, pNew); - } - } - return code; -} - -static int32_t translateSpread(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_TIMESTAMP_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = 
(SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateSpreadImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isPartial) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (isPartial) { - if (!IS_NUMERIC_TYPE(paraType) && !IS_TIMESTAMP_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = getSpreadInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - } else { - if (TSDB_DATA_TYPE_BINARY != paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - } - - return TSDB_CODE_SUCCESS; -} - -static int32_t translateSpreadPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateSpreadImpl(pFunc, pErrBuf, len, true); -} - -static int32_t translateSpreadMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateSpreadImpl(pFunc, pErrBuf, len, false); -} - -static int32_t translateSpreadState(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(paraType) && !IS_TIMESTAMP_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = getSpreadInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateSpreadStateMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (paraType != TSDB_DATA_TYPE_BINARY) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = getSpreadInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateElapsed(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (1 != numOfParams && 2 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SNode* pPara1 = nodesListGetNode(pFunc->pParameterList, 0); - if (QUERY_NODE_COLUMN != nodeType(pPara1) || PRIMARYKEY_TIMESTAMP_COL_ID != ((SColumnNode*)pPara1)->colId) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "The first parameter of the ELAPSED function can only be the timestamp primary key"); - } - - // param1 - if (2 == numOfParams) { - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - if (QUERY_NODE_VALUE != nodeType(pParamNode1)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - - pValue->notReserved = true; - - if (!IS_INTEGER_TYPE(pValue->node.resType.type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - 
uint8_t dbPrec = pFunc->node.resType.precision; - - int32_t code = validateTimeUnitParam(dbPrec, (SValueNode*)nodesListGetNode(pFunc->pParameterList, 1)); - if (code == TSDB_CODE_FUNC_TIME_UNIT_TOO_SMALL) { - return buildFuncErrMsg(pErrBuf, len, code, - "ELAPSED function time unit parameter should be greater than db precision"); - } else if (code == TSDB_CODE_FUNC_TIME_UNIT_INVALID) { - return buildFuncErrMsg( - pErrBuf, len, code, - "ELAPSED function time unit parameter should be one of the following: [1b, 1u, 1a, 1s, 1m, 1h, 1d, 1w]"); - } - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateElapsedImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isPartial) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - - if (isPartial) { - if (1 != numOfParams && 2 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_TIMESTAMP_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param1 - if (2 == numOfParams) { - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - - if (QUERY_NODE_VALUE != nodeType(pParamNode1)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - - pValue->notReserved = true; - - paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_INTEGER_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (pValue->datum.i == 0) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "ELAPSED function time unit parameter should be greater than db precision"); - } - } - - pFunc->node.resType = - (SDataType){.bytes = getElapsedInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - } else { - if (1 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (TSDB_DATA_TYPE_BINARY != paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - } - return TSDB_CODE_SUCCESS; -} - -static int32_t translateElapsedPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { -#if 0 - return translateElapsedImpl(pFunc, pErrBuf, len, true); -#endif - return 0; -} - -static int32_t translateElapsedMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { -#if 0 - return translateElapsedImpl(pFunc, pErrBuf, len, false); -#endif - return 0; -} - -static int32_t translateLeastSQR(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - for (int32_t i = 0; i < numOfParams; ++i) { - SNode* pParamNode = nodesListGetNode(pFunc->pParameterList, i); - if (i > 0) { // param1 & param2 - if (QUERY_NODE_VALUE != nodeType(pParamNode)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode; - - pValue->notReserved = true; - } - - uint8_t colType = 
getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, i))->type; - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - pFunc->node.resType = (SDataType){.bytes = LEASTSQUARES_BUFF_LENGTH, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; } typedef enum { UNKNOWN_BIN = 0, USER_INPUT_BIN, LINEAR_BIN, LOG_BIN } EHistoBinType; @@ -1362,473 +689,460 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* return TSDB_CODE_SUCCESS; } -static int32_t translateHistogram(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (4 != numOfParams) { +static int32_t validateParam(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + int32_t code = TSDB_CODE_SUCCESS; + SNodeList* paramList = pFunc->pParameterList; + char errMsg[128] = {0}; + + // no need to check + if (funcMgtBuiltins[pFunc->funcId].parameters.paramInfoPattern == 0) { + return TSDB_CODE_SUCCESS; + } + + // check param num + if ((funcMgtBuiltins[pFunc->funcId].parameters.maxParamNum != -1 && + LIST_LENGTH(paramList) > funcMgtBuiltins[pFunc->funcId].parameters.maxParamNum) || + LIST_LENGTH(paramList) < funcMgtBuiltins[pFunc->funcId].parameters.minParamNum) { return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); } - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } + // check each param + for (int32_t i = 0; i < funcMgtBuiltins[pFunc->funcId].parameters.paramInfoPattern; i++) { + bool isMatch = true; + int32_t paramIdx = 0; + const SParamInfo* paramPattern = funcMgtBuiltins[pFunc->funcId].parameters.inputParaInfo[i]; - // param1 ~ param3 - if (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type != TSDB_DATA_TYPE_BINARY || - getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type != TSDB_DATA_TYPE_BINARY || - !IS_INTEGER_TYPE(getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 3))->type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } + while (1) { + for (int8_t j = paramPattern[paramIdx].startParam; j <= (paramPattern[paramIdx].endParam == -1 ? 
INT8_MAX : paramPattern[paramIdx].endParam); j++) { + if (j > LIST_LENGTH(paramList)) { + code = TSDB_CODE_SUCCESS; + isMatch = true; + break; + } + SNode* pNode = nodesListGetNode(paramList, j - 1); + // check node type + if (!paramSupportNodeType(pNode, paramPattern[paramIdx].validNodeType)) { + code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; + isMatch = false; + break; + } + // check data type + if (!paramSupportDataType(getSDataTypeFromNode(pNode), paramPattern[paramIdx].validDataType)) { + code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; + isMatch = false; + break; + } + if (paramPattern[paramIdx].validNodeType == FUNC_PARAM_SUPPORT_VALUE_NODE) { + SValueNode* pVal = (SValueNode*)pNode; + pVal->notReserved = true; + } + // check range value + if (paramPattern[paramIdx].hasRange) { + if (pNode->type == QUERY_NODE_VALUE) { + SValueNode* pVal = (SValueNode*)pNode; + if (IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type)) { + if ((double)pVal->datum.i < paramPattern[paramIdx].range.dMinVal || + (double)pVal->datum.i > paramPattern[paramIdx].range.dMaxVal) { + code = TSDB_CODE_FUNC_FUNTION_PARA_RANGE; + isMatch = false; + break; + } + } else { + if ((double)pVal->datum.d < paramPattern[paramIdx].range.dMinVal || + (double)pVal->datum.d > paramPattern[paramIdx].range.dMaxVal) { + code = TSDB_CODE_FUNC_FUNTION_PARA_RANGE; + isMatch = false; + break; + } + } + } else { + // for other node type, range check should be done in process function + } + } + // check fixed value + if (paramPattern[paramIdx].isFixedValue) { + if (pNode->type == QUERY_NODE_VALUE) { + SValueNode* pVal = (SValueNode*)pNode; + if (IS_NUMERIC_TYPE(getSDataTypeFromNode(pNode)->type)) { + for (int32_t k = 0; k < paramPattern[paramIdx].fixedValueSize; k++) { + if (pVal->datum.i == paramPattern[paramIdx].fixedNumValue[k]) { + code = TSDB_CODE_SUCCESS; + isMatch = true; + break; + } else { + code = TSDB_CODE_FUNC_FUNTION_PARA_VALUE; + isMatch = false; + } + } + } else if (IS_STR_DATA_TYPE(getSDataTypeFromNode(pNode)->type)) { + for (int32_t k = 0; k < paramPattern[paramIdx].fixedValueSize; k++) { + if (strcasecmp(pVal->literal, paramPattern[paramIdx].fixedStrValue[k]) == 0) { + code = TSDB_CODE_SUCCESS; + isMatch = true; + break; + } else { + code = TSDB_CODE_FUNC_FUNTION_PARA_VALUE; + isMatch = false; + } + } + } + if (!isMatch) { + break; + } + } else { + // for other node type, fixed value check should be done in process function + } + } + // check isTs + if (paramPattern[paramIdx].isTs) { + if (nodeType(pNode) != QUERY_NODE_COLUMN || !IS_TIMESTAMP_TYPE(getSDataTypeFromNode(pNode)->type) || + !((SColumnNode*)pNode)->isPrimTs) { + code = TSDB_CODE_FUNC_FUNTION_PARA_PRIMTS; + isMatch = false; + break; + } + } + // check isPK + if (paramPattern[paramIdx].isPK) { + if (nodeType(pNode) != QUERY_NODE_COLUMN || !IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type) || + !((SColumnNode*)pNode)->isPk) { + code = TSDB_CODE_FUNC_FUNTION_PARA_PK; + isMatch = false; + break; + } + } + // check hasColumn + if (paramPattern[paramIdx].hasColumn) { + if (!nodesExprHasColumn(pNode)) { + code = TSDB_CODE_FUNC_FUNTION_PARA_HAS_COL; + isMatch = false; + break; + } + } + // check first and last + if (paramPattern[paramIdx].isFirstLast) { + if (IS_NULL_TYPE(getSDataTypeFromNode(pNode)->type) && QUERY_NODE_VALUE == nodeType(pNode)) { + code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; + isMatch = false; + break; + } + } + // check time unit + if (paramPattern[paramIdx].isTimeUnit) { + if (nodeType(pNode) != QUERY_NODE_VALUE || 
!IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type)) { + code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; + isMatch = false; + break; + } - int8_t binType; - char* binDesc; - for (int32_t i = 1; i < numOfParams; ++i) { - SNode* pParamNode = nodesListGetNode(pFunc->pParameterList, i); - if (QUERY_NODE_VALUE != nodeType(pParamNode)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } + if (IS_NULL_TYPE(getSDataTypeFromNode(pNode)->type)) { + code = TSDB_CODE_SUCCESS; + isMatch = true; + continue; + } - SValueNode* pValue = (SValueNode*)pParamNode; - - pValue->notReserved = true; - - if (i == 1) { - binType = validateHistogramBinType(varDataVal(pValue->datum.p)); - if (binType == UNKNOWN_BIN) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "HISTOGRAM function binType parameter should be " - "\"user_input\", \"log_bin\" or \"linear_bin\""); - } - } - - if (i == 2) { - char errMsg[128] = {0}; - binDesc = varDataVal(pValue->datum.p); - if (TSDB_CODE_SUCCESS != validateHistogramBinDesc(binDesc, binType, errMsg, (int32_t)sizeof(errMsg))) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, errMsg); - } - } - - if (i == 3 && pValue->datum.i != 1 && pValue->datum.i != 0) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "HISTOGRAM function normalized parameter should be 0/1"); - } - } - - pFunc->node.resType = (SDataType){.bytes = 512, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateHistogramImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isPartial) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (isPartial) { - if (4 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param1 ~ param3 - if (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type != TSDB_DATA_TYPE_BINARY || - getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type != TSDB_DATA_TYPE_BINARY || - !IS_INTEGER_TYPE(getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 3))->type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - int8_t binType; - char* binDesc; - for (int32_t i = 1; i < numOfParams; ++i) { - SNode* pParamNode = nodesListGetNode(pFunc->pParameterList, i); - if (QUERY_NODE_VALUE != nodeType(pParamNode)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode; - - pValue->notReserved = true; - - if (i == 1) { - binType = validateHistogramBinType(varDataVal(pValue->datum.p)); - if (binType == UNKNOWN_BIN) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "HISTOGRAM function binType parameter should be " - "\"user_input\", \"log_bin\" or \"linear_bin\""); + code = validateTimeUnitParam(pFunc->node.resType.precision, (SValueNode*)pNode); + if (TSDB_CODE_SUCCESS != code) { + isMatch = false; + break; + } + } + // check histogram binary + if (paramPattern[paramIdx].isHistogramBin) { + if (nodeType(pNode) != QUERY_NODE_VALUE) { + code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; + isMatch = false; + break; + } + SValueNode *pValue = (SValueNode *)pNode; + SValueNode *pBinValue = (SValueNode *)nodesListGetNode(paramList, 1); + char* binDesc = varDataVal(pValue->datum.p); + int8_t 
binType = validateHistogramBinType(varDataVal(pBinValue->datum.p));
+        if (binType == UNKNOWN_BIN) {
+          code = TSDB_CODE_FUNC_FUNCTION_HISTO_TYPE;
+          isMatch = false;
+          break;
+        }
+        code = validateHistogramBinDesc(binDesc, binType, errMsg, (int32_t)sizeof(errMsg));
+        if (TSDB_CODE_SUCCESS != code) {
+          code = TSDB_CODE_FUNC_HISTOGRAM_ERROR;
+          isMatch = false;
+          break;
+        }
+      }
      }
    }
 
-    if (i == 2) {
-      char errMsg[128] = {0};
-      binDesc = varDataVal(pValue->datum.p);
-      if (TSDB_CODE_SUCCESS != validateHistogramBinDesc(binDesc, binType, errMsg, (int32_t)sizeof(errMsg))) {
-        return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, errMsg);
-      }
-    }
-
-    if (i == 3 && pValue->datum.i != 1 && pValue->datum.i != 0) {
-      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR,
-                             "HISTOGRAM function normalized parameter should be 0/1");
+      if (paramPattern[paramIdx].isLastParam || !isMatch) {
+        break;
       }
+      paramIdx++;
     }
-
-    pFunc->node.resType =
-        (SDataType){.bytes = getHistogramInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY};
-  } else {
-    if (1 != numOfParams) {
+    if (isMatch) {
+      return TSDB_CODE_SUCCESS;
+    }
+  }
+  switch (code) {
+    case TSDB_CODE_FUNC_FUNTION_PARA_NUM:
       return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName);
-    }
-
-    if (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type != TSDB_DATA_TYPE_BINARY) {
+    case TSDB_CODE_FUNC_FUNTION_PARA_TYPE:
       return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName);
-    }
-
-    pFunc->node.resType = (SDataType){.bytes = 512, .type = TSDB_DATA_TYPE_BINARY};
+    case TSDB_CODE_FUNC_FUNTION_PARA_VALUE:
+      return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName);
+    case TSDB_CODE_FUNC_FUNTION_PARA_RANGE:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_PARA_RANGE, "Invalid parameter range : %s",
+                             pFunc->functionName);
+    case TSDB_CODE_FUNC_FUNTION_PARA_PRIMTS:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_PARA_PRIMTS, "Parameter should be primary timestamp : %s",
+                             pFunc->functionName);
+    case TSDB_CODE_FUNC_FUNTION_PARA_PK:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_PARA_PK, "Parameter should be primary key : %s",
+                             pFunc->functionName);
+    case TSDB_CODE_FUNC_FUNTION_PARA_HAS_COL:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_PARA_HAS_COL, "Parameter should have column : %s",
+                             pFunc->functionName);
+    case TSDB_CODE_FUNC_TIME_UNIT_INVALID:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_TIME_UNIT_INVALID, "Invalid time unit format : %s",
+                             pFunc->functionName);
+    case TSDB_CODE_FUNC_TIME_UNIT_TOO_SMALL:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_TIME_UNIT_TOO_SMALL, "Time unit is too small : %s",
+                             pFunc->functionName);
+    case TSDB_CODE_FUNC_FUNCTION_HISTO_TYPE:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNCTION_HISTO_TYPE, "Invalid histogram bin type : %s",
+                             pFunc->functionName);
+    case TSDB_CODE_FUNC_HISTOGRAM_ERROR:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_HISTOGRAM_ERROR, errMsg, pFunc->functionName);
+    default:
+      return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, "Function parameter check failed : %s",
+                             pFunc->functionName);
   }
+}
+
+// There is only one parameter of numeric type, and the return type is parameter type
+static int32_t translateOutNum(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
+  FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len));
+  uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type;
+  if (IS_NULL_TYPE(paraType)) {
+    paraType = TSDB_DATA_TYPE_BIGINT;
+  }
+  pFunc->node.resType = (SDataType){.bytes = tDataTypes[paraType].bytes, .type = paraType};
   return TSDB_CODE_SUCCESS;
 }
 
-static int32_t translateHistogramPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
-  return translateHistogramImpl(pFunc, pErrBuf, len, true);
+// There is only one parameter, and the return type is parameter type
+static int32_t translateMinMax(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
+  FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len));
+
+  SDataType* dataType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0));
+  uint8_t paraType = IS_NULL_TYPE(dataType->type) ? TSDB_DATA_TYPE_BIGINT : dataType->type;
+  int32_t bytes = IS_STR_DATA_TYPE(paraType) ? dataType->bytes : tDataTypes[paraType].bytes;
+  pFunc->node.resType = (SDataType){.bytes = bytes, .type = paraType};
+  return TSDB_CODE_SUCCESS;
 }
 
-static int32_t translateHistogramMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
-  return translateHistogramImpl(pFunc, pErrBuf, len, false);
+// The return type is DOUBLE type
+static int32_t translateOutDouble(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
+  FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len));
+  pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE};
+  return TSDB_CODE_SUCCESS;
 }
 
-static int32_t translateHLL(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
-  if (1 != LIST_LENGTH(pFunc->pParameterList)) {
-    return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName);
+
+static int32_t translateTrimStr(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isLtrim) {
+  FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len));
+
+  SDataType* pRestType1 = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0));
+
+  int32_t numOfSpaces = 0;
+  SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 0);
+  // for select trim functions with constant value from table,
+  // need to set the proper result schema bytes to avoid
+  // trailing garbage characters
+  if (nodeType(pParamNode1) == QUERY_NODE_VALUE) {
+    SValueNode* pValue = (SValueNode*)pParamNode1;
+    numOfSpaces = countTrailingSpaces(pValue, isLtrim);
   }
+  int32_t resBytes = pRestType1->bytes - numOfSpaces;
+  pFunc->node.resType = (SDataType){.bytes = resBytes, .type = pRestType1->type};
+  return TSDB_CODE_SUCCESS;
+}
+
+static int32_t translateLtrim(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
+  return translateTrimStr(pFunc, pErrBuf, len, true);
+}
+
+static int32_t translateRtrim(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
+  return translateTrimStr(pFunc, pErrBuf, len, false);
+}
+
+// The return type is BIGINT type
+static int32_t translateOutBigInt(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
+  FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len));
   pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT};
   return TSDB_CODE_SUCCESS;
 }
 
-static int32_t translateHLLImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isPartial) {
-  if (1 != LIST_LENGTH(pFunc->pParameterList)) {
-    return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName);
-  }
+static int32_t translateSum(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
+  FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len));
 
-  if (isPartial) {
-    pFunc->node.resType =
-        (SDataType){.bytes = getHistogramInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY};
-  } else {
-    
pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - } - - return TSDB_CODE_SUCCESS; -} - -static int32_t translateHLLPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateHLLImpl(pFunc, pErrBuf, len, true); -} - -static int32_t translateHLLMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateHLLImpl(pFunc, pErrBuf, len, false); -} - -static int32_t translateHLLState(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateHLLPartial(pFunc, pErrBuf, len); -} - -static int32_t translateHLLStateMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - if (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type != TSDB_DATA_TYPE_BINARY) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = - (SDataType){.bytes = getHistogramInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; -} - -static bool validateStateOper(const SValueNode* pVal) { - if (TSDB_DATA_TYPE_BINARY != pVal->node.resType.type) { - return false; - } - if (strlen(varDataVal(pVal->datum.p)) == 2) { - return ( - 0 == strncasecmp(varDataVal(pVal->datum.p), "GT", 2) || 0 == strncasecmp(varDataVal(pVal->datum.p), "GE", 2) || - 0 == strncasecmp(varDataVal(pVal->datum.p), "LT", 2) || 0 == strncasecmp(varDataVal(pVal->datum.p), "LE", 2) || - 0 == strncasecmp(varDataVal(pVal->datum.p), "EQ", 2) || 0 == strncasecmp(varDataVal(pVal->datum.p), "NE", 2)); - } - return false; -} - -static int32_t translateStateCount(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param1 & param2 - for (int32_t i = 1; i < numOfParams; ++i) { - SNode* pParamNode = nodesListGetNode(pFunc->pParameterList, i); - if (QUERY_NODE_VALUE != nodeType(pParamNode)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode; - - if (i == 1 && !validateStateOper(pValue)) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "Second parameter of STATECOUNT function" - "must be one of the following: 'GE', 'GT', 'LE', 'LT', 'EQ', 'NE'"); - } - - pValue->notReserved = true; - } - - if (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type != TSDB_DATA_TYPE_BINARY || - (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type != TSDB_DATA_TYPE_BIGINT && - getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type != TSDB_DATA_TYPE_DOUBLE)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // set result type - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateStateDuration(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (3 != numOfParams && 4 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, 
pFunc->functionName); - } - - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param1, param2 & param3 - for (int32_t i = 1; i < numOfParams; ++i) { - SNode* pParamNode = nodesListGetNode(pFunc->pParameterList, i); - if (QUERY_NODE_VALUE != nodeType(pParamNode)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode; - - if (i == 1 && !validateStateOper(pValue)) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "Second parameter of STATEDURATION function" - "must be one of the following: 'GE', 'GT', 'LE', 'LT', 'EQ', 'NE'"); - } else if (i == 3 && pValue->datum.i == 0) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "STATEDURATION function time unit parameter should be greater than db precision"); - } - - pValue->notReserved = true; - } - - if (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type != TSDB_DATA_TYPE_BINARY || - (getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type != TSDB_DATA_TYPE_BIGINT && - getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type != TSDB_DATA_TYPE_DOUBLE)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (numOfParams == 4 && - getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 3))->type != TSDB_DATA_TYPE_BIGINT) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (numOfParams == 4) { - uint8_t dbPrec = pFunc->node.resType.precision; - - int32_t code = validateTimeUnitParam(dbPrec, (SValueNode*)nodesListGetNode(pFunc->pParameterList, 3)); - if (code == TSDB_CODE_FUNC_TIME_UNIT_TOO_SMALL) { - return buildFuncErrMsg(pErrBuf, len, code, - "STATEDURATION function time unit parameter should be greater than db precision"); - } else if (code == TSDB_CODE_FUNC_TIME_UNIT_INVALID) { - return buildFuncErrMsg(pErrBuf, len, code, - "STATEDURATION function time unit parameter should be one of the following: [1b, 1u, 1a, " - "1s, 1m, 1h, 1d, 1w]"); - } - } - - // set result type - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateCsum(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t resType; - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } else { - if (IS_SIGNED_NUMERIC_TYPE(colType)) { - resType = TSDB_DATA_TYPE_BIGINT; - } else if (IS_UNSIGNED_NUMERIC_TYPE(colType)) { - resType = TSDB_DATA_TYPE_UBIGINT; - } else if (IS_FLOAT_TYPE(colType)) { - resType = TSDB_DATA_TYPE_DOUBLE; - } else { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } + uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; + uint8_t resType = 0; + if (IS_SIGNED_NUMERIC_TYPE(paraType) || TSDB_DATA_TYPE_BOOL == paraType || IS_NULL_TYPE(paraType)) { + resType = TSDB_DATA_TYPE_BIGINT; + } else if (IS_UNSIGNED_NUMERIC_TYPE(paraType)) { + resType = TSDB_DATA_TYPE_UBIGINT; + } else if (IS_FLOAT_TYPE(paraType)) { + resType = 
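/* FLOAT and DOUBLE inputs are summed as DOUBLE */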
TSDB_DATA_TYPE_DOUBLE; } pFunc->node.resType = (SDataType){.bytes = tDataTypes[resType].bytes, .type = resType}; return TSDB_CODE_SUCCESS; } +static int32_t translateWduration(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + // pseudo column do not need to check parameters + pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT, + .precision = pFunc->node.resType.precision}; + return TSDB_CODE_SUCCESS; +} + +static int32_t translateNowToday(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + // pseudo column do not need to check parameters + + // add database precision as param + uint8_t dbPrec = pFunc->node.resType.precision; + int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); + if (code != TSDB_CODE_SUCCESS) { + return code; + } + + pFunc->node.resType = + (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_TIMESTAMP].bytes, .type = TSDB_DATA_TYPE_TIMESTAMP}; + return TSDB_CODE_SUCCESS; +} + +static int32_t translateRand(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); + + if (!pFunc->dual) { + int32_t code = addPseudoParam(&pFunc->pParameterList); + if (code != TSDB_CODE_SUCCESS) { + return code; + } + } + + pFunc->node.resType = + (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; + return TSDB_CODE_SUCCESS; +} + +// return type is same as first input parameter's type +static int32_t translateOutFirstIn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); + pFunc->node.resType = *getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); + return TSDB_CODE_SUCCESS; +} + +static int32_t translateTimePseudoColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + // pseudo column do not need to check parameters + + pFunc->node.resType = + (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_TIMESTAMP].bytes, .type = TSDB_DATA_TYPE_TIMESTAMP, + .precision = pFunc->node.resType.precision}; + return TSDB_CODE_SUCCESS; +} + +static int32_t translateIsFilledPseudoColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + // pseudo column do not need to check parameters + + pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BOOL].bytes, .type = TSDB_DATA_TYPE_BOOL}; + return TSDB_CODE_SUCCESS; +} + +static int32_t translatePercentile(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); + int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); + + // set result type + if (numOfParams > 2) { + pFunc->node.resType = (SDataType){.bytes = 3200, .type = TSDB_DATA_TYPE_VARCHAR}; + } else { + pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; + } + return TSDB_CODE_SUCCESS; +} + +static int32_t translateVgIdColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + // pseudo column do not need to check parameters + pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_INT].bytes, .type = TSDB_DATA_TYPE_INT}; + return TSDB_CODE_SUCCESS; +} + + +static int32_t reserveFirstMergeParam(SNodeList* pRawParameters, SNode* pPartialRes, SNodeList** pParameters) { + int32_t code = nodesListMakeAppend(pParameters, pPartialRes); + if (TSDB_CODE_SUCCESS == code) { + SNode* pNew = NULL; + code = nodesCloneNode(nodesListGetNode(pRawParameters, 1), &pNew); + if (TSDB_CODE_SUCCESS == code) { + code = nodesListStrictAppend(*pParameters, pNew); + 
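/* pNew clones the second argument of the original call, e.g. k for TOP/BOTTOM or the percentile for APERCENTILE */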
} + } + return code; +} + +int32_t topBotCreateMergeParam(SNodeList* pRawParameters, SNode* pPartialRes, SNodeList** pParameters) { + return reserveFirstMergeParam(pRawParameters, pPartialRes, pParameters); +} + +int32_t apercentileCreateMergeParam(SNodeList* pRawParameters, SNode* pPartialRes, SNodeList** pParameters) { + int32_t code = reserveFirstMergeParam(pRawParameters, pPartialRes, pParameters); + if (TSDB_CODE_SUCCESS == code && pRawParameters->length >= 3) { + SNode* pNew = NULL; + code = nodesCloneNode(nodesListGetNode(pRawParameters, 2), &pNew); + if (TSDB_CODE_SUCCESS == code) { + code = nodesListStrictAppend(*pParameters, pNew); + } + } + return code; +} + +static int32_t translateElapsedPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + return 0; +} + +static int32_t translateElapsedMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + return 0; +} + +static int32_t translateCsum(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); + uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; + uint8_t resType; + if (IS_SIGNED_NUMERIC_TYPE(colType)) { + resType = TSDB_DATA_TYPE_BIGINT; + } else if (IS_UNSIGNED_NUMERIC_TYPE(colType)) { + resType = TSDB_DATA_TYPE_UBIGINT; + } else if (IS_FLOAT_TYPE(colType)) { + resType = TSDB_DATA_TYPE_DOUBLE; + } else { + return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); + } + pFunc->node.resType = (SDataType){.bytes = tDataTypes[resType].bytes, .type = resType}; + return TSDB_CODE_SUCCESS; +} + static EFuncReturnRows csumEstReturnRows(SFunctionNode* pFunc) { return FUNC_RETURN_ROWS_N; } -static int32_t translateMavg(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - // param1 - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - if (QUERY_NODE_VALUE != nodeType(pParamNode1)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - if (pValue->datum.i < 1 || pValue->datum.i > 1000) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - - pValue->notReserved = true; - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_NUMERIC_TYPE(colType) || !IS_INTEGER_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateSample(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } +static int32_t translateSampleTail(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); SDataType* pSDataType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); uint8_t colType = pSDataType->type; - // param1 - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - if (QUERY_NODE_VALUE != nodeType(pParamNode1)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - if (pValue->datum.i < 1 || 
pValue->datum.i > 1000) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - - pValue->notReserved = true; - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_INTEGER_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - // set result type - if (IS_STR_DATA_TYPE(colType)) { - pFunc->node.resType = (SDataType){.bytes = pSDataType->bytes, .type = colType}; - } else { - pFunc->node.resType = (SDataType){.bytes = tDataTypes[colType].bytes, .type = colType}; - } - - return TSDB_CODE_SUCCESS; -} - -static int32_t translateTail(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (2 != numOfParams && 3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SDataType* pSDataType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); - uint8_t colType = pSDataType->type; - - // param1 & param2 - for (int32_t i = 1; i < numOfParams; ++i) { - SNode* pParamNode = nodesListGetNode(pFunc->pParameterList, i); - if (QUERY_NODE_VALUE != nodeType(pParamNode)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode; - - if ((IS_SIGNED_NUMERIC_TYPE(pValue->node.resType.type) ? pValue->datum.i : pValue->datum.u) < ((i > 1) ? 0 : 1) || - (IS_SIGNED_NUMERIC_TYPE(pValue->node.resType.type) ? pValue->datum.i : pValue->datum.u) > 100) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "TAIL function second parameter should be in range [1, 100], " - "third parameter should be in range [0, 100]"); - } - - pValue->notReserved = true; - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, i))->type; - if (!IS_INTEGER_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - // set result type - if (IS_STR_DATA_TYPE(colType)) { - pFunc->node.resType = (SDataType){.bytes = pSDataType->bytes, .type = colType}; - } else { - pFunc->node.resType = (SDataType){.bytes = tDataTypes[colType].bytes, .type = colType}; - } - return TSDB_CODE_SUCCESS; -} - -static int32_t translateDerivative(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (3 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - - // param1 - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - SValueNode* pValue1 = (SValueNode*)pParamNode1; - if (QUERY_NODE_VALUE != nodeType(pParamNode1)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (pValue1->datum.i <= 0) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - pValue->notReserved = true; - - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SNode* pParamNode2 = nodesListGetNode(pFunc->pParameterList, 2); - SValueNode* pValue2 = (SValueNode*)pParamNode2; - pValue2->notReserved = true; - - if (pValue2->datum.i != 0 && pValue2->datum.i != 1) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; + pFunc->node.resType = 
(SDataType){.bytes = IS_STR_DATA_TYPE(colType) ? pSDataType->bytes : tDataTypes[colType].bytes, + .type = colType}; return TSDB_CODE_SUCCESS; } @@ -1837,17 +1151,8 @@ static EFuncReturnRows derivativeEstReturnRows(SFunctionNode* pFunc) { : FUNC_RETURN_ROWS_N_MINUS_1; } -static int32_t translateIrate(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - +static int32_t translateAddPrecOutDouble(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); // add database precision as param uint8_t dbPrec = pFunc->node.resType.precision; int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); @@ -1859,117 +1164,8 @@ static int32_t translateIrate(SFunctionNode* pFunc, char* pErrBuf, int32_t len) return TSDB_CODE_SUCCESS; } -static int32_t translateIrateImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isPartial) { - uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (isPartial) { - if (3 != LIST_LENGTH(pFunc->pParameterList) && 4 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - if (!IS_NUMERIC_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - int32_t pkBytes = (pFunc->hasPk) ? pFunc->pkBytes : 0; - pFunc->node.resType = (SDataType){.bytes = getIrateInfoSize(pkBytes) + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - } else { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - if (TSDB_DATA_TYPE_BINARY != colType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE}; - - // add database precision as param - uint8_t dbPrec = pFunc->node.resType.precision; - int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); - if (code != TSDB_CODE_SUCCESS) { - return code; - } - } - - - return TSDB_CODE_SUCCESS; -} - -static int32_t translateIratePartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateIrateImpl(pFunc, pErrBuf, len, true); -} - -static int32_t translateIrateMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateIrateImpl(pFunc, pErrBuf, len, false); -} - static int32_t translateInterp(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - uint8_t dbPrec = pFunc->node.resType.precision; - - if (2 < numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t nodeType = nodeType(nodesListGetNode(pFunc->pParameterList, 0)); - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if ((!IS_NUMERIC_TYPE(paraType) && !IS_BOOLEAN_TYPE(paraType)) || QUERY_NODE_VALUE == nodeType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (2 == numOfParams) { - nodeType = nodeType(nodesListGetNode(pFunc->pParameterList, 1)); - paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_INTEGER_TYPE(paraType) || 
QUERY_NODE_VALUE != nodeType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, 1); - if (pValue->datum.i != 0 && pValue->datum.i != 1) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "INTERP function second parameter should be 0/1"); - } - - pValue->notReserved = true; - } - -#if 0 - if (3 <= numOfParams) { - int64_t timeVal[2] = {0}; - for (int32_t i = 1; i < 3; ++i) { - nodeType = nodeType(nodesListGetNode(pFunc->pParameterList, i)); - paraType = ((SExprNode*)nodesListGetNode(pFunc->pParameterList, i))->resType.type; - if (!IS_STR_DATA_TYPE(paraType) || QUERY_NODE_VALUE != nodeType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, i); - int32_t ret = convertStringToTimestamp(paraType, pValue->datum.p, dbPrec, &timeVal[i - 1]); - if (ret != TSDB_CODE_SUCCESS) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - if (timeVal[0] > timeVal[1]) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, "INTERP function invalid time range"); - } - } - - if (4 == numOfParams) { - nodeType = nodeType(nodesListGetNode(pFunc->pParameterList, 3)); - paraType = ((SExprNode*)nodesListGetNode(pFunc->pParameterList, 3))->resType.type; - if (!IS_INTEGER_TYPE(paraType) || QUERY_NODE_VALUE != nodeType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - int32_t ret = validateTimeUnitParam(dbPrec, (SValueNode*)nodesListGetNode(pFunc->pParameterList, 3)); - if (ret == TIME_UNIT_TOO_SMALL) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "INTERP function time interval parameter should be greater than db precision"); - } else if (ret == TIME_UNIT_INVALID) { - return buildFuncErrMsg( - pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "INTERP function time interval parameter should be one of the following: [1b, 1u, 1a, 1s, 1m, 1h, 1d, 1w]"); - } - } -#endif - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); pFunc->node.resType = ((SExprNode*)nodesListGetNode(pFunc->pParameterList, 0))->resType; return TSDB_CODE_SUCCESS; } @@ -1983,102 +1179,6 @@ static EFuncReturnRows interpEstReturnRows(SFunctionNode* pFunc) { } } -static int32_t translateFirstLast(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - // forbid null as first/last input, since first(c0, null, 1) may have different number of input - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - - for (int32_t i = 0; i < numOfParams; ++i) { - uint8_t nodeType = nodeType(nodesListGetNode(pFunc->pParameterList, i)); - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, i))->type; - if (IS_NULL_TYPE(paraType) && QUERY_NODE_VALUE == nodeType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - pFunc->node.resType = *getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); - return TSDB_CODE_SUCCESS; -} - -static int32_t translateFirstLastImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isPartial) { - // first(col_list) will be rewritten as first(col) - SNode* pPara = nodesListGetNode(pFunc->pParameterList, 0); - uint8_t paraType = getSDataTypeFromNode(pPara)->type; - int32_t paraBytes = getSDataTypeFromNode(pPara)->bytes; - if (isPartial) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - for (int32_t i = 0; i < numOfParams; ++i) { - 
uint8_t nodeType = nodeType(nodesListGetNode(pFunc->pParameterList, i)); - uint8_t pType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, i))->type; - if (IS_NULL_TYPE(pType) && QUERY_NODE_VALUE == nodeType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - int32_t pkBytes = (pFunc->hasPk) ? pFunc->pkBytes : 0; - pFunc->node.resType = - (SDataType){.bytes = getFirstLastInfoSize(paraBytes, pkBytes) + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - } else { - if (TSDB_DATA_TYPE_BINARY != paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = ((SExprNode*)pPara)->resType; - } - return TSDB_CODE_SUCCESS; -} - -static int32_t translateFirstLastPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateFirstLastImpl(pFunc, pErrBuf, len, true); -} - -static int32_t translateFirstLastMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateFirstLastImpl(pFunc, pErrBuf, len, false); -} - -static int32_t translateFirstLastState(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - SNode* pPara = nodesListGetNode(pFunc->pParameterList, 0); - int32_t paraBytes = getSDataTypeFromNode(pPara)->bytes; - - int32_t pkBytes = (pFunc->hasPk) ? pFunc->pkBytes : 0; - pFunc->node.resType = - (SDataType){.bytes = getFirstLastInfoSize(paraBytes, pkBytes) + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateFirstLastStateMerge(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - SNode* pPara = nodesListGetNode(pFunc->pParameterList, 0); - int32_t paraBytes = getSDataTypeFromNode(pPara)->bytes; - uint8_t paraType = getSDataTypeFromNode(pPara)->type; - if (paraType != TSDB_DATA_TYPE_BINARY) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = paraBytes, .type = TSDB_DATA_TYPE_BINARY}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateUniqueMode(SFunctionNode* pFunc, char* pErrBuf, int32_t len, bool isUnique) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SNode* pPara = nodesListGetNode(pFunc->pParameterList, 0); - if (!nodesExprHasColumn(pPara)) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, "The parameters of %s must contain columns", - isUnique ? 
"UNIQUE" : "MODE"); - } - - pFunc->node.resType = ((SExprNode*)pPara)->resType; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateUnique(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateUniqueMode(pFunc, pErrBuf, len, true); -} - -static int32_t translateMode(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - return translateUniqueMode(pFunc, pErrBuf, len, false); -} - static int32_t translateForecast(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); if (2 != numOfParams && 1 != numOfParams) { @@ -2121,37 +1221,8 @@ static int32_t translateForecastConf(SFunctionNode* pFunc, char* pErrBuf, int32_ static EFuncReturnRows forecastEstReturnRows(SFunctionNode* pFunc) { return FUNC_RETURN_ROWS_N; } static int32_t translateDiff(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (numOfParams > 2) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); uint8_t colType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_INTEGER_TYPE(colType) && !IS_FLOAT_TYPE(colType) && TSDB_DATA_TYPE_BOOL != colType && - !IS_TIMESTAMP_TYPE(colType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param1 - if (numOfParams == 2) { - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_INTEGER_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SNode* pParamNode1 = nodesListGetNode(pFunc->pParameterList, 1); - if (QUERY_NODE_VALUE != nodeType(pParamNode1)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - SValueNode* pValue = (SValueNode*)pParamNode1; - if (pValue->datum.i < 0 || pValue->datum.i > 3) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "Second parameter of DIFF function should be a number between 0 and 3."); - } - - pValue->notReserved = true; - } uint8_t resType; if (IS_SIGNED_NUMERIC_TYPE(colType) || IS_TIMESTAMP_TYPE(colType) || TSDB_DATA_TYPE_BOOL == colType) { @@ -2173,65 +1244,19 @@ static EFuncReturnRows diffEstReturnRows(SFunctionNode* pFunc) { : FUNC_RETURN_ROWS_N_MINUS_1; } -static int32_t translateLength(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_STR_DATA_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateCharLength(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (paraType == TSDB_DATA_TYPE_VARBINARY || (!IS_STR_DATA_TYPE(paraType) && !IS_NULL_TYPE(paraType))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - 
return TSDB_CODE_SUCCESS; -} - static int32_t translateConcatImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len, int32_t minParaNum, int32_t maxParaNum, bool hasSep) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (numOfParams < minParaNum || numOfParams > maxParaNum) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } uint8_t resultType = TSDB_DATA_TYPE_BINARY; int32_t resultBytes = 0; int32_t sepBytes = 0; - // concat_ws separator should be constant string - if (hasSep) { - SNode* pPara = nodesListGetNode(pFunc->pParameterList, 0); - if (nodeType(pPara) != QUERY_NODE_VALUE) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, - "The first parameter of CONCAT_WS function can only be constant string"); - } - } - /* For concat/concat_ws function, if params have NCHAR type, promote the final result to NCHAR */ for (int32_t i = 0; i < numOfParams; ++i) { SNode* pPara = nodesListGetNode(pFunc->pParameterList, i); uint8_t paraType = getSDataTypeFromNode(pPara)->type; - if (TSDB_DATA_TYPE_VARBINARY == paraType) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (!IS_STR_DATA_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } if (TSDB_DATA_TYPE_NCHAR == paraType) { resultType = paraType; } @@ -2274,130 +1299,29 @@ static int32_t translateConcatWs(SFunctionNode* pFunc, char* pErrBuf, int32_t le return translateConcatImpl(pFunc, pErrBuf, len, 3, 9, true); } -static int32_t translateSubstr(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (2 != numOfParams && 3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SExprNode* pPara0 = (SExprNode*)nodesListGetNode(pFunc->pParameterList, 0); - SExprNode* pPara1 = (SExprNode*)nodesListGetNode(pFunc->pParameterList, 1); - - uint8_t para0Type = pPara0->resType.type; - uint8_t para1Type = pPara1->resType.type; - if (TSDB_DATA_TYPE_VARBINARY == para0Type || - (!IS_STR_DATA_TYPE(para0Type) && !IS_NULL_TYPE(para0Type)) || - (!IS_INTEGER_TYPE(para1Type) && !IS_NULL_TYPE(para1Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (3 == numOfParams) { - SExprNode* pPara2 = (SExprNode*)nodesListGetNode(pFunc->pParameterList, 2); - uint8_t para2Type = pPara2->resType.type; - if (!IS_INTEGER_TYPE(para2Type) && !IS_NULL_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - int64_t v = ((SValueNode*)pPara2)->datum.i; - } - - pFunc->node.resType = (SDataType){.bytes = pPara0->resType.bytes, .type = pPara0->resType.type}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateSubstrIdx(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (3 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SExprNode* pPara0 = (SExprNode*)nodesListGetNode(pFunc->pParameterList, 0); - SExprNode* pPara1 = (SExprNode*)nodesListGetNode(pFunc->pParameterList, 1); - SExprNode* pPara2 = (SExprNode*)nodesListGetNode(pFunc->pParameterList, 2); - - uint8_t para0Type = pPara0->resType.type; - uint8_t para1Type = pPara1->resType.type; - uint8_t para2Type = pPara2->resType.type; - if (TSDB_DATA_TYPE_VARBINARY == para0Type || (!IS_STR_DATA_TYPE(para0Type) && !IS_NULL_TYPE(para0Type)) || - TSDB_DATA_TYPE_VARBINARY == para1Type 
|| (!IS_STR_DATA_TYPE(para1Type) && !IS_NULL_TYPE(para1Type)) || - (!IS_INTEGER_TYPE(para2Type) && !IS_NULL_TYPE(para2Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = pPara0->resType.bytes, .type = pPara0->resType.type}; - return TSDB_CODE_SUCCESS; -} - static int32_t translateChar(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - SNode *node; - FOREACH(node, pFunc->pParameterList) { - uint8_t paraType = getSDataTypeFromNode(node)->type; - if (paraType == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(paraType) && !IS_NUMERIC_TYPE(paraType) && !IS_NULL_TYPE(paraType))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); pFunc->node.resType = (SDataType){.bytes = 4 * numOfParams + 2, .type = TSDB_DATA_TYPE_VARCHAR}; return TSDB_CODE_SUCCESS; } static int32_t translateAscii(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (paraType == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(paraType) && !IS_NULL_TYPE(paraType))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_UTINYINT].bytes, .type = TSDB_DATA_TYPE_UTINYINT}; return TSDB_CODE_SUCCESS; } -static int32_t translatePosition(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para0Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (para0Type == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(para0Type) && !IS_NULL_TYPE(para0Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (para1Type == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(para1Type) && !IS_NULL_TYPE(para1Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - static int32_t translateTrim(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList) && 1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); + uint8_t para0Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (para0Type == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(para0Type) && !IS_NULL_TYPE(para0Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - int32_t resLen = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->bytes; uint8_t type = para0Type; if (2 == LIST_LENGTH(pFunc->pParameterList)) { uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (para1Type == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(para1Type) && !IS_NULL_TYPE(para1Type))) { - return 
invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } resLen = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->bytes; type = para1Type; } @@ -2414,17 +1338,7 @@ static int32_t translateTrim(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { } static int32_t translateReplace(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (3 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - for (int32_t i = 0; i < 3; ++i) { - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, i))->type; - if (paraType == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(paraType) && !IS_NULL_TYPE(paraType))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); uint8_t orgType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; uint8_t fromType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; @@ -2448,20 +1362,7 @@ static int32_t translateReplace(SFunctionNode* pFunc, char* pErrBuf, int32_t len } static int32_t translateRepeat(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para0Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (para0Type == TSDB_DATA_TYPE_VARBINARY || - (!IS_STR_DATA_TYPE(para0Type) && !IS_NULL_TYPE(para0Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_INTEGER_TYPE(para1Type) && !IS_NULL_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); uint8_t type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; int32_t orgLen = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->bytes; @@ -2512,25 +1413,11 @@ static int32_t translateCast(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { } static int32_t translateToIso8601(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (1 != numOfParams && 2 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - // param0 - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_INTEGER_TYPE(paraType) && !IS_TIMESTAMP_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - // param1 if (numOfParams == 2) { - SNode* pNode = (SNode*)nodesListGetNode(pFunc->pParameterList, 1); - if (QUERY_NODE_VALUE != nodeType(pNode)) { - return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, "Not supported timzone format"); - } - - SValueNode* pValue = (SValueNode*)pNode; + SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, 1); if (!validateTimezoneFormat(pValue)) { return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, "Invalid timzone format"); } @@ -2547,24 +1434,10 @@ static int32_t translateToIso8601(SFunctionNode* pFunc, char* pErrBuf, int32_t l } static int32_t translateToUnixtimestamp(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); int32_t numOfParams = 
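/* one or two arguments; the optional second flag below switches the result type to TIMESTAMP */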
LIST_LENGTH(pFunc->pParameterList); int16_t resType = TSDB_DATA_TYPE_BIGINT; - - if (1 != numOfParams && 2 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (para1Type == TSDB_DATA_TYPE_VARBINARY || !IS_STR_DATA_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - if (2 == numOfParams) { - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_INTEGER_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, 1); if (pValue->datum.i == 1) { resType = TSDB_DATA_TYPE_TIMESTAMP; @@ -2588,71 +1461,17 @@ static int32_t translateToUnixtimestamp(SFunctionNode* pFunc, char* pErrBuf, int } static int32_t translateToTimestamp(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (LIST_LENGTH(pFunc->pParameterList) != 2) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_STR_DATA_TYPE(para1Type) || !IS_STR_DATA_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_TIMESTAMP].bytes, .type = TSDB_DATA_TYPE_TIMESTAMP}; return TSDB_CODE_SUCCESS; } -static int32_t translateToChar(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (LIST_LENGTH(pFunc->pParameterList) != 2) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - // currently only support to_char(timestamp, str) - if (!IS_STR_DATA_TYPE(para2Type) || !IS_TIMESTAMP_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - pFunc->node.resType = (SDataType){.bytes = 4096, .type = TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} - static int32_t translateTimeTruncate(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (2 != numOfParams && 3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if ((!IS_STR_DATA_TYPE(para1Type) && !IS_INTEGER_TYPE(para1Type) && !IS_TIMESTAMP_TYPE(para1Type)) || - !IS_INTEGER_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); uint8_t dbPrec = pFunc->node.resType.precision; - int32_t code = validateTimeUnitParam(dbPrec, (SValueNode*)nodesListGetNode(pFunc->pParameterList, 1)); - if (code == TSDB_CODE_FUNC_TIME_UNIT_TOO_SMALL) { - return buildFuncErrMsg(pErrBuf, len, code, - "TIMETRUNCATE function time unit parameter should be greater than db precision"); - } else if (code == TSDB_CODE_FUNC_TIME_UNIT_INVALID) { - return buildFuncErrMsg( - pErrBuf, len, code, - 
"TIMETRUNCATE function time unit parameter should be one of the following: [1b, 1u, 1a, 1s, 1m, 1h, 1d, 1w]"); - } - - if (3 == numOfParams) { - uint8_t para3Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type; - if (!IS_INTEGER_TYPE(para3Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, 2); - if (pValue->datum.i != 0 && pValue->datum.i != 1) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - } - // add database precision as param - - code = addUint8Param(&pFunc->pParameterList, dbPrec); + int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); if (code != TSDB_CODE_SUCCESS) { return code; } @@ -2668,121 +1487,8 @@ static int32_t translateTimeTruncate(SFunctionNode* pFunc, char* pErrBuf, int32_ return TSDB_CODE_SUCCESS; } -static int32_t translateTimeDiff(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); - if (2 != numOfParams && 3 != numOfParams) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - for (int32_t i = 0; i < 2; ++i) { - uint8_t paraType = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, i))->type; - if (!IS_STR_DATA_TYPE(paraType) && !IS_INTEGER_TYPE(paraType) && !IS_TIMESTAMP_TYPE(paraType) && !IS_NULL_TYPE(paraType)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - uint8_t para2Type; - if (3 == numOfParams) { - para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 2))->type; - if (!IS_INTEGER_TYPE(para2Type) && !IS_NULL_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - } - - // add database precision as param - uint8_t dbPrec = pFunc->node.resType.precision; - - if (3 == numOfParams && !IS_NULL_TYPE(para2Type)) { - int32_t code = validateTimeUnitParam(dbPrec, (SValueNode*)nodesListGetNode(pFunc->pParameterList, 2)); - if (code == TSDB_CODE_FUNC_TIME_UNIT_TOO_SMALL) { - return buildFuncErrMsg(pErrBuf, len, code, - "TIMEDIFF function time unit parameter should be greater than db precision"); - } else if (code == TSDB_CODE_FUNC_TIME_UNIT_INVALID) { - return buildFuncErrMsg( - pErrBuf, len, code, - "TIMEDIFF function time unit parameter should be one of the following: [1b, 1u, 1a, 1s, 1m, 1h, 1d, 1w]"); - } - } - - int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); - if (code != TSDB_CODE_SUCCESS) { - return code; - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateWeekday(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if ((!IS_STR_DATA_TYPE(para1Type) && !IS_INTEGER_TYPE(para1Type) && - !IS_TIMESTAMP_TYPE(para1Type) && !IS_NULL_TYPE(para1Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - // add database precision as param - uint8_t dbPrec = pFunc->node.resType.precision; - - int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); - if (code != TSDB_CODE_SUCCESS) { - return code; - } - - pFunc->node.resType = - (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - 
return TSDB_CODE_SUCCESS; -} - -static int32_t translateWeek(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList) && 2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if ((!IS_STR_DATA_TYPE(para1Type) && !IS_INTEGER_TYPE(para1Type) && - !IS_TIMESTAMP_TYPE(para1Type)) && !IS_NULL_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - if (2 == LIST_LENGTH(pFunc->pParameterList)) { - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if (!IS_INTEGER_TYPE(para2Type) && !IS_NULL_TYPE(para2Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - if (IS_INTEGER_TYPE(para2Type)) { - SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, 1); - if (pValue->datum.i < 0 || pValue->datum.i > 7) { - return invaildFuncParaValueErrMsg(pErrBuf, len, pFunc->functionName); - } - } - } - - // add database precision as param - uint8_t dbPrec = pFunc->node.resType.precision; - - int32_t code = addUint8Param(&pFunc->pParameterList, dbPrec); - if (code != TSDB_CODE_SUCCESS) { - return code; - } - - pFunc->node.resType = - (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateWeekofyear(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if ((!IS_STR_DATA_TYPE(para1Type) && !IS_INTEGER_TYPE(para1Type) && - !IS_TIMESTAMP_TYPE(para1Type)) && !IS_NULL_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } +static int32_t translateAddPrecOutBigint(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); // add database precision as param uint8_t dbPrec = pFunc->node.resType.precision; @@ -2798,78 +1504,27 @@ static int32_t translateWeekofyear(SFunctionNode* pFunc, char* pErrBuf, int32_t } static int32_t translateToJson(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - SExprNode* pPara = (SExprNode*)nodesListGetNode(pFunc->pParameterList, 0); - if (QUERY_NODE_VALUE != nodeType(pPara) || TSDB_DATA_TYPE_VARBINARY == pPara->resType.type || (!IS_VAR_DATA_TYPE(pPara->resType.type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_JSON].bytes, .type = TSDB_DATA_TYPE_JSON}; return TSDB_CODE_SUCCESS; } -static int32_t translateInStrOutGeom(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (!IS_STR_DATA_TYPE(para1Type) && !IS_NULL_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - +static int32_t translateOutGeom(SFunctionNode* pFunc, char* pErrBuf, 
int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_GEOMETRY].bytes, .type = TSDB_DATA_TYPE_GEOMETRY}; return TSDB_CODE_SUCCESS; } static int32_t translateInGeomOutStr(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (para1Type != TSDB_DATA_TYPE_GEOMETRY && !IS_NULL_TYPE(para1Type)) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_VARCHAR].bytes, .type = TSDB_DATA_TYPE_VARCHAR}; return TSDB_CODE_SUCCESS; } -static int32_t translateIn2NumOutGeom(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if ((!IS_NUMERIC_TYPE(para1Type) && !IS_NULL_TYPE(para1Type)) || - (!IS_NUMERIC_TYPE(para2Type) && !IS_NULL_TYPE(para2Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_GEOMETRY].bytes, .type = TSDB_DATA_TYPE_GEOMETRY}; - - return TSDB_CODE_SUCCESS; -} - static int32_t translateIn2GeomOutBool(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (2 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); - } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - uint8_t para2Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 1))->type; - if ((para1Type != TSDB_DATA_TYPE_GEOMETRY && !IS_NULL_TYPE(para1Type)) || - (para2Type != TSDB_DATA_TYPE_GEOMETRY && !IS_NULL_TYPE(para2Type))) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BOOL].bytes, .type = TSDB_DATA_TYPE_BOOL}; return TSDB_CODE_SUCCESS; @@ -2885,11 +1540,6 @@ static int32_t translateBlockDistFunc(SFunctionNode* pFunc, char* pErrBuf, int32 return TSDB_CODE_SUCCESS; } -static int32_t translateBlockDistInfoFunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = 128, .type = TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} - static bool getBlockDistFuncEnv(SFunctionNode* UNUSED_PARAM(pFunc), SFuncExecEnv* pEnv) { pEnv->calcMemSize = sizeof(STableBlockDistInfo); return true; @@ -2899,63 +1549,104 @@ static int32_t translateGroupKey(SFunctionNode* pFunc, char* pErrBuf, int32_t le if (1 != LIST_LENGTH(pFunc->pParameterList)) { return TSDB_CODE_SUCCESS; } - - SNode* pPara = nodesListGetNode(pFunc->pParameterList, 0); - pFunc->node.resType = ((SExprNode*)pPara)->resType; + pFunc->node.resType = *getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0)); return TSDB_CODE_SUCCESS; } -static int32_t translateDatabaseFunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = TSDB_DB_NAME_LEN, .type = 
TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateClientVersionFunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = TSDB_VERSION_LEN, .type = TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateServerVersionFunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = TSDB_VERSION_LEN, .type = TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} static int32_t translateServerStatusFunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_INT].bytes, .type = TSDB_DATA_TYPE_INT}; return TSDB_CODE_SUCCESS; } -static int32_t translateCurrentUserFunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = TSDB_USER_LEN, .type = TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateUserFunc(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = TSDB_USER_LEN, .type = TSDB_DATA_TYPE_VARCHAR}; - return TSDB_CODE_SUCCESS; -} - static int32_t translateTagsPseudoColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { // The _tags pseudo-column will be expanded to the actual tags on the client side return TSDB_CODE_SUCCESS; } -static int32_t translateTableCountPseudoColumn(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - pFunc->node.resType = (SDataType){.bytes = tDataTypes[TSDB_DATA_TYPE_BIGINT].bytes, .type = TSDB_DATA_TYPE_BIGINT}; - return TSDB_CODE_SUCCESS; -} - -static int32_t translateMd5(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { - if (1 != LIST_LENGTH(pFunc->pParameterList)) { - return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName); +static int32_t translateOutVarchar(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); + int32_t bytes = 0; + switch (pFunc->funcType) { + case FUNCTION_TYPE_MD5: + bytes = MD5_OUTPUT_LEN + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_USER: + case FUNCTION_TYPE_CURRENT_USER: + bytes = TSDB_USER_LEN; + break; + case FUNCTION_TYPE_SERVER_VERSION: + case FUNCTION_TYPE_CLIENT_VERSION: + bytes = TSDB_VERSION_LEN; + break; + case FUNCTION_TYPE_DATABASE: + bytes = TSDB_DB_NAME_LEN; + break; + case FUNCTION_TYPE_BLOCK_DIST: + case FUNCTION_TYPE_BLOCK_DIST_INFO: + bytes = 128; + break; + case FUNCTION_TYPE_TO_CHAR: + bytes = 4096; + break; + case FUNCTION_TYPE_HYPERLOGLOG_STATE_MERGE: + case FUNCTION_TYPE_HYPERLOGLOG_PARTIAL: + case FUNCTION_TYPE_HYPERLOGLOG_STATE: + bytes = getHistogramInfoSize() + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_SPREAD_PARTIAL: + case FUNCTION_TYPE_SPREAD_STATE: + case FUNCTION_TYPE_SPREAD_STATE_MERGE: + bytes = getSpreadInfoSize() + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_APERCENTILE_PARTIAL: + bytes = getApercentileMaxSize() + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_STD_STATE: + case FUNCTION_TYPE_STD_STATE_MERGE: + case FUNCTION_TYPE_STD_PARTIAL: + bytes = getStdInfoSize() + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_AVG_PARTIAL: + case FUNCTION_TYPE_AVG_STATE: + case FUNCTION_TYPE_AVG_STATE_MERGE: + bytes = getAvgInfoSize() + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_HISTOGRAM_PARTIAL: + bytes = getHistogramInfoSize() + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_HISTOGRAM: + case FUNCTION_TYPE_HISTOGRAM_MERGE: + bytes = 512; + break; + case FUNCTION_TYPE_LEASTSQUARES: + bytes 
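/* fixed-size text buffer for the least-squares fit result string */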
= LEASTSQUARES_BUFF_LENGTH; + break; + case FUNCTION_TYPE_TBNAME: + bytes = TSDB_TABLE_FNAME_LEN - 1 + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_TIMEZONE: + bytes = TD_TIMEZONE_LEN; + break; + case FUNCTION_TYPE_IRATE_PARTIAL: + bytes = getIrateInfoSize((pFunc->hasPk) ? pFunc->pkBytes : 0) + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_FIRST_PARTIAL: + case FUNCTION_TYPE_LAST_PARTIAL: + case FUNCTION_TYPE_FIRST_STATE: + case FUNCTION_TYPE_LAST_STATE: + bytes = getFirstLastInfoSize(getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->bytes, + (pFunc->hasPk) ? pFunc->pkBytes : 0) + VARSTR_HEADER_SIZE; + break; + case FUNCTION_TYPE_FIRST_STATE_MERGE: + case FUNCTION_TYPE_LAST_STATE_MERGE: + bytes = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->bytes; + break; + default: + bytes = 0; + break; } - - uint8_t para1Type = getSDataTypeFromNode(nodesListGetNode(pFunc->pParameterList, 0))->type; - if (para1Type != TSDB_DATA_TYPE_VARCHAR) { - return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName); - } - - pFunc->node.resType = (SDataType){.bytes = MD5_OUTPUT_LEN + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_VARCHAR}; + pFunc->node.resType = (SDataType){.bytes = bytes, .type = TSDB_DATA_TYPE_VARCHAR}; return TSDB_CODE_SUCCESS; } @@ -2965,7 +1656,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "count", .type = FUNCTION_TYPE_COUNT, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_TSMA_FUNC | FUNC_MGT_COUNT_LIKE_FUNC, - .translateFunc = translateCount, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .dataRequiredFunc = countDataRequired, .getEnvFunc = getCountFuncEnv, .initFunc = functionSetup, @@ -2984,6 +1688,18 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "sum", .type = FUNCTION_TYPE_SUM, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_TSMA_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_UBIGINT_TYPE}}, .translateFunc = translateSum, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getSumFuncEnv, @@ -3003,6 +1719,18 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "min", .type = FUNCTION_TYPE_MIN, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_SELECT_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_TSMA_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = 
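/* i.e. the argument may be any expression node, not only a constant value */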
FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE}}, .translateFunc = translateMinMax, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getMinmaxFuncEnv, @@ -3019,6 +1747,18 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "max", .type = FUNCTION_TYPE_MAX, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_SELECT_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_TSMA_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE}}, .translateFunc = translateMinMax, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getMinmaxFuncEnv, @@ -3035,7 +1775,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "stddev", .type = FUNCTION_TYPE_STDDEV, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunction, @@ -3053,7 +1806,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_std_partial", .type = FUNCTION_TYPE_STD_PARTIAL, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateStdPartial, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunction, @@ -3067,7 +1832,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_stddev_merge", .type = FUNCTION_TYPE_STDDEV_MERGE, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateStdMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunctionMerge, @@ -3083,7 +1860,27 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = 
"leastsquares", .type = FUNCTION_TYPE_LEASTSQUARES, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC, - .translateFunc = translateLeastSQR, + .parameters = {.minParamNum = 3, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getLeastSQRFuncEnv, .initFunc = leastSQRFunctionSetup, .processFunc = leastSQRFunction, @@ -3098,7 +1895,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "avg", .type = FUNCTION_TYPE_AVG, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getAvgFuncEnv, .initFunc = avgFunctionSetup, @@ -3118,7 +1928,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_avg_partial", .type = FUNCTION_TYPE_AVG_PARTIAL, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateAvgPartial, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getAvgFuncEnv, .initFunc = avgFunctionSetup, @@ -3133,7 +1955,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_avg_merge", .type = FUNCTION_TYPE_AVG_MERGE, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateAvgMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getAvgFuncEnv, .initFunc = avgFunctionSetup, .processFunc = avgFunctionMerge, @@ -3149,6 +1983,27 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "percentile", .type = FUNCTION_TYPE_PERCENTILE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_REPEAT_SCAN_FUNC 
| FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_FORBID_STREAM_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 11, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 11, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translatePercentile, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getPercentileFuncEnv, @@ -3166,7 +2021,38 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "apercentile", .type = FUNCTION_TYPE_APERCENTILE, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateApercentile, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 2, + .fixedStrValue = {"default", "t-digest"}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getApercentileFuncEnv, .initFunc = apercentileFunctionSetup, .processFunc = apercentileFunction, @@ -3184,7 +2070,38 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_apercentile_partial", .type = FUNCTION_TYPE_APERCENTILE_PARTIAL, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateApercentilePartial, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 2, + .fixedStrValue = {"default", "t-digest"}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = 
translateOutVarchar, .getEnvFunc = getApercentileFuncEnv, .initFunc = apercentileFunctionSetup, .processFunc = apercentileFunction, @@ -3198,7 +2115,38 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_apercentile_merge", .type = FUNCTION_TYPE_APERCENTILE_MERGE, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateApercentileMerge, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 2, + .fixedStrValue = {"default", "t-digest"}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getApercentileFuncEnv, .initFunc = apercentileFunctionSetup, .processFunc = apercentileFunctionMerge, @@ -3213,7 +2161,28 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_TOP, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_ROWS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_FILL_FUNC | FUNC_MGT_IGNORE_NULL_FUNC, - .translateFunc = translateTopBot, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 1.0, .dMaxVal = TOP_BOTTOM_QUERY_LIMIT}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getTopBotFuncEnv, .initFunc = topBotFunctionSetup, .processFunc = topFunction, @@ -3229,7 +2198,28 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_BOTTOM, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_ROWS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_FILL_FUNC | FUNC_MGT_IGNORE_NULL_FUNC, - .translateFunc = translateTopBot, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = 
FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 1.0, .dMaxVal = TOP_BOTTOM_QUERY_LIMIT}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getTopBotFuncEnv, .initFunc = topBotFunctionSetup, .processFunc = bottomFunction, @@ -3244,7 +2234,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "spread", .type = FUNCTION_TYPE_SPREAD, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateSpread, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getSpreadFuncEnv, .initFunc = spreadFunctionSetup, @@ -3263,7 +2265,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_spread_partial", .type = FUNCTION_TYPE_SPREAD_PARTIAL, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateSpreadPartial, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getSpreadFuncEnv, .initFunc = spreadFunctionSetup, @@ -3277,8 +2291,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { { .name = "_spread_merge", .type = FUNCTION_TYPE_SPREAD_MERGE, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateSpreadMerge, + .translateFunc = translateOutDouble, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getSpreadFuncEnv, .initFunc = spreadFunctionSetup, @@ -3296,8 +2322,30 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_ELAPSED, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_INTERVAL_INTERPO_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_COLUMN_NODE, + .isTs = true, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = 
FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isTimeUnit = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .dataRequiredFunc = statisDataRequired, - .translateFunc = translateElapsed, + .translateFunc = translateOutDouble, .getEnvFunc = getElapsedFuncEnv, .initFunc = elapsedFunctionSetup, .processFunc = elapsedFunction, @@ -3342,69 +2390,198 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_INTERP, .classification = FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_INTERVAL_INTERPO_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateInterp, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_BOOL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_NOT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 2, + .fixedNumValue = {0, 1}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getSelectivityFuncEnv, .initFunc = functionSetup, .processFunc = NULL, .finalizeFunc = NULL, - .estimateReturnRowsFunc = interpEstReturnRows + .estimateReturnRowsFunc = interpEstReturnRows, }, { .name = "derivative", .type = FUNCTION_TYPE_DERIVATIVE, .classification = FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_CUMULATIVE_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateDerivative, + .parameters = {.minParamNum = 3, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 1.0, .dMaxVal = DBL_MAX}}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 2, + .fixedNumValue = {0, 1}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getDerivativeFuncEnv, .initFunc = derivativeFuncSetup, .processFunc = derivativeFunction, .sprocessFunc = derivativeScalarFunction, .finalizeFunc = functionFinalize, - .estimateReturnRowsFunc = derivativeEstReturnRows + .estimateReturnRowsFunc = derivativeEstReturnRows, }, { .name = "irate", .type = FUNCTION_TYPE_IRATE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TIMELINE_FUNC | 
FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateIrate, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateAddPrecOutDouble, .getEnvFunc = getIrateFuncEnv, .initFunc = irateFuncSetup, .processFunc = irateFunction, .sprocessFunc = irateScalarFunction, .finalizeFunc = irateFinalize, .pPartialFunc = "_irate_partial", - .pMergeFunc = "_irate_merge" + .pMergeFunc = "_irate_merge", }, { .name = "_irate_partial", .type = FUNCTION_TYPE_IRATE_PARTIAL, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateIratePartial, + .parameters = {.minParamNum = 3, + .maxParamNum = 4, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_TINYINT_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][2] = {.isLastParam = false, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_COLUMN_NODE, + .isPK = false, + .isTs = true, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][3] = {.isLastParam = true, + .startParam = 4, + .endParam = 4, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_COLUMN_NODE, + .isPK = true, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getIrateFuncEnv, .initFunc = irateFuncSetup, .processFunc = irateFunction, .sprocessFunc = irateScalarFunction, - .finalizeFunc = iratePartialFinalize + .finalizeFunc = iratePartialFinalize, }, { .name = "_irate_merge", .type = FUNCTION_TYPE_IRATE_MERGE, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateIrateMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateAddPrecOutDouble, .getEnvFunc = getIrateFuncEnv, .initFunc = irateFuncSetup, .processFunc = irateFunctionMerge, .sprocessFunc = irateScalarFunction, - .finalizeFunc = irateFinalize + .finalizeFunc = irateFinalize, }, { .name = "last_row", .type = FUNCTION_TYPE_LAST_ROW, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | 
FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLast, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .dynDataRequiredFunc = lastDynDataReq, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, @@ -3413,35 +2590,74 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .pPartialFunc = "_last_row_partial", .pMergeFunc = "_last_row_merge", .finalizeFunc = firstLastFinalize, - .combineFunc = lastCombine + .combineFunc = lastCombine, }, { .name = "_cache_last_row", .type = FUNCTION_TYPE_CACHE_LAST_ROW, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC, - .translateFunc = translateFirstLast, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = cachedLastRowFunction, - .finalizeFunc = firstLastFinalize + .finalizeFunc = firstLastFinalize, }, { .name = "_cache_last", .type = FUNCTION_TYPE_CACHE_LAST, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_IGNORE_NULL_FUNC, - .translateFunc = translateFirstLast, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = lastFunctionMerge, - .finalizeFunc = firstLastFinalize + .finalizeFunc = firstLastFinalize, }, { .name = "_last_row_partial", .type = FUNCTION_TYPE_LAST_PARTIAL, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastPartial, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .dynDataRequiredFunc = lastDynDataReq, .getEnvFunc = 
getFirstLastFuncEnv, .initFunc = functionSetup, @@ -3453,7 +2669,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_LAST_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = lastFunctionMerge, @@ -3464,7 +2693,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_FIRST, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateFirstLast, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .dynDataRequiredFunc = firstDynDataReq, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, @@ -3481,7 +2723,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_FIRST_PARTIAL, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastPartial, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .dynDataRequiredFunc = firstDynDataReq, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, @@ -3494,7 +2748,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_FIRST_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + 
.translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = firstFunctionMerge, @@ -3508,7 +2775,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_LAST, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateFirstLast, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .dynDataRequiredFunc = lastDynDataReq, .getEnvFunc = getFirstLastFuncEnv, .initFunc = firstLastFunctionSetup, @@ -3525,7 +2805,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_LAST_PARTIAL, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastPartial, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .dynDataRequiredFunc = lastDynDataReq, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, @@ -3538,7 +2831,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_LAST_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_IGNORE_NULL_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = lastFunctionMerge, @@ -3552,19 +2858,75 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_TWA, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_INTERVAL_INTERPO_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = 
FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getTwaFuncEnv, .initFunc = twaFunctionSetup, .processFunc = twaFunction, .sprocessFunc = twaScalarFunction, - .finalizeFunc = twaFinalize + .finalizeFunc = twaFinalize, }, { .name = "histogram", .type = FUNCTION_TYPE_HISTOGRAM, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_ROWS_FUNC | FUNC_MGT_FORBID_FILL_FUNC | FUNC_MGT_FORBID_STREAM_FUNC, - .translateFunc = translateHistogram, + .parameters = {.minParamNum = 4, + .maxParamNum = 4, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedStrValue = {"user_input", "linear_bin", "log_bin"}}, + .inputParaInfo[0][2] = {.isLastParam = false, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .isHistogramBin = true, + .hasRange = false}, + .inputParaInfo[0][3] = {.isLastParam = true, + .startParam = 4, + .endParam = 4, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedNumValue = {0, 1}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getHistogramFuncEnv, .initFunc = histogramFunctionSetup, .processFunc = histogramFunction, @@ -3581,7 +2943,49 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_histogram_partial", .type = FUNCTION_TYPE_HISTOGRAM_PARTIAL, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_ROWS_FUNC | FUNC_MGT_FORBID_FILL_FUNC, - .translateFunc = translateHistogramPartial, + .parameters = {.minParamNum = 4, + .maxParamNum = 4, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedStrValue = {"user_input", "linear_bin", "log_bin"}}, + .inputParaInfo[0][2] = {.isLastParam = false, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][3] = {.isLastParam = true, + .startParam = 4, + .endParam = 4, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK 
= false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedNumValue = {0, 1}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getHistogramFuncEnv, .initFunc = histogramFunctionSetup, .processFunc = histogramFunctionPartial, @@ -3595,7 +2999,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_histogram_merge", .type = FUNCTION_TYPE_HISTOGRAM_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_ROWS_FUNC | FUNC_MGT_FORBID_FILL_FUNC, - .translateFunc = translateHistogramMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getHistogramFuncEnv, .initFunc = functionSetup, .processFunc = histogramFunctionMerge, @@ -3609,7 +3026,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "hyperloglog", .type = FUNCTION_TYPE_HYPERLOGLOG, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_COUNT_LIKE_FUNC, - .translateFunc = translateHLL, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = getHLLFuncEnv, .initFunc = functionSetup, .processFunc = hllFunction, @@ -3620,13 +3050,26 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { #endif .combineFunc = hllCombine, .pPartialFunc = "_hyperloglog_partial", - .pMergeFunc = "_hyperloglog_merge" + .pMergeFunc = "_hyperloglog_merge", }, { .name = "_hyperloglog_partial", .type = FUNCTION_TYPE_HYPERLOGLOG_PARTIAL, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateHLLPartial, + .translateFunc = translateOutVarchar, .getEnvFunc = getHLLFuncEnv, .initFunc = functionSetup, .processFunc = hllFunction, @@ -3639,8 +3082,21 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { { .name = "_hyperloglog_merge", .type = FUNCTION_TYPE_HYPERLOGLOG_MERGE, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateHLLMerge, + .translateFunc = translateOutBigInt, .getEnvFunc = getHLLFuncEnv, 
.initFunc = functionSetup, .processFunc = hllFunctionMerge, @@ -3656,6 +3112,30 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_DIFF, .classification = FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_PROCESS_BY_ROW | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_CUMULATIVE_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE | FUNC_PARAM_SUPPORT_BOOL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 4, + .fixedNumValue = {0, 1, 2, 3}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateDiff, .getEnvFunc = getDiffFuncEnv, .initFunc = diffFunctionSetup, @@ -3670,30 +3150,119 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_STATE_COUNT, .classification = FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC, - .translateFunc = translateStateCount, + .parameters = {.minParamNum = 3, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 6, + .fixedStrValue = {"LT", "GT", "LE", "GE", "NE", "EQ"}}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_BIGINT_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = getStateFuncEnv, .initFunc = functionSetup, .processFunc = stateCountFunction, .sprocessFunc = stateCountScalarFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "stateduration", .type = FUNCTION_TYPE_STATE_DURATION, .classification = FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC, - .translateFunc = translateStateDuration, + .parameters = {.minParamNum = 3, + .maxParamNum = 4, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = 
FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 6, + .fixedStrValue = {"LT", "GT", "LE", "GE", "NE", "EQ"}}, + .inputParaInfo[0][2] = {.isLastParam = false, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_BIGINT_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][3] = {.isLastParam = true, + .startParam = 4, + .endParam = 4, + .validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false, + .isTimeUnit = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = getStateFuncEnv, .initFunc = functionSetup, .processFunc = stateDurationFunction, .sprocessFunc = stateDurationScalarFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "csum", .type = FUNCTION_TYPE_CSUM, .classification = FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_CUMULATIVE_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_UBIGINT_TYPE}}, .translateFunc = translateCsum, .getEnvFunc = getCsumFuncEnv, .initFunc = functionSetup, @@ -3707,438 +3276,1045 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_MAVG, .classification = FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_TIMELINE_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC, - .translateFunc = translateMavg, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 1.0, .dMaxVal = 1000.0}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getMavgFuncEnv, .initFunc = mavgFunctionSetup, .processFunc = mavgFunction, .sprocessFunc = mavgScalarFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "sample", .type = FUNCTION_TYPE_SAMPLE, .classification = 
FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_ROWS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_FORBID_FILL_FUNC, - .translateFunc = translateSample, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 1.0, .dMaxVal = 1000.0}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateSampleTail, .getEnvFunc = getSampleFuncEnv, .initFunc = sampleFunctionSetup, .processFunc = sampleFunction, .sprocessFunc = sampleScalarFunction, - .finalizeFunc = sampleFinalize + .finalizeFunc = sampleFinalize, }, { .name = "tail", .type = FUNCTION_TYPE_TAIL, .classification = FUNC_MGT_SELECT_FUNC | FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC, - .translateFunc = translateTail, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 1.0, .dMaxVal = 100.0}}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = true, + .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateSampleTail, .getEnvFunc = getTailFuncEnv, .initFunc = tailFunctionSetup, .processFunc = tailFunction, .sprocessFunc = tailScalarFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "unique", .type = FUNCTION_TYPE_UNIQUE, .classification = FUNC_MGT_SELECT_FUNC | FUNC_MGT_INDEFINITE_ROWS_FUNC | FUNC_MGT_FORBID_STREAM_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateUnique, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false, + .hasColumn = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getUniqueFuncEnv, .initFunc = uniqueFunctionSetup, .processFunc = uniqueFunction, .sprocessFunc = uniqueScalarFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "mode", .type = 
FUNCTION_TYPE_MODE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_FORBID_STREAM_FUNC, - .translateFunc = translateMode, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false, + .hasColumn = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = getModeFuncEnv, .initFunc = modeFunctionSetup, .processFunc = modeFunction, .sprocessFunc = modeScalarFunction, .finalizeFunc = modeFinalize, - .cleanupFunc = modeFunctionCleanupExt + .cleanupFunc = modeFunctionCleanupExt, }, { .name = "abs", .type = FUNCTION_TYPE_ABS, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, + .translateFunc = translateOutNum, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = absFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "log", .type = FUNCTION_TYPE_LOG, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateLogarithm, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = logFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "pow", .type = FUNCTION_TYPE_POW, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateIn2NumOutDou, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, 
.sprocessFunc = powFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "sqrt", .type = FUNCTION_TYPE_SQRT, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = sqrtFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "ceil", .type = FUNCTION_TYPE_CEIL, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, + .translateFunc = translateOutNum, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = ceilFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "floor", .type = FUNCTION_TYPE_FLOOR, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, + .translateFunc = translateOutNum, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = floorFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "round", .type = FUNCTION_TYPE_ROUND, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateRound, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutNum, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = roundFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "sin", .type = FUNCTION_TYPE_SIN, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + 
.startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = sinFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "cos", .type = FUNCTION_TYPE_COS, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = cosFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "tan", .type = FUNCTION_TYPE_TAN, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = tanFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "asin", .type = FUNCTION_TYPE_ASIN, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = asinFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "acos", .type = FUNCTION_TYPE_ACOS, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = acosFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "atan", .type = FUNCTION_TYPE_ATAN, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum 
= 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = atanFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "length", .type = FUNCTION_TYPE_LENGTH, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateLength, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = lengthFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "char_length", .type = FUNCTION_TYPE_CHAR_LENGTH, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateCharLength, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = charLengthFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "concat", .type = FUNCTION_TYPE_CONCAT, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 8, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 8, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateConcat, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = concatFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "concat_ws", .type = FUNCTION_TYPE_CONCAT_WS, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 3, + .maxParamNum = 9, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 9, + .validDataType = 
FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateConcatWs, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = concatWsFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "lower", .type = FUNCTION_TYPE_LOWER, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateInOutStr, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = lowerFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "upper", .type = FUNCTION_TYPE_UPPER, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateInOutStr, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = upperFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "ltrim", .type = FUNCTION_TYPE_LTRIM, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateLtrim, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = ltrimFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "rtrim", .type = FUNCTION_TYPE_RTRIM, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateRtrim, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = rtrimFunction, - .finalizeFunc = NULL + .finalizeFunc = 
NULL, }, { .name = "substr", .type = FUNCTION_TYPE_SUBSTR, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateSubstr, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = substrFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "cast", .type = FUNCTION_TYPE_CAST, .classification = FUNC_MGT_SCALAR_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_BOOL_TYPE | FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateCast, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = castFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "to_iso8601", .type = FUNCTION_TYPE_TO_ISO8601, .classification = FUNC_MGT_SCALAR_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateToIso8601, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = toISO8601Function, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "to_unixtimestamp", .type = FUNCTION_TYPE_TO_UNIXTIMESTAMP, .classification = FUNC_MGT_SCALAR_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = 
FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 2, + .fixedNumValue = {0, 1}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateToUnixtimestamp, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = toUnixtimestampFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "timetruncate", .type = FUNCTION_TYPE_TIMETRUNCATE, .classification = FUNC_MGT_SCALAR_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE | FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false, + .isTimeUnit = true}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 2, + .fixedNumValue = {0, 1}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateTimeTruncate, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = timeTruncateFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "timediff", .type = FUNCTION_TYPE_TIMEDIFF, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateTimeDiff, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE | FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false, + .isTimeUnit = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, + .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = timeDiffFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "now", .type = FUNCTION_TYPE_NOW, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateNowToday, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = nowFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "today", .type = FUNCTION_TYPE_TODAY, 
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateNowToday, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = todayFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "timezone", .type = FUNCTION_TYPE_TIMEZONE, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateTimezone, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = timezoneFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "tbname", .type = FUNCTION_TYPE_TBNAME, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, - .translateFunc = translateTbnameColumn, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = qPseudoTagFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "_qstart", .type = FUNCTION_TYPE_QSTART, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_CLIENT_PC_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateTimePseudoColumn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = NULL, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "_qend", .type = FUNCTION_TYPE_QEND, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_CLIENT_PC_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateTimePseudoColumn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = NULL, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "_qduration", .type = FUNCTION_TYPE_QDURATION, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_CLIENT_PC_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateWduration, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = NULL, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "_wstart", .type = FUNCTION_TYPE_WSTART, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_SKIP_SCAN_CHECK_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateTimePseudoColumn, .getEnvFunc = getTimePseudoFuncEnv, .initFunc = NULL, .sprocessFunc = winStartTsFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "_wend", .type = FUNCTION_TYPE_WEND, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_SKIP_SCAN_CHECK_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateTimePseudoColumn, 
.getEnvFunc = getTimePseudoFuncEnv, .initFunc = NULL, .sprocessFunc = winEndTsFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "_wduration", .type = FUNCTION_TYPE_WDURATION, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_SKIP_SCAN_CHECK_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateWduration, .getEnvFunc = getTimePseudoFuncEnv, .initFunc = NULL, .sprocessFunc = winDurFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "to_json", .type = FUNCTION_TYPE_TO_JSON, .classification = FUNC_MGT_SCALAR_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_JSON_TYPE}}, .translateFunc = translateToJson, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = toJsonFunction, - .finalizeFunc = NULL + .finalizeFunc = NULL, }, { .name = "_select_value", @@ -4150,13 +4326,17 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .processFunc = NULL, .finalizeFunc = NULL, .pPartialFunc = "_select_value", - .pMergeFunc = "_select_value" + .pMergeFunc = "_select_value", }, { .name = "_block_dist", .type = FUNCTION_TYPE_BLOCK_DIST, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_FORBID_STREAM_FUNC, - .translateFunc = translateBlockDistFunc, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getBlockDistFuncEnv, .initFunc = blockDistSetup, .processFunc = blockDistFunction, @@ -4166,7 +4346,11 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_block_dist_info", .type = FUNCTION_TYPE_BLOCK_DIST_INFO, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC, - .translateFunc = translateBlockDistInfoFunc, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, }, { .name = "_group_key", @@ -4185,42 +4369,70 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "database", .type = FUNCTION_TYPE_DATABASE, .classification = FUNC_MGT_SYSTEM_INFO_FUNC | FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateDatabaseFunc, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, }, { .name = "client_version", .type = FUNCTION_TYPE_CLIENT_VERSION, .classification = FUNC_MGT_SYSTEM_INFO_FUNC | FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateClientVersionFunc, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, }, { .name = "server_version", .type = FUNCTION_TYPE_SERVER_VERSION, .classification = FUNC_MGT_SYSTEM_INFO_FUNC | FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateServerVersionFunc, + .parameters = {.minParamNum = 
0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, }, { .name = "server_status", .type = FUNCTION_TYPE_SERVER_STATUS, .classification = FUNC_MGT_SYSTEM_INFO_FUNC | FUNC_MGT_SCALAR_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateServerStatusFunc, }, { .name = "current_user", .type = FUNCTION_TYPE_CURRENT_USER, .classification = FUNC_MGT_SYSTEM_INFO_FUNC | FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateCurrentUserFunc, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, }, { .name = "user", .type = FUNCTION_TYPE_USER, .classification = FUNC_MGT_SYSTEM_INFO_FUNC | FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateUserFunc, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, }, { .name = "_irowts", .type = FUNCTION_TYPE_IROWTS, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_INTERP_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateTimePseudoColumn, .getEnvFunc = getTimePseudoFuncEnv, .initFunc = NULL, @@ -4231,6 +4443,10 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_isfilled", .type = FUNCTION_TYPE_ISFILLED, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_INTERP_PC_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIsFilledPseudoColumn, .getEnvFunc = NULL, .initFunc = NULL, @@ -4251,7 +4467,11 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_table_count", .type = FUNCTION_TYPE_TABLE_COUNT, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC, - .translateFunc = translateTableCountPseudoColumn, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = NULL, @@ -4261,7 +4481,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_geomfromtext", .type = FUNCTION_TYPE_GEOM_FROM_TEXT, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, - .translateFunc = translateInStrOutGeom, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE}}, + .translateFunc = translateOutGeom, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = geomFromTextFunction, @@ -4271,6 +4504,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_astext", .type = FUNCTION_TYPE_AS_TEXT, .classification = FUNC_MGT_SCALAR_FUNC | 
FUNC_MGT_GEOMETRY_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE}}, .translateFunc = translateInGeomOutStr, .getEnvFunc = NULL, .initFunc = NULL, @@ -4281,7 +4527,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_makepoint", .type = FUNCTION_TYPE_MAKE_POINT, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, - .translateFunc = translateIn2NumOutGeom, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE}}, + .translateFunc = translateOutGeom, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = makePointFunction, @@ -4291,6 +4550,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_intersects", .type = FUNCTION_TYPE_INTERSECTS, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, .initFunc = NULL, @@ -4301,6 +4573,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_equals", .type = FUNCTION_TYPE_EQUALS, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, .initFunc = NULL, @@ -4311,6 +4596,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_touches", .type = FUNCTION_TYPE_TOUCHES, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, .initFunc = NULL, @@ -4321,6 +4619,19 @@ const 
SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_covers", .type = FUNCTION_TYPE_COVERS, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, .initFunc = NULL, @@ -4331,6 +4642,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_contains", .type = FUNCTION_TYPE_CONTAINS, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, .initFunc = NULL, @@ -4341,6 +4665,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "st_containsproperly", .type = FUNCTION_TYPE_CONTAINS_PROPERLY, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_GEOMETRY_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, .initFunc = NULL, @@ -4351,7 +4688,11 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_tbuid", .type = FUNCTION_TYPE_TBUID, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, - .translateFunc = translateTbUidColumn, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = qPseudoTagFunction, @@ -4361,6 +4702,10 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_vgid", .type = FUNCTION_TYPE_VGID, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_INT_TYPE}}, .translateFunc = translateVgIdColumn, .getEnvFunc = NULL, .initFunc = NULL, @@ -4371,6 +4716,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "to_timestamp", .type = FUNCTION_TYPE_TO_TIMESTAMP, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, + .validNodeType = 
FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateToTimestamp, .getEnvFunc = NULL, .initFunc = NULL, @@ -4381,7 +4739,29 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "to_char", .type = FUNCTION_TYPE_TO_CHAR, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateToChar, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = toCharFunction, @@ -4391,7 +4771,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_avg_middle", .type = FUNCTION_TYPE_AVG_PARTIAL, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateAvgMiddle, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .dataRequiredFunc = statisDataRequired, .getEnvFunc = getAvgFuncEnv, .initFunc = avgFunctionSetup, @@ -4406,7 +4799,11 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_vgver", .type = FUNCTION_TYPE_VGVER, .classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, - .translateFunc = translateVgVerColumn, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = qPseudoTagFunction, @@ -4416,7 +4813,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_std_state", .type = FUNCTION_TYPE_STD_STATE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateStdState, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunction, @@ -4428,7 +4838,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_std_state_merge", .type = FUNCTION_TYPE_STD_STATE_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - 
.translateFunc = translateStdStateMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunctionMerge, @@ -4438,7 +4861,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_avg_state", .type = FUNCTION_TYPE_AVG_STATE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateAvgState, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getAvgFuncEnv, .initFunc = avgFunctionSetup, .processFunc = avgFunction, @@ -4450,7 +4886,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_avg_state_merge", .type = FUNCTION_TYPE_AVG_STATE_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateAvgStateMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getAvgFuncEnv, .initFunc = avgFunctionSetup, .processFunc = avgFunctionMerge, @@ -4460,7 +4909,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_spread_state", .type = FUNCTION_TYPE_SPREAD_STATE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateSpreadState, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getSpreadFuncEnv, .initFunc = spreadFunctionSetup, .processFunc = spreadFunction, @@ -4472,7 +4934,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_spread_state_merge", .type = FUNCTION_TYPE_SPREAD_STATE_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateSpreadStateMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = 
FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getSpreadFuncEnv, .initFunc = spreadFunctionSetup, .processFunc = spreadFunctionMerge, @@ -4483,7 +4958,21 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_FIRST_STATE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_TSMA_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastState, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = firstFunction, @@ -4496,7 +4985,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_FIRST_STATE_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_TSMA_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastStateMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = firstFunctionMerge, @@ -4507,7 +5009,21 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_LAST_STATE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_TSMA_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastState, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false, + .isFirstLast = true}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = lastFunction, @@ -4520,16 +5036,43 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .type = FUNCTION_TYPE_LAST_STATE_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_MULTI_RES_FUNC | FUNC_MGT_IMPLICIT_TS_FUNC | FUNC_MGT_KEEP_ORDER_FUNC | FUNC_MGT_FORBID_SYSTABLE_FUNC | FUNC_MGT_TSMA_FUNC | FUNC_MGT_PRIMARY_KEY_FUNC, - .translateFunc = translateFirstLastStateMerge, + 
.parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, .initFunc = functionSetup, .processFunc = lastFunctionMerge, .finalizeFunc = firstLastPartialFinalize, }, - { .name = "_hyperloglog_state", + { + .name = "_hyperloglog_state", .type = FUNCTION_TYPE_HYPERLOGLOG_STATE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_COUNT_LIKE_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateHLLState, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getHLLFuncEnv, .initFunc = functionSetup, .processFunc = hllFunction, @@ -4541,7 +5084,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_hyperloglog_state_merge", .type = FUNCTION_TYPE_HYPERLOGLOG_STATE_MERGE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_COUNT_LIKE_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateHLLStateMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = getHLLFuncEnv, .initFunc = functionSetup, .processFunc = hllFunctionMerge, @@ -4551,7 +5107,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "md5", .type = FUNCTION_TYPE_MD5, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateMd5, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, + .translateFunc = translateOutVarchar, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = md5Function, @@ -4561,6 +5130,10 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_group_const_value", .type = FUNCTION_TYPE_GROUP_CONST_VALUE, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SELECT_FUNC | FUNC_MGT_KEEP_ORDER_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateSelectValue, .getEnvFunc = getSelectivityFuncEnv, .initFunc = functionSetup, @@ -4571,7 +5144,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "stddev_pop", .type = FUNCTION_TYPE_STDDEV, .classification = 
FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunction, @@ -4589,7 +5175,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "var_pop", .type = FUNCTION_TYPE_STDVAR, .classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_TSMA_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunction, @@ -4607,7 +5206,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "_stdvar_merge", .type = FUNCTION_TYPE_STDVAR_MERGE, .classification = FUNC_MGT_AGG_FUNC, - .translateFunc = translateStdMerge, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, .initFunc = stdFunctionSetup, .processFunc = stdFunctionMerge, @@ -4623,7 +5234,11 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "pi", .type = FUNCTION_TYPE_PI, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translatePi, + .parameters = {.minParamNum = 0, + .maxParamNum = 0, + .paramInfoPattern = 0, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = piFunction, @@ -4633,7 +5248,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "exp", .type = FUNCTION_TYPE_EXP, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = expFunction, @@ -4643,7 +5271,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "ln", .type = FUNCTION_TYPE_LN, .classification = FUNC_MGT_SCALAR_FUNC, - 
.translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = lnFunction, @@ -4653,7 +5294,29 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "mod", .type = FUNCTION_TYPE_MOD, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateIn2NumOutDou, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = modFunction, @@ -4663,7 +5326,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "sign", .type = FUNCTION_TYPE_SIGN, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInOutNum, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, + .translateFunc = translateOutNum, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = signFunction, @@ -4673,7 +5349,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "degrees", .type = FUNCTION_TYPE_DEGREES, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = degreesFunction, @@ -4683,7 +5372,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "radians", .type = FUNCTION_TYPE_RADIANS, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateInNumOutDou, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = 
FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutDouble, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = radiansFunction, @@ -4693,7 +5395,29 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "truncate", .type = FUNCTION_TYPE_TRUNCATE, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateTrunc, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = truncFunction, @@ -4703,7 +5427,29 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "trunc", .type = FUNCTION_TYPE_TRUNCATE, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateTrunc, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = truncFunction, @@ -4713,7 +5459,38 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "substring", .type = FUNCTION_TYPE_SUBSTR, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateSubstr, + .parameters = {.minParamNum = 2, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | 
FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = substrFunction, @@ -4723,7 +5500,38 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "substring_index", .type = FUNCTION_TYPE_SUBSTR_IDX, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, - .translateFunc = translateSubstrIdx, + .parameters = {.minParamNum = 3, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = false, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][2] = {.isLastParam = true, + .startParam = 3, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, + .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = substrIdxFunction, @@ -4733,6 +5541,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "char", .type = FUNCTION_TYPE_CHAR, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = -1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = -1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateChar, .getEnvFunc = NULL, .initFunc = NULL, @@ -4743,6 +5564,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "ascii", .type = FUNCTION_TYPE_ASCII, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateAscii, .getEnvFunc = NULL, .initFunc = NULL, @@ -4753,7 +5587,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "position", .type = FUNCTION_TYPE_POSITION, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, 
- .translateFunc = translatePosition, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateOutBigInt, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = positionFunction, @@ -4763,6 +5610,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "trim", .type = FUNCTION_TYPE_TRIM, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateTrim, .getEnvFunc = NULL, .initFunc = NULL, @@ -4773,6 +5633,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "replace", .type = FUNCTION_TYPE_REPLACE, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 3, + .maxParamNum = 3, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 3, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateReplace, .getEnvFunc = NULL, .initFunc = NULL, @@ -4783,6 +5656,28 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "repeat", .type = FUNCTION_TYPE_REPEAT, .classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC, + .parameters = {.minParamNum = 2, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateRepeat, .getEnvFunc = NULL, .initFunc = NULL, @@ -4793,7 +5688,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "weekday", .type = FUNCTION_TYPE_WEEKDAY, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateWeekday, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = 
true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = weekdayFunction, @@ -4803,7 +5711,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "dayofweek", .type = FUNCTION_TYPE_DAYOFWEEK, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateWeekday, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = dayofweekFunction, @@ -4813,7 +5734,31 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "week", .type = FUNCTION_TYPE_WEEK, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateWeek, + .parameters = {.minParamNum = 1, + .maxParamNum = 2, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = false, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .inputParaInfo[0][1] = {.isLastParam = true, + .startParam = 2, + .endParam = 2, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = true, + .hasRange = false, + .fixedValueSize = 8, + .fixedNumValue = {0, 1, 2, 3, 4, 5, 6, 7}}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = weekFunction, @@ -4823,7 +5768,20 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "weekofyear", .type = FUNCTION_TYPE_WEEKOFYEAR, .classification = FUNC_MGT_SCALAR_FUNC, - .translateFunc = translateWeekofyear, + .parameters = {.minParamNum = 1, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, + .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, .initFunc = NULL, .sprocessFunc = weekofyearFunction, @@ -4833,6 +5791,19 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .name = "rand", .type = FUNCTION_TYPE_RAND, .classification = FUNC_MGT_SCALAR_FUNC, + .parameters = {.minParamNum = 0, + .maxParamNum = 1, + .paramInfoPattern = 1, + .inputParaInfo[0][0] = {.isLastParam = true, + .startParam = 1, + .endParam = 1, + .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, + .validNodeType = 
FUNC_PARAM_SUPPORT_EXPR_NODE, + .isPK = false, + .isTs = false, + .isFixedValue = false, + .hasRange = false}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateRand, .getEnvFunc = NULL, .initFunc = NULL, diff --git a/source/util/src/terror.c b/source/util/src/terror.c index 9b85e82184..52a3be120d 100644 --- a/source/util/src/terror.c +++ b/source/util/src/terror.c @@ -761,6 +761,11 @@ TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_SETUP_ERROR, "Function set up fail TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_INVALID_RES_LENGTH, "Function result exceed max length") TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_HISTOGRAM_ERROR, "Function failed to calculate histogram") TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_PERCENTILE_ERROR, "Function failed to calculate percentile") +TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_FUNTION_PARA_RANGE, "Invalid function para range") +TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_FUNTION_PARA_PRIMTS, "Function parameter should be primary timestamp") +TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_FUNTION_PARA_PK, "Function parameter should be primary key") +TAOS_DEFINE_ERROR(TSDB_CODE_FUNC_FUNTION_PARA_HAS_COL, "Function parameter should have column") + //udf TAOS_DEFINE_ERROR(TSDB_CODE_UDF_STOPPING, "udf is stopping") diff --git a/tests/army/query/function/test_function.py b/tests/army/query/function/test_function.py index bf7cf49290..d54460804a 100644 --- a/tests/army/query/function/test_function.py +++ b/tests/army/query/function/test_function.py @@ -296,7 +296,7 @@ class TDTestCase(TBase): def test_error(self): tdSql.error("select * from (select to_iso8601(ts, timezone()), timezone() from ts_4893.meters \ - order by ts desc) limit 1000;", expectErrInfo="Not supported timzone format") # TS-5340 + order by ts desc) limit 1000;", expectErrInfo="Invalid parameter data type : to_iso8601") # TS-5340 def run(self): tdLog.debug(f"start to excute {__file__}") From 2a2c3d8b9087897dac99fb952b19264002246bc9 Mon Sep 17 00:00:00 2001 From: wangjiaming0909 <604227650@qq.com> Date: Thu, 24 Oct 2024 15:14:07 +0800 Subject: [PATCH 095/102] fix tsma test case --- tests/system-test/2-query/tsma.py | 6 +++--- tests/system-test/test.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/system-test/2-query/tsma.py b/tests/system-test/2-query/tsma.py index acde26d5ca..a1638ae4cb 100644 --- a/tests/system-test/2-query/tsma.py +++ b/tests/system-test/2-query/tsma.py @@ -1354,14 +1354,14 @@ class TDTestCase: tdSql.execute('alter table test.t0 ttl 2', queryTimes=1) tdSql.execute('flush database test') res_tb = TSMAQCBuilder().md5('1.test.tsma1_t0') - self.wait_query_err(f'desc `{res_tb}`', wait_query_seconds, -2147473917) + self.wait_query_err(f'desc test.`{res_tb}`', wait_query_seconds, -2147473917) # test drop multi tables tdSql.execute('drop table test.t3, test.t4') res_tb = TSMAQCBuilder().md5('1.test.tsma1_t3') - self.wait_query_err(f'desc `{res_tb}`', wait_query_seconds, -2147473917) + self.wait_query_err(f'desc test.`{res_tb}`', wait_query_seconds, -2147473917) res_tb = TSMAQCBuilder().md5('1.test.tsma1_t4') - self.wait_query_err(f'desc `{res_tb}`', wait_query_seconds, -2147473917) + self.wait_query_err(f'desc test.`{res_tb}`', wait_query_seconds, -2147473917) # test drop stream tdSql.error('drop stream tsma1', -2147471088) ## TSMA must be dropped first diff --git a/tests/system-test/test.py b/tests/system-test/test.py index 57a4789f2e..fb3357a2b9 100644 --- a/tests/system-test/test.py +++ b/tests/system-test/test.py @@ -689,6 +689,6 @@ if __name__ == "__main__": if conn is not None: 
            conn.close()
     if asan:
-        tdDnodes.StopAllSigint()
+        #tdDnodes.StopAllSigint()
         tdLog.info("Address sanitizer mode finished")
     sys.exit(0)

From c642531e2f2e1d3591057faa4bd825095f51d421 Mon Sep 17 00:00:00 2001
From: Minglei Jin
Date: Thu, 24 Oct 2024 15:58:39 +0800
Subject: [PATCH 096/102] fix(az/stream): catch all exceptions from cpp

---
 source/libs/azure/src/az.cpp | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp
index 831694356a..5989a7c333 100644
--- a/source/libs/azure/src/az.cpp
+++ b/source/libs/azure/src/az.cpp
@@ -416,7 +416,7 @@ _exit:
   return 0;
 }

-int32_t azGetObjectToFile(const char *object_name, const char *fileName) {
+int32_t azGetObjectToFileImpl(const char *object_name, const char *fileName) {
   int32_t code = TSDB_CODE_SUCCESS;
   std::string accountName = tsS3AccessKeyId[0];
   std::string accountKey = tsS3AccessKeySecret[0];
@@ -450,6 +450,23 @@ int32_t azGetObjectToFile(const char *object_name, const char *fileName) {
   TAOS_RETURN(code);
 }

+int32_t azGetObjectToFile(const char *object_name, const char *fileName) {
+  int32_t code = 0;
+
+  try {
+    code = azGetObjectToFileImpl(object_name, fileName);
+  } catch (const std::exception &e) {
+    azError("%s: Reason Phrase: %s", __func__, e.what());
+
+    code = TAOS_SYSTEM_ERROR(EIO);
+    azError("%s failed at line %d since %s", __func__, __LINE__, tstrerror(code));
+
+    TAOS_RETURN(code);
+  }
+
+  TAOS_RETURN(code);
+}
+
 int32_t azGetObjectsByPrefix(const char *prefix, const char *path) {
   const std::string delimiter = "/";
   std::string accountName = tsS3AccessKeyId[0];

From 5f7dbde47fa0a408fd024819df17322a1a4faa75 Mon Sep 17 00:00:00 2001
From: Jing Sima
Date: Thu, 24 Oct 2024 15:07:10 +0800
Subject: [PATCH 097/102] enh:[TD-32459] Abstract function properties into a struct.
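
Instead of every builtin carrying ad-hoc checks inside its own translate
function, the parameter rules (min/max parameter count, accepted data and
node types, attribute requirements such as primary timestamp or time unit,
and value constraints expressed as a range or a fixed-value list) now live
in a declarative .parameters spec (SParamInfo / SParamRange) that the
generic validateParam() walks. Below is a minimal, self-contained sketch of
the idea only; the type and field names are simplified stand-ins, not the
real SParamInfo machinery from builtins.h.

    /* Simplified illustration: one generic checker driven by a rule table. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    enum { TYPE_INT = 1 << 0, TYPE_DOUBLE = 1 << 1, TYPE_VARCHAR = 1 << 2 };
    enum { NO_SPECIFIC_VALUE = 0, HAS_RANGE = 1 };

    typedef struct {
      int64_t iMinVal;
      int64_t iMaxVal;
    } Range;

    typedef struct {
      uint64_t validDataType;  /* bitmask of accepted argument types */
      uint8_t  valueRangeFlag; /* NO_SPECIFIC_VALUE or HAS_RANGE */
      Range    range;          /* only used when valueRangeFlag == HAS_RANGE */
    } ParamRule;

    /* One generic check replaces per-function validation code. */
    static bool checkParam(const ParamRule *rule, uint64_t argType, int64_t argVal) {
      if ((rule->validDataType & argType) == 0) return false;
      if (rule->valueRangeFlag == HAS_RANGE &&
          (argVal < rule->range.iMinVal || argVal > rule->range.iMaxVal)) {
        return false;
      }
      return true;
    }

    int main(void) {
      /* e.g. PERCENTILE's second argument: an integer in [0, 100] */
      ParamRule pctArg = {.validDataType = TYPE_INT,
                          .valueRangeFlag = HAS_RANGE,
                          .range = {.iMinVal = 0, .iMaxVal = 100}};
      printf("50  -> %d\n", checkParam(&pctArg, TYPE_INT, 50));   /* 1: accepted */
      printf("101 -> %d\n", checkParam(&pctArg, TYPE_INT, 101));  /* 0: out of range */
      return 0;
    }

With the rules in the table, translate functions such as translateOutDouble
can focus on deriving the result type, while checks like PERCENTILE's
[0, 100] second parameter or APERCENTILE's "default"/"t-digest" algorithm
argument come from the spec.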
--- source/libs/function/inc/builtins.h | 20 +- source/libs/function/inc/functionMgtInt.h | 11 +- source/libs/function/src/builtins.c | 1451 +++++++++------------ 3 files changed, 605 insertions(+), 877 deletions(-) diff --git a/source/libs/function/inc/builtins.h b/source/libs/function/inc/builtins.h index c76d32efee..fb0db58f1c 100644 --- a/source/libs/function/inc/builtins.h +++ b/source/libs/function/inc/builtins.h @@ -31,10 +31,10 @@ typedef EFuncDataRequired (*FFuncDynDataRequired)(void* pRes, SDataBlockInfo* pB typedef EFuncReturnRows (*FEstimateReturnRows)(SFunctionNode* pFunc); #define MAX_FUNC_PARA_NUM 16 - +#define MAX_FUNC_PARA_FIXED_VALUE_NUM 16 typedef struct SParamRange { - double dMinVal; - double dMaxVal; + int64_t iMinVal; + int64_t iMaxVal; } SParamRange; typedef struct SParamInfo { @@ -43,17 +43,11 @@ typedef struct SParamInfo { int8_t endParam; uint64_t validDataType; uint64_t validNodeType; - bool hasRange; - bool isTs; // used for input parameter - bool isPK; // used for input parameter - bool isFixedValue; // used for input parameter - bool hasColumn; // used for input parameter, parameter must contain columns - bool isFirstLast; // special check for first and last - bool isTimeUnit; // used for input parameter, need check whether time unit is valid - bool isHistogramBin; // used for input parameter, need check whether histogram bin is valid + uint64_t paramAttribute; + uint8_t valueRangeFlag; // 0 for no range and no fixed value, 1 for value has range, 2 for fixed value uint8_t fixedValueSize; - char fixedStrValue[MAX_FUNC_PARA_NUM][16]; // used for input parameter - int32_t fixedNumValue[MAX_FUNC_PARA_NUM]; // used for input parameter + char* fixedStrValue[MAX_FUNC_PARA_FIXED_VALUE_NUM]; // used for input parameter + int64_t fixedNumValue[MAX_FUNC_PARA_FIXED_VALUE_NUM]; // used for input parameter SParamRange range; } SParamInfo; diff --git a/source/libs/function/inc/functionMgtInt.h b/source/libs/function/inc/functionMgtInt.h index 924ec6d40a..e10581beb6 100644 --- a/source/libs/function/inc/functionMgtInt.h +++ b/source/libs/function/inc/functionMgtInt.h @@ -102,7 +102,16 @@ extern "C" { #define FUNC_PARAM_SUPPORT_COLUMN_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(6) #define FUNC_PARAM_SUPPORT_NOT_VALUE_NODE FUNC_MGT_FUNC_PARAM_SUPPORT_NODE(7) -#define FUNC_PARAM_SUPPORT_NODE_MAX 7 +#define FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE 0 +#define FUNC_PARAM_MUST_BE_PRIMTS 1 +#define FUNC_PARAM_MUST_BE_PK 2 +#define FUNC_PARAM_MUST_HAVE_COLUMN 3 +#define FUNC_PARAM_MUST_BE_TIME_UNIT 4 +#define FUNC_PARAM_VALUE_NODE_NOT_NULL 5 + +#define FUNC_PARAM_NO_SPECIFIC_VALUE 0 +#define FUNC_PARAM_HAS_RANGE 1 +#define FUNC_PARAM_HAS_FIXED_VALUE 2 #define FUNC_ERR_RET(c) \ do { \ diff --git a/source/libs/function/src/builtins.c b/source/libs/function/src/builtins.c index 276c77f567..5e21cd94e8 100644 --- a/source/libs/function/src/builtins.c +++ b/source/libs/function/src/builtins.c @@ -536,7 +536,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* if (numOfParams != 4) { (void)snprintf(errMsg, msgLen, "%s", msg1); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } cJSON* start = cJSON_GetObjectItem(binDesc, "start"); @@ -548,20 +548,20 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* if (!cJSON_IsNumber(start) || !cJSON_IsNumber(count) || !cJSON_IsBool(infinity)) { (void)snprintf(errMsg, msgLen, "%s", msg3); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return 
TSDB_CODE_FUNC_HISTOGRAM_ERROR; } if (count->valueint <= 0 || count->valueint > 1000) { // limit count to 1000 (void)snprintf(errMsg, msgLen, "%s", msg4); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } if (isinf(start->valuedouble) || (width != NULL && isinf(width->valuedouble)) || (factor != NULL && isinf(factor->valuedouble)) || (count != NULL && isinf(count->valuedouble))) { (void)snprintf(errMsg, msgLen, "%s", msg5); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } int32_t counter = (int32_t)count->valueint; @@ -577,7 +577,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* if (intervals == NULL) { (void)snprintf(errMsg, msgLen, "%s", msg9); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } if (cJSON_IsNumber(width) && factor == NULL && binType == LINEAR_BIN) { // linear bin process @@ -585,7 +585,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* (void)snprintf(errMsg, msgLen, "%s", msg6); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } for (int i = 0; i < counter + 1; ++i) { intervals[startIndex] = start->valuedouble + i * width->valuedouble; @@ -593,7 +593,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* (void)snprintf(errMsg, msgLen, "%s", msg5); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } startIndex++; } @@ -603,13 +603,13 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* (void)snprintf(errMsg, msgLen, "%s", msg7); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } if (factor->valuedouble < 0 || factor->valuedouble == 0 || factor->valuedouble == 1) { (void)snprintf(errMsg, msgLen, "%s", msg8); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } for (int i = 0; i < counter + 1; ++i) { intervals[startIndex] = start->valuedouble * pow(factor->valuedouble, i * 1.0); @@ -617,7 +617,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* (void)snprintf(errMsg, msgLen, "%s", msg5); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } startIndex++; } @@ -625,7 +625,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* (void)snprintf(errMsg, msgLen, "%s", msg3); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } if (infinity->valueint == true) { @@ -633,7 +633,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* intervals[numOfBins - 1] = INFINITY; // in case of desc bin orders, -inf/inf should be swapped if (numOfBins < 4) { - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } if (intervals[1] > intervals[numOfBins - 2]) { @@ -644,7 +644,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* if (binType != USER_INPUT_BIN) { (void)snprintf(errMsg, msgLen, "%s", msg3); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } numOfBins = cJSON_GetArraySize(binDesc); intervals = taosMemoryCalloc(numOfBins, sizeof(double)); @@ -658,7 +658,7 @@ 
static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* (void)snprintf(errMsg, msgLen, "%s", msg3); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } int i = 0; while (bin) { @@ -667,13 +667,13 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* (void)snprintf(errMsg, msgLen, "%s", msg3); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } if (i != 0 && intervals[i] <= intervals[i - 1]) { (void)snprintf(errMsg, msgLen, "%s", msg3); taosMemoryFree(intervals); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } bin = bin->next; i++; @@ -681,7 +681,7 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* } else { (void)snprintf(errMsg, msgLen, "%s", msg3); cJSON_Delete(binDesc); - return TSDB_CODE_FAILED; + return TSDB_CODE_FUNC_HISTOGRAM_ERROR; } cJSON_Delete(binDesc); @@ -689,6 +689,120 @@ static int32_t validateHistogramBinDesc(char* binDescStr, int8_t binType, char* return TSDB_CODE_SUCCESS; } +static int32_t checkRangeValue(SNode *pNode, SParamRange range, bool *isMatch) { + int32_t code = TSDB_CODE_SUCCESS; + if (pNode->type == QUERY_NODE_VALUE) { + SValueNode* pVal = (SValueNode*)pNode; + if (IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type)) { + if (pVal->datum.i < range.iMinVal || + pVal->datum.i > range.iMaxVal) { + code = TSDB_CODE_FUNC_FUNTION_PARA_RANGE; + *isMatch = false; + } + } else { + if ((int64_t)pVal->datum.d < range.iMinVal || + (int64_t)pVal->datum.d > range.iMaxVal) { + code = TSDB_CODE_FUNC_FUNTION_PARA_RANGE; + *isMatch = false; + } + } + } else { + // for other node type, range check should be done in process function + } + return code; +} + +static int32_t checkFixedValue(SNode *pNode, const SParamInfo *paramPattern, int32_t paramIdx, bool *isMatch) { + int32_t code = TSDB_CODE_SUCCESS; + bool checkStr = paramSupportVarBinary(paramPattern->validDataType) || + paramSupportVarchar(paramPattern->validDataType) || + paramSupportNchar(paramPattern->validDataType); + if (pNode->type == QUERY_NODE_VALUE) { + SValueNode* pVal = (SValueNode*)pNode; + if (!checkStr) { + for (int32_t k = 0; k < paramPattern->fixedValueSize; k++) { + if (pVal->datum.i == paramPattern->fixedNumValue[k]) { + code = TSDB_CODE_SUCCESS; + *isMatch = true; + break; + } else { + code = TSDB_CODE_FUNC_FUNTION_PARA_VALUE; + *isMatch = false; + } + } + } else { + for (int32_t k = 0; k < paramPattern->fixedValueSize; k++) { + if (strcasecmp(pVal->literal, paramPattern->fixedStrValue[k]) == 0) { + code = TSDB_CODE_SUCCESS; + *isMatch = true; + break; + } else { + code = TSDB_CODE_FUNC_FUNTION_PARA_VALUE; + *isMatch = false; + } + } + } + } else { + // for other node type, fixed value check should be done in process function + } + return code; +} + +static int32_t checkPrimTS(SNode *pNode, bool *isMatch) { + int32_t code = TSDB_CODE_SUCCESS; + if (nodeType(pNode) != QUERY_NODE_COLUMN || !IS_TIMESTAMP_TYPE(getSDataTypeFromNode(pNode)->type) || + !((SColumnNode*)pNode)->isPrimTs) { + code = TSDB_CODE_FUNC_FUNTION_PARA_PRIMTS; + *isMatch = false; + } + return code; +} + +static int32_t checkPrimaryKey(SNode *pNode, bool *isMatch) { + int32_t code = TSDB_CODE_SUCCESS; + if (nodeType(pNode) != QUERY_NODE_COLUMN || !IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type) || + !((SColumnNode*)pNode)->isPk) { + code = TSDB_CODE_FUNC_FUNTION_PARA_PK; + *isMatch 
= false; + } + return code; +} + +static int32_t checkHasColumn(SNode *pNode, bool *isMatch) { + int32_t code = TSDB_CODE_SUCCESS; + if (!nodesExprHasColumn(pNode)) { + code = TSDB_CODE_FUNC_FUNTION_PARA_HAS_COL; + *isMatch = false; + } + return code; +} + +static int32_t checkValueNodeNotNull(SNode *pNode, bool *isMatch) { + int32_t code = TSDB_CODE_SUCCESS; + if (IS_NULL_TYPE(getSDataTypeFromNode(pNode)->type) && QUERY_NODE_VALUE == nodeType(pNode)) { + code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; + *isMatch = false; + } + return code; +} + +static int32_t checkTimeUnit(SNode *pNode, int32_t precision, bool *isMatch) { + if (nodeType(pNode) != QUERY_NODE_VALUE || !IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type)) { + *isMatch = false; + return TSDB_CODE_FUNC_FUNTION_PARA_TYPE; + } + + if (IS_NULL_TYPE(getSDataTypeFromNode(pNode)->type)) { + *isMatch = true; + return TSDB_CODE_SUCCESS; + } + + int32_t code = validateTimeUnitParam(precision, (SValueNode*)pNode); + if (TSDB_CODE_SUCCESS != code) { + *isMatch = false; + } + return code; +} static int32_t validateParam(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { int32_t code = TSDB_CODE_SUCCESS; SNodeList* paramList = pFunc->pParameterList; @@ -736,139 +850,44 @@ static int32_t validateParam(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { SValueNode* pVal = (SValueNode*)pNode; pVal->notReserved = true; } - // check range value - if (paramPattern[paramIdx].hasRange) { - if (pNode->type == QUERY_NODE_VALUE) { - SValueNode* pVal = (SValueNode*)pNode; - if (IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type)) { - if ((double)pVal->datum.i < paramPattern[paramIdx].range.dMinVal || - (double)pVal->datum.i > paramPattern[paramIdx].range.dMaxVal) { - code = TSDB_CODE_FUNC_FUNTION_PARA_RANGE; - isMatch = false; - break; - } - } else { - if ((double)pVal->datum.d < paramPattern[paramIdx].range.dMinVal || - (double)pVal->datum.d > paramPattern[paramIdx].range.dMaxVal) { - code = TSDB_CODE_FUNC_FUNTION_PARA_RANGE; - isMatch = false; - break; - } - } - } else { - // for other node type, range check should be done in process function - } + switch (paramPattern[paramIdx].valueRangeFlag) { + case FUNC_PARAM_NO_SPECIFIC_VALUE: + break; + case FUNC_PARAM_HAS_RANGE: + code = checkRangeValue(pNode, paramPattern[paramIdx].range, &isMatch); + break; + case FUNC_PARAM_HAS_FIXED_VALUE: + code = checkFixedValue(pNode, ¶mPattern[paramIdx], paramIdx, &isMatch); + break; + default: + break; } - // check fixed value - if (paramPattern[paramIdx].isFixedValue) { - if (pNode->type == QUERY_NODE_VALUE) { - SValueNode* pVal = (SValueNode*)pNode; - if (IS_NUMERIC_TYPE(getSDataTypeFromNode(pNode)->type)) { - for (int32_t k = 0; k < paramPattern[paramIdx].fixedValueSize; k++) { - if (pVal->datum.i == paramPattern[paramIdx].fixedNumValue[k]) { - code = TSDB_CODE_SUCCESS; - isMatch = true; - break; - } else { - code = TSDB_CODE_FUNC_FUNTION_PARA_VALUE; - isMatch = false; - } - } - } else if (IS_STR_DATA_TYPE(getSDataTypeFromNode(pNode)->type)) { - for (int32_t k = 0; k < paramPattern[paramIdx].fixedValueSize; k++) { - if (strcasecmp(pVal->literal, paramPattern[paramIdx].fixedStrValue[k]) == 0) { - code = TSDB_CODE_SUCCESS; - isMatch = true; - break; - } else { - code = TSDB_CODE_FUNC_FUNTION_PARA_VALUE; - isMatch = false; - } - } - } - if (!isMatch) { - break; - } - } else { - // for other node type, fixed value check should be done in process function - } + if (!isMatch) { + break; } - // check isTs - if (paramPattern[paramIdx].isTs) { - if (nodeType(pNode) != 
QUERY_NODE_COLUMN || !IS_TIMESTAMP_TYPE(getSDataTypeFromNode(pNode)->type) || - !((SColumnNode*)pNode)->isPrimTs) { - code = TSDB_CODE_FUNC_FUNTION_PARA_PRIMTS; - isMatch = false; + switch (paramPattern[paramIdx].paramAttribute) { + case FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE: + break; + case FUNC_PARAM_MUST_BE_PRIMTS: + code = checkPrimTS(pNode, &isMatch); + break; + case FUNC_PARAM_MUST_BE_PK: + code = checkPrimaryKey(pNode, &isMatch); + break; + case FUNC_PARAM_MUST_HAVE_COLUMN: + code = checkHasColumn(pNode, &isMatch); + break; + case FUNC_PARAM_VALUE_NODE_NOT_NULL: + code = checkValueNodeNotNull(pNode, &isMatch); + break; + case FUNC_PARAM_MUST_BE_TIME_UNIT: + code = checkTimeUnit(pNode, pFunc->node.resType.precision, &isMatch); + break; + default: break; - } } - // check isPK - if (paramPattern[paramIdx].isPK) { - if (nodeType(pNode) != QUERY_NODE_COLUMN || !IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type) || - !((SColumnNode*)pNode)->isPk) { - code = TSDB_CODE_FUNC_FUNTION_PARA_PK; - isMatch = false; - break; - } - } - // check hasColumn - if (paramPattern[paramIdx].hasColumn) { - if (!nodesExprHasColumn(pNode)) { - code = TSDB_CODE_FUNC_FUNTION_PARA_HAS_COL; - isMatch = false; - break; - } - } - // check first and last - if (paramPattern[paramIdx].isFirstLast) { - if (IS_NULL_TYPE(getSDataTypeFromNode(pNode)->type) && QUERY_NODE_VALUE == nodeType(pNode)) { - code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; - isMatch = false; - break; - } - } - // check time unit - if (paramPattern[paramIdx].isTimeUnit) { - if (nodeType(pNode) != QUERY_NODE_VALUE || !IS_INTEGER_TYPE(getSDataTypeFromNode(pNode)->type)) { - code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; - isMatch = false; - break; - } - - if (IS_NULL_TYPE(getSDataTypeFromNode(pNode)->type)) { - code = TSDB_CODE_SUCCESS; - isMatch = true; - continue; - } - - code = validateTimeUnitParam(pFunc->node.resType.precision, (SValueNode*)pNode); - if (TSDB_CODE_SUCCESS != code) { - isMatch = false; - break; - } - } - // check histogram binary - if (paramPattern[paramIdx].isHistogramBin) { - if (nodeType(pNode) != QUERY_NODE_VALUE) { - code = TSDB_CODE_FUNC_FUNTION_PARA_TYPE; - isMatch = false; - break; - } - SValueNode *pValue = (SValueNode *)pNode; - SValueNode *pBinValue = (SValueNode *)nodesListGetNode(paramList, 1); - char* binDesc = varDataVal(pValue->datum.p); - int8_t binType = validateHistogramBinType(varDataVal(pBinValue->datum.p)); - if (binType == UNKNOWN_BIN) { - code = TSDB_CODE_FUNC_FUNCTION_HISTO_TYPE; - isMatch = false; - break; - } - code = validateHistogramBinDesc(binDesc, binType, errMsg, (int32_t)sizeof(errMsg)); - if (TSDB_CODE_SUCCESS != code) { - code = TSDB_CODE_FUNC_HISTOGRAM_ERROR; - isMatch = false; - break; - } + if (!isMatch) { + break; } } @@ -1650,6 +1669,45 @@ static int32_t translateOutVarchar(SFunctionNode* pFunc, char* pErrBuf, int32_t return TSDB_CODE_SUCCESS; } +static int32_t translateHistogramImpl(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(validateParam(pFunc, pErrBuf, len)); + int32_t numOfParams = LIST_LENGTH(pFunc->pParameterList); + int8_t binType; + char* binDesc; + for (int32_t i = 1; i < numOfParams; ++i) { + SValueNode* pValue = (SValueNode*)nodesListGetNode(pFunc->pParameterList, i); + if (i == 1) { + binType = validateHistogramBinType(varDataVal(pValue->datum.p)); + if (binType == UNKNOWN_BIN) { + return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, + "HISTOGRAM function binType parameter should be " + "\"user_input\", \"log_bin\" or \"linear_bin\""); + } + } + + if (i == 2) 
{ + char errMsg[128] = {0}; + binDesc = varDataVal(pValue->datum.p); + if (TSDB_CODE_SUCCESS != validateHistogramBinDesc(binDesc, binType, errMsg, (int32_t)sizeof(errMsg))) { + return buildFuncErrMsg(pErrBuf, len, TSDB_CODE_FUNC_FUNTION_ERROR, errMsg); + } + } + } + return TSDB_CODE_SUCCESS; +} + +static int32_t translateHitogram(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(translateHistogramImpl(pFunc, pErrBuf, len)); + pFunc->node.resType = (SDataType){.bytes = 512, .type = TSDB_DATA_TYPE_BINARY}; + return TSDB_CODE_SUCCESS; +} +static int32_t translateHistogramPartial(SFunctionNode* pFunc, char* pErrBuf, int32_t len) { + FUNC_ERR_RET(translateHistogramImpl(pFunc, pErrBuf, len)); + pFunc->node.resType = + (SDataType){.bytes = getHistogramInfoSize() + VARSTR_HEADER_SIZE, .type = TSDB_DATA_TYPE_BINARY}; + return TSDB_CODE_SUCCESS; +} + // clang-format off const SBuiltinFuncDefinition funcMgtBuiltins[] = { { @@ -1664,10 +1722,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateOutBigInt, .dataRequiredFunc = countDataRequired, @@ -1696,9 +1752,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_UBIGINT_TYPE}}, .translateFunc = translateSum, .dataRequiredFunc = statisDataRequired, @@ -1727,9 +1782,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE}}, .translateFunc = translateMinMax, .dataRequiredFunc = statisDataRequired, @@ -1755,9 +1809,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_STRING_TYPE}}, .translateFunc = translateMinMax, .dataRequiredFunc = statisDataRequired, @@ -1783,10 +1836,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + 
.paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, @@ -1814,9 +1865,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getStdFuncEnv, @@ -1840,9 +1890,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, @@ -1868,17 +1917,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getLeastSQRFuncEnv, @@ -1903,10 +1950,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .dataRequiredFunc = statisDataRequired, @@ -1936,9 +1981,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .dataRequiredFunc = statisDataRequired, @@ -1963,9 +2007,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, 
.outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = getAvgFuncEnv, @@ -1991,18 +2034,16 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 11, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 0, .iMaxVal = 100}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translatePercentile, .dataRequiredFunc = statisDataRequired, @@ -2029,26 +2070,23 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 0, .iMaxVal = 100}}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 2, .fixedStrValue = {"default", "t-digest"}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, @@ -2078,26 +2116,23 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 0, .iMaxVal = 100}}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 2, .fixedStrValue = {"default", "t-digest"}}, 
.outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, @@ -2123,26 +2158,23 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 0, .iMaxVal = 100}}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 2, .fixedStrValue = {"default", "t-digest"}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, @@ -2169,18 +2201,16 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 1.0, .dMaxVal = TOP_BOTTOM_QUERY_LIMIT}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 1, .iMaxVal = TOP_BOTTOM_QUERY_LIMIT}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getTopBotFuncEnv, @@ -2206,18 +2236,16 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 1.0, .dMaxVal = TOP_BOTTOM_QUERY_LIMIT}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 1, .iMaxVal = TOP_BOTTOM_QUERY_LIMIT}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getTopBotFuncEnv, @@ -2242,9 +2270,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = 
FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .dataRequiredFunc = statisDataRequired, @@ -2273,9 +2300,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .dataRequiredFunc = statisDataRequired, @@ -2299,9 +2325,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .classification = FUNC_MGT_AGG_FUNC, .translateFunc = translateOutDouble, @@ -2330,19 +2355,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_COLUMN_NODE, - .isTs = true, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_MUST_BE_PRIMTS, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isTimeUnit = true}, + .paramAttribute = FUNC_PARAM_MUST_BE_TIME_UNIT, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .dataRequiredFunc = statisDataRequired, .translateFunc = translateOutDouble, @@ -2398,17 +2419,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_BOOL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_NOT_VALUE_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 2, .fixedNumValue = {0, 1}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, @@ -2432,26 +2451,23 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - 
.isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 1.0, .dMaxVal = DBL_MAX}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 1, .iMaxVal = INT64_MAX}}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 2, .fixedNumValue = {0, 1}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, @@ -2476,9 +2492,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateAddPrecOutDouble, .getEnvFunc = getIrateFuncEnv, @@ -2502,36 +2517,29 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_TINYINT_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][2] = {.isLastParam = false, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_COLUMN_NODE, - .isPK = false, - .isTs = true, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_MUST_BE_PRIMTS, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][3] = {.isLastParam = true, .startParam = 4, .endParam = 4, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_COLUMN_NODE, - .isPK = true, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_MUST_BE_PK, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getIrateFuncEnv, @@ -2552,9 +2560,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateAddPrecOutDouble, .getEnvFunc = getIrateFuncEnv, @@ -2576,10 +2583,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = 
FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .dynDataRequiredFunc = lastDynDataReq, @@ -2605,10 +2610,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, @@ -2628,10 +2631,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, @@ -2652,10 +2653,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .dynDataRequiredFunc = lastDynDataReq, @@ -2677,10 +2676,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, @@ -2701,10 +2698,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .dynDataRequiredFunc = firstDynDataReq, @@ -2731,9 +2726,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .dynDataRequiredFunc = firstDynDataReq, @@ -2756,10 +2750,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = 
false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, @@ -2783,10 +2775,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .dynDataRequiredFunc = lastDynDataReq, @@ -2813,10 +2803,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .dynDataRequiredFunc = lastDynDataReq, @@ -2839,10 +2827,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getFirstLastFuncEnv, @@ -2866,10 +2852,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .dataRequiredFunc = statisDataRequired, @@ -2891,42 +2875,35 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, + .fixedValueSize = 3, .fixedStrValue = {"user_input", "linear_bin", "log_bin"}}, .inputParaInfo[0][2] = {.isLastParam = false, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .isHistogramBin = true, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = 
FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][3] = {.isLastParam = true, .startParam = 4, .endParam = 4, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, + .fixedValueSize = 2, .fixedNumValue = {0, 1}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, - .translateFunc = translateOutVarchar, + .translateFunc = translateHitogram, .getEnvFunc = getHistogramFuncEnv, .initFunc = histogramFunctionSetup, .processFunc = histogramFunction, @@ -2951,41 +2928,35 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, + .fixedValueSize = 3, .fixedStrValue = {"user_input", "linear_bin", "log_bin"}}, .inputParaInfo[0][2] = {.isLastParam = false, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][3] = {.isLastParam = true, .startParam = 4, .endParam = 4, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, + .fixedValueSize = 2, .fixedNumValue = {0, 1}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, - .translateFunc = translateOutVarchar, + .translateFunc = translateHistogramPartial, .getEnvFunc = getHistogramFuncEnv, .initFunc = histogramFunctionSetup, .processFunc = histogramFunctionPartial, @@ -3007,10 +2978,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getHistogramFuncEnv, @@ -3034,10 +3003,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateOutBigInt, .getEnvFunc = 
getHLLFuncEnv, @@ -3063,10 +3030,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .classification = FUNC_MGT_AGG_FUNC, .translateFunc = translateOutVarchar, @@ -3090,10 +3055,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .classification = FUNC_MGT_AGG_FUNC, .translateFunc = translateOutBigInt, @@ -3120,19 +3083,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE | FUNC_PARAM_SUPPORT_BOOL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 4, .fixedNumValue = {0, 1, 2, 3}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, @@ -3158,19 +3117,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 6, .fixedStrValue = {"LT", "GT", "LE", "GE", "NE", "EQ"}}, .inputParaInfo[0][2] = {.isLastParam = true, @@ -3178,10 +3133,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_BIGINT_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateOutBigInt, .getEnvFunc = getStateFuncEnv, @@ -3203,19 +3156,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = 
FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 6, .fixedStrValue = {"LT", "GT", "LE", "GE", "NE", "EQ"}}, .inputParaInfo[0][2] = {.isLastParam = false, @@ -3223,20 +3172,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_BIGINT_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][3] = {.isLastParam = true, .startParam = 4, .endParam = 4, .validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false, - .isTimeUnit = true}, + .paramAttribute = FUNC_PARAM_MUST_BE_TIME_UNIT, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateOutBigInt, .getEnvFunc = getStateFuncEnv, @@ -3258,10 +3202,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_DOUBLE_TYPE | FUNC_PARAM_SUPPORT_UBIGINT_TYPE}}, .translateFunc = translateCsum, .getEnvFunc = getCsumFuncEnv, @@ -3284,20 +3226,16 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 1.0, .dMaxVal = 1000.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 1, .iMaxVal = 1000}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = getMavgFuncEnv, @@ -3319,20 +3257,16 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam 
= true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 1.0, .dMaxVal = 1000.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 1, .iMaxVal = 1000}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateSampleTail, .getEnvFunc = getSampleFuncEnv, @@ -3353,30 +3287,24 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 1.0, .dMaxVal = 100.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 1, .iMaxVal = 100}}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = true, - .range = {.dMinVal = 0.0, .dMaxVal = 100.0}}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_RANGE, + .range = {.iMinVal = 0, .iMaxVal = 100}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateSampleTail, .getEnvFunc = getTailFuncEnv, @@ -3397,11 +3325,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false, - .hasColumn = true}, + .paramAttribute = FUNC_PARAM_MUST_HAVE_COLUMN, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getUniqueFuncEnv, @@ -3422,11 +3347,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false, - .hasColumn = true}, + .paramAttribute = FUNC_PARAM_MUST_HAVE_COLUMN, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = getModeFuncEnv, @@ -3448,10 +3370,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, .translateFunc = translateOutNum, .getEnvFunc = NULL, @@ -3471,19 +3391,15 @@ const 
SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3503,19 +3419,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3535,10 +3447,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3558,10 +3468,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, .translateFunc = translateOutNum, .getEnvFunc = NULL, @@ -3581,10 +3489,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, .translateFunc = translateOutNum, .getEnvFunc = NULL, @@ -3604,19 +3510,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | 
FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutNum, .getEnvFunc = NULL, @@ -3636,10 +3538,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3659,10 +3559,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3682,10 +3580,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3705,10 +3601,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3728,10 +3622,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3751,10 +3643,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = 
FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -3774,10 +3664,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateOutBigInt, .getEnvFunc = NULL, @@ -3797,10 +3685,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateOutBigInt, .getEnvFunc = NULL, @@ -3820,10 +3706,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 8, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateConcat, .getEnvFunc = NULL, @@ -3843,19 +3727,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 9, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateConcatWs, .getEnvFunc = NULL, @@ -3875,10 +3755,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = 
{.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, @@ -3898,10 +3776,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, @@ -3921,10 +3797,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateLtrim, .getEnvFunc = NULL, @@ -3944,10 +3818,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateRtrim, .getEnvFunc = NULL, @@ -3967,19 +3839,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, @@ -3999,10 +3867,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_BOOL_TYPE | FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE}}, .translateFunc = translateCast, .getEnvFunc = NULL, @@ -4022,19 +3888,15 @@ const 
SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateToIso8601, .getEnvFunc = NULL, @@ -4054,19 +3916,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 2, .fixedNumValue = {0, 1}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, @@ -4088,29 +3946,22 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE | FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false, - .isTimeUnit = true}, + .paramAttribute = FUNC_PARAM_MUST_BE_TIME_UNIT, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 2, .fixedNumValue = {0, 1}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, @@ -4132,21 +3983,16 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE | FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = 
FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false, - .isTimeUnit = true}, - .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, + .paramAttribute = FUNC_PARAM_MUST_BE_TIME_UNIT, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, + .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, .initFunc = NULL, @@ -4305,10 +4151,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_VALUE_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_JSON_TYPE}}, .translateFunc = translateToJson, .getEnvFunc = NULL, @@ -4489,10 +4333,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE}}, .translateFunc = translateOutGeom, .getEnvFunc = NULL, @@ -4512,10 +4354,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE}}, .translateFunc = translateInGeomOutStr, .getEnvFunc = NULL, @@ -4535,10 +4375,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE}}, .translateFunc = translateOutGeom, .getEnvFunc = NULL, @@ -4558,10 +4396,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, @@ -4581,10 +4417,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | 
FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, @@ -4604,10 +4438,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, @@ -4627,10 +4459,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, @@ -4650,10 +4480,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, @@ -4673,10 +4501,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_GEOMETRY_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BOOL_TYPE}}, .translateFunc = translateIn2GeomOutBool, .getEnvFunc = NULL, @@ -4724,10 +4550,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE}}, .translateFunc = translateToTimestamp, .getEnvFunc = NULL, @@ -4747,19 +4571,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_STRING_TYPE, .validNodeType 
= FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = NULL, @@ -4779,10 +4599,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .dataRequiredFunc = statisDataRequired, @@ -4821,10 +4639,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getStdFuncEnv, @@ -4846,10 +4662,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getStdFuncEnv, @@ -4869,10 +4683,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getAvgFuncEnv, @@ -4894,10 +4706,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getAvgFuncEnv, @@ -4917,10 +4727,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_TIMESTAMP_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getSpreadFuncEnv, @@ -4942,10 +4750,8 @@ const 
SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getSpreadFuncEnv, @@ -4966,11 +4772,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, @@ -4993,10 +4796,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, @@ -5017,11 +4818,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false, - .isFirstLast = true}, + .paramAttribute = FUNC_PARAM_VALUE_NODE_NOT_NULL, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, @@ -5044,10 +4842,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getFirstLastFuncEnv, @@ -5067,10 +4863,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_ALL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = getHLLFuncEnv, @@ -5092,10 +4886,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc 
= translateOutVarchar, .getEnvFunc = getHLLFuncEnv, @@ -5115,10 +4907,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateOutVarchar, .getEnvFunc = NULL, @@ -5152,10 +4942,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, @@ -5183,10 +4971,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, @@ -5214,9 +5000,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = getStdFuncEnv, @@ -5256,10 +5041,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -5279,10 +5062,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -5302,19 +5083,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + 
.valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -5334,10 +5111,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE}}, .translateFunc = translateOutNum, .getEnvFunc = NULL, @@ -5357,10 +5132,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -5380,10 +5153,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutDouble, .getEnvFunc = NULL, @@ -5403,19 +5174,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, @@ -5435,19 +5202,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, 
.endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, @@ -5467,28 +5230,22 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, @@ -5508,28 +5265,22 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = false, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][2] = {.isLastParam = true, .startParam = 3, .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateOutFirstIn, .getEnvFunc = NULL, @@ -5549,10 +5300,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = -1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NUMERIC_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = 
false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE}}, .translateFunc = translateChar, .getEnvFunc = NULL, @@ -5572,10 +5321,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateAscii, .getEnvFunc = NULL, @@ -5595,10 +5342,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateOutBigInt, .getEnvFunc = NULL, @@ -5618,10 +5363,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateTrim, .getEnvFunc = NULL, @@ -5641,10 +5384,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 3, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc = translateReplace, .getEnvFunc = NULL, @@ -5664,19 +5405,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_VARCHAR_TYPE | FUNC_PARAM_SUPPORT_NCHAR_TYPE}}, .translateFunc 
= translateRepeat, .getEnvFunc = NULL, @@ -5696,10 +5433,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, @@ -5719,10 +5454,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, @@ -5742,19 +5475,15 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .inputParaInfo[0][1] = {.isLastParam = true, .startParam = 2, .endParam = 2, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = true, - .hasRange = false, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_HAS_FIXED_VALUE, .fixedValueSize = 8, .fixedNumValue = {0, 1, 2, 3, 4, 5, 6, 7}}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, @@ -5776,10 +5505,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_UNIX_TS_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_BIGINT_TYPE}}, .translateFunc = translateAddPrecOutBigint, .getEnvFunc = NULL, @@ -5799,10 +5526,8 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = { .endParam = 1, .validDataType = FUNC_PARAM_SUPPORT_INTEGER_TYPE | FUNC_PARAM_SUPPORT_NULL_TYPE, .validNodeType = FUNC_PARAM_SUPPORT_EXPR_NODE, - .isPK = false, - .isTs = false, - .isFixedValue = false, - .hasRange = false}, + .paramAttribute = FUNC_PARAM_NO_SPECIFIC_ATTRIBUTE, + .valueRangeFlag = FUNC_PARAM_NO_SPECIFIC_VALUE,}, .outputParaInfo = {.validDataType = FUNC_PARAM_SUPPORT_DOUBLE_TYPE}}, .translateFunc = translateRand, .getEnvFunc = NULL, From 1cd254ea96c67fc956ec496b62b7fde0f77fc327 Mon Sep 17 00:00:00 2001 From: Shengliang Guan Date: Thu, 24 Oct 2024 17:04:35 +0800 Subject: [PATCH 098/102] doc: minor changes --- docs/zh/14-reference/03-taos-sql/12-distinguished.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/zh/14-reference/03-taos-sql/12-distinguished.md b/docs/zh/14-reference/03-taos-sql/12-distinguished.md index 0b834dea29..d7696b1859 100644 --- 
a/docs/zh/14-reference/03-taos-sql/12-distinguished.md +++ b/docs/zh/14-reference/03-taos-sql/12-distinguished.md @@ -76,7 +76,7 @@ window_clause: { FILL 语句指定某一窗口区间数据缺失的情况下的填充模式。填充模式包括以下几种: 1. 不进行填充:NONE(默认填充模式)。 -2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如:FILL(VALUE, 1.23)。这里需要注意,最终填充的值受由相应列的类型决定,如 FILL(VALUE, 1.23),相应列为 INT 类型,则填充值为 1, 若查询列表中有多列需要FILL, 则需要给每一个FILL列指定VALUE, 如`SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`, 注意, SELECT表达式中只有包含普通列时才需要指定FILL VALUE, 如`_wstart`, `_wstart+1a`, `now`, `1+1` 以及使用partition by时的partition key(如tbname)都不需要指定VALUE, 如`timediff(last(ts), _wstart)`则需要指定VALUE。 +2. VALUE 填充:固定值填充,此时需要指定填充的数值。例如:FILL(VALUE, 1.23)。这里需要注意,最终填充的值受由相应列的类型决定,如 FILL(VALUE, 1.23),相应列为 INT 类型,则填充值为 1, 若查询列表中有多列需要 FILL, 则需要给每一个 FILL 列指定 VALUE, 如 `SELECT _wstart, min(c1), max(c1) FROM ... FILL(VALUE, 0, 0)`, 注意, SELECT 表达式中只有包含普通列时才需要指定 FILL VALUE, 如 `_wstart`, `_wstart+1a`, `now`, `1+1` 以及使用 partition by 时的 partition key (如 tbname)都不需要指定 VALUE, 如 `timediff(last(ts), _wstart)` 则需要指定VALUE。 3. PREV 填充:使用前一个非 NULL 值填充数据。例如:FILL(PREV)。 4. NULL 填充:使用 NULL 填充数据。例如:FILL(NULL)。 5. LINEAR 填充:根据前后距离最近的非 NULL 值做线性插值填充。例如:FILL(LINEAR)。 From 68f651a92f8caa674ee7c33fc35ceebf0ae6122c Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 25 Oct 2024 08:37:20 +0800 Subject: [PATCH 099/102] az/object to file: static impl func --- source/libs/azure/src/az.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/libs/azure/src/az.cpp b/source/libs/azure/src/az.cpp index 5989a7c333..5f95624c94 100644 --- a/source/libs/azure/src/az.cpp +++ b/source/libs/azure/src/az.cpp @@ -416,7 +416,7 @@ _exit: return 0; } -int32_t azGetObjectToFileImpl(const char *object_name, const char *fileName) { +static int32_t azGetObjectToFileImpl(const char *object_name, const char *fileName) { int32_t code = TSDB_CODE_SUCCESS; std::string accountName = tsS3AccessKeyId[0]; std::string accountKey = tsS3AccessKeySecret[0]; From 3795f9cd137036673345be598b2cebae1b38fea1 Mon Sep 17 00:00:00 2001 From: Minglei Jin Date: Fri, 25 Oct 2024 10:26:15 +0800 Subject: [PATCH 100/102] fix(vnode/commit): fix sync commit log output --- source/dnode/vnode/src/vnd/vnodeCommit.c | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/source/dnode/vnode/src/vnd/vnodeCommit.c b/source/dnode/vnode/src/vnd/vnodeCommit.c index dae2b3a5ec..3ebcf50858 100644 --- a/source/dnode/vnode/src/vnd/vnodeCommit.c +++ b/source/dnode/vnode/src/vnd/vnodeCommit.c @@ -409,7 +409,12 @@ int32_t vnodeSyncCommit(SVnode *pVnode) { vnodeAWait(&pVnode->commitTask); _exit: - vError("vgId:%d, %s failed at line %d since %s", TD_VID(pVnode), __func__, lino, tstrerror(code)); + if (code) { + vError("vgId:%d, %s failed at line %d since %s", TD_VID(pVnode), __func__, lino, tstrerror(code)); + } else { + vInfo("vgId:%d, sync commit end", TD_VID(pVnode)); + } + return code; } From 1da7897a44eb944a03be27360e44152631f68d2f Mon Sep 17 00:00:00 2001 From: Shengliang Guan Date: Fri, 25 Oct 2024 14:41:25 +0800 Subject: [PATCH 101/102] doc: init directory --- .../zh/06-advanced/06-data-analysis/01-arima.md | 10 ++++++++++ docs/zh/06-advanced/06-data-analysis/index.md | 13 +++++++++++++ .../06-data-analysis/pic/data-analysis.png | Bin 0 -> 63007 bytes 3 files changed, 23 insertions(+) create mode 100644 docs/zh/06-advanced/06-data-analysis/01-arima.md create mode 100644 docs/zh/06-advanced/06-data-analysis/index.md create mode 100644 docs/zh/06-advanced/06-data-analysis/pic/data-analysis.png diff --git a/docs/zh/06-advanced/06-data-analysis/01-arima.md 
b/docs/zh/06-advanced/06-data-analysis/01-arima.md new file mode 100644 index 0000000000..1668da453c --- /dev/null +++ b/docs/zh/06-advanced/06-data-analysis/01-arima.md @@ -0,0 +1,10 @@ +--- +title: "ARIMA" +sidebar_label: "ARIMA" +--- + +本节讲述如何 ARIMA 算法的使用方法。 + +## 功能概述 + +…… \ No newline at end of file diff --git a/docs/zh/06-advanced/06-data-analysis/index.md b/docs/zh/06-advanced/06-data-analysis/index.md new file mode 100644 index 0000000000..a1ca53d7d0 --- /dev/null +++ b/docs/zh/06-advanced/06-data-analysis/index.md @@ -0,0 +1,13 @@ +--- +sidebar_label: 数据分析 +title: 数据分析功能 +--- + +## 概述 + +TDengine 提供数据分析功能的扩展组件,通过引入 ANode,TDengine 能够支持时间序列的机器学习分析 + +下图展示了数据分析的技术架构。 + +![数据分析功能架构图](./pic/data-analysis.png) + diff --git a/docs/zh/06-advanced/06-data-analysis/pic/data-analysis.png b/docs/zh/06-advanced/06-data-analysis/pic/data-analysis.png new file mode 100644 index 0000000000000000000000000000000000000000..1598238f2cc7ce6fafa1be9e45c9d46f5759d36c GIT binary patch literal 63007 zcmd42_d^p;v_3qWkWdUwdPh+@5_%JfCIC9i0hoDUeF6lQfaMx6JVapk5Y($A zq&p~LGs_(^+XLdk>9N_-|8yi88D0A@I(l+4GqtqTI=*#sMPByLpFajEC+B^3Rh6&a z($OZdS+8h6R*&3rb8)e}YpR*FNXJukE9vN6b$@VRz1}Y5I>0KoFw5t#;irDGI2$=?bJHiPY!{Zdh^pP7YgVJ@BO%#RqV=1l^g);d9aSm}2Xo+s=AQy2RH78pre-hw! z_0pipjO!4Opx_a8rRw7^ZLBUbLiPdNc@v9;R4Dhr0&zj*>B2F6Gm{0w40AW3Pk&0} z%UU6v9+Vpv=4_b3HEkF_c%Nf_f9maQa#)YvyFp!ZlNrQ%ZyLqMO{vtS0=*xA`#Z5st6G2x0tf1&v3nE$MWEd(0KF*-kq#P(~2tm)7nn%@{)A zG^h&yKYIPLNHWy3t(24*Uk03|ios-8ECoG|P*RtoNlG1(x4Y=HP+3?}{@ zZ=W`3_8@eIZD}Iz9j8}?8$V1VC@6`$atBPIFk+qi+p|W;4 zM2q}_->vcxY&Zgv4e!%%(()Q0&g7j}q>SE4d031c1Ip*_1rOCv-@()H333WrHdG*f zC87F>r}oHHPHjlFiPi)P*VRjSqxVA(TStOHwFgE#f0q`2LV+7 zW0lyG4Oggf+LOqo3FL*QF=>VjfCo8q)C71_^Zz0g#k%AILg!AA5={-6WYOa+{r{(1 ziEZ}s3R_Ss&cMFN z6{*%!EqnjH*}`ZGoz;K5#>Xw{9Lzk*bW@2p(76QB4G{!{&4Op5df0G%M7HoHPm)NF zY(MmlIL_Wx8efaMrxGxFhSioxVYkP7`3t<8(p##Un)^A@o5of5-$||4s*apS4~4Co zzUC4jsN;6>-DRrh!DgwBPoLZQ=Xig}+5hTh+?FdsV8ymMvt`0K{JO7X7JsAL#}0MT zUTEq^6Nqi99?w&Tu0uVhVBvnoIUba-W&ld`_Ge6!)|b+z~t z@WS}hyDI`P>Ln(ixph-%Q>c(nG{mjHac>5_zF>bNFDZr6eRFN>$5)5_29ZmM>9{bc5SJ#)|N*0J6m_638r7}YQD$j9lv4-XGdwZFeu$I+c- zrl5HSeYhn5%-7^#-I-k{l50*d8wwAF4)t{VCBMuXyrQKNjSxTkcjvIU1p$tSh%56W zi2E(9bhYqXEcZQNyUIKE-g}0tnkJX{w=9|l&}@@~r-FpO^qbQFz9%xQ@a_Xu=LJB} zu>;@8STUA4nN+Bw`Lyym%`a=VX7`jW2O zN={VRzBZ68pZeZzjv=?*EohPlgmMyC3XN5TH?o{}0_GgfuD3Je{z2go$jpG63iqUq zvSo@t3Jck&GMcyy1q@IJ9&f1bwZzZxe%GSj=$x;=rTS|0Lm#=p?BSKQ*QyU(=0>HH z->-1X?R@bWI{Od-rcaRI@RrjXcnL!o(0X$7qnS)3+XoLGK0$Of)4~2`i@~d|*Q$$) z>KyOs5B;B|K6xEJK4$a&A7$>Ssz}f5YFD$jj+|z>6Q>)T>QP<{qiN8TgmGrvmo=wX z?A3G{ufkrQyB2W{TPxoX@AUJpd-sHT3_Z#4-iwX*MVIJDURIlzKG9I(9C-whAWb4g zqpvyKU{Q!N_W+7-z4`_lhLW)2+0hh(^NZAZcjvDu+r~#t%xsOdku`*Da%XEd@j>;OZhpFcUq`s`UzWi=y8s|n0V}>&Imvo z06JemfR@GiHJ>3h*pJo05_Fs934E%yF5UD(f<!EO=g$YK#SX-Mp^LoITihRm2vM`u0o6P zfBZibhFR07b70|$7d&HQq}X11HE~4XiS!T0RrTvGwzAptx0)1F&=_F3 zLyg>BMy)?3xqDzqUmZR4Y17ur8X8#>B;v3HeP02UT>U&nLQMt*;!?Kg>ZO);q)`2J zqg|_$L~~ofb!CcSJf{f6cB?QcQL|{Z$3EV&1RsdGH?0^(F7(~xa+i7({4C|RGS(D< zw`bnHlHwyE7VX8C!>Tox&MhGVm}eWTd{nHsYJbz!IfY`!XSl-uDV8Cs;M39jK&+{d zL)-;A`iq;orgW{I47|K@<1fD^abUQw?nItvq5p;)2}N9{ADPiZ9nOLHl~M#1U%xPC z1j7-eoGtwvU`^Z4U|_tnlCOhiz4Y-92WK^idGI; zcLP}{p+w6p^;V6?>^#6B##O!+Tuw--&Z~^sri`%!rtEeBZp>4E4sD|>QK%NygG3a7 zD;@zFL;T4L=pP(ckS$W2A3t8)ptsT8b|#F!icLJm$QJy0@6xd1EL+r``hy2U_&MAP zLS8=$HpGj7PvWW~!{&a!7%uIQK4S4*ST#ruH<0 zUPby*px3{*f_?8wpO8zGvnf~L=c9&lKzPP2@`w4e>`0T~9(Tv>t8$C`RDoLxx9~H) zz;z920`LjBgp?t;8(6*wrcp6V^@Z@N3>A(c^eSaslN9WoMrfMgQ>{LPkg{)pyY_Zv zPO~yyev$AZFR&y$Nq{x|P?rsF2_$4;!Vb0{g)eopESDS22!>^gVtbvR2|6vYPP=kj<1#qD2!|? 
zHa}2vQnRKRVHwGz*ll!~R5(*-18h}X(|AN#!J=}pha@WfzLBO8?y{kL%X*UtgLCvL zAH>Dr6~sQU*g|+YJkhx`bDQ(}8#p2z^4jvbyJU#hLSw;ze{@J5Z_2s08Z2ChIOf0* zLIo{fBzmfhdM9PbfJSvjINp1(-ZuZ{sxAUjdVFfSd_Bn&2a%sIZC%g+_Pjvr5#&@4@GL}jI zF^yh##&8!wJ+Jt>mw3ULHbiRpEm^%ZBeSip??G;tG-bBatP;Q6!^_ljk5qEoB@9u{X{+uojR0n2!4Vf3N%$@v*okD!LFoQINOx}WrOJ=} zE0hG7ew_<~eX;OeO3=s~`mYC3BLqi1^I-!`!ma01lG3j;4?Ep_SeXHlfJ2_o2%;%K zFlEMxucru9Zk*fpY*4VO%Bs!~AUFuyhn?h3Y>UA^OjX?UXEnUJiPRiEK)ZJd69{}> zi?^JgAjvFnN9E`^-MVT;WLo!8W|&)%mT_RVee z*`e3pDe40WiL*N^g^2l9D+cpm7beM$G8+hRFLQ(I4C_j^psGhZbyD)580^G~kA5ZF zC~mcDKE?AU=tnRs&dJQthnX2?f&^c_9%grswK}q;e$OQech)+gRY~h0VxA(aIHcl5n7!Re8O|Gp)e#D+5+fFZH zd-WpwAjRJiN>C*0>1UMvSN)aoBSs$Ymrs6cLkp|%>CBjkK9EbzjREV+nCz@{+Py=w z!jefrq{$ojZfM8>%R3z;^Oxu|5wZDLX51JBQjKomI3`jas#Cl6rZ+Zi3PWhL(2P|K zUGSmIet1nX==kaXtB&x>NappsznG*HrH;;= zXE1n33MRjR{TDso;toT-=8W%iA!XlDDZi_w7@MzxmkjT@Gd12h^&dmhrt{mNRVVs10HM#0DZ{htx+V;ez8*#LpW%7 zf2`G53_r4aH&%8XS`RUX02vbL$B4#;T7QY`_deHZmcw(B`tyTR3^-uMN47aMV+c3n zuEvqQ6v`CB!Hwb7!b&UQx`Tu!V1jzimB?=Uns6n zAk%nS?GS5^Sg1tus&{@;aDb|WqB9f?C3$NjinNw(gT4i<6(eFV@6+A=(j`4tx*ck1 z4!DWaybqN1Af&Cbd+D`ogvKPURy$^~FHi~0~PrC2!holS>N zD5&K-Ep&)e&>A$Z&xpTh{&wj@Id%5iof&(Jm8uk|q+LUjbn*;XBUTd=-&yhJdMRiY z1RI6;kuN=NnmQ#XjasLwi6UiRubx|m|K6`w#AG{4zzGuK`^{75FjsB_;xqnB9FlN> z9r&tyr&#QHQ59v36DH=B5i_nv+J0BLm!*s>-SWsv5fD-&;F3G2+wYXkitlNk*qt{d zpXdb*s!tnwVoQvnG~z{tk7|#*=RIZ;-ws1dMl4Z6<19F7z2K(jx35JtSHT%Y2j0H% zP<}*&xFq88*^wCteBc6Q4nQa~*K4S#U!_QJj=WX};otD7NQM+)?#a;q zU#Z*&UKI^=4m(a?kIG=hFPKNaHGWv^kWKonJM1s-$Rm? zvBmdO!NBkgM8P)GWfYBb@HJ6q0VgXy4F6YMa3SEBzg=AF3qHF`1<}pCB9zZgwj9Mh z!tB4eC)<^yw9o_7YkB7d|H_fezPBAe_ucRMWc`l!`1eh|Puo@*k&M3{;A{VoA$M01 zn_SE|EPVf)(ZyTUB&1m!lgFlb+ac4DtmSeBi=A7{VW}g+T9^9kELH1MZuC+J5}-&H~A~mV)FFn z_Ju89FR^paN<@sxk^j1-{xn9;{=Q^T=cWHGHrmbRnM*&jnFpwhoh)bEaZA5E`Z$SQ z(?Gx_KXB*~XLidx{7u2~LU_C^;pC*O3ypTh+9dvH&l!Dd_Kl@;#(BhIx@L!$c)uk= z<@;)fsa8lD--^=8Oi&$d_wN1P#$m3ubFBp~w;td+UWKVW`t;Q5)f2id@z9K^0LD;QCyu?@ z)N5|6YE3^Nq>AURm}X8^^&NK?WBtix9=kL1S|}2lY!Ig2ktCjVop^I09B@XFTuH@A z$~xB7o)7z~*eu0Bg98wWt4)$P7fHWP?eP~u%Zj|J-|1(ca>G{y;SLa{LdIPT$7z9U zUDq{YwQdm2kZ>gac`qQh5O`oO_!J@`woxD>?1E4w4`xlRT{xKBk^`YeynDy|?cZ}; zr5}dd1V%~S)c!X71$jo*YZ<8T=6UA$pW0W}i?dB=>>|F}nL^C89SIiG2!Z9qfz45_?L z-<27992vJs7^tM!{lsed`wg7So!_O8iWjh~KyFx4Av}K+6u|&NkHbb*M|IN2)1?O0 zEEk8ys+oSgaba9RLfMv@V^@Ewbm)yT~yTT)Uz=i+R6Q5H!-+iMs#6tg%v>t%TREO!Ird$PlA>}7i9QJvv8Bp0HssSzyg+lN)-f0 zLqSXb{pxn=C2<&cS=9ReXr2nUo2aNg88^7{ih_)r8%+aWtKYIHMXcI|o`b>eJwe$s z%*31Y?)CPIzZ1dS)r2h{&k*C8GUODI|~K-y3*cTKDUhB7JuFr?e-hY%6K>;vsi5Gy_MGzFc`P( z@17T;e|gkcekH5wepcp6^+}+=+%*8Lcb#gBh`LJ;n0Fn|y&ex*)NSb z>YLem%^P^qJn0%#UOug})e^kVx7c}i>)8i}aRn)vmMxiv0VA8W8%f($$v&QW4MerU zqHhq0|L_l#J`67PweSxP2@Wn5ITQ^pLe&>!?ca3r(^0Y*whi{aRoi@JRA$6jDJ`q| zZdS;ZnX<*Kq5Dg0SzY&MZXN2`2RnNDhdkUn!ayW=CjF}zZc7F%i11octU4QY(oBma zgZchf%zy?@X#{0$Rbz``XkGPxW)&oR7HZm813Cf6DM>i6{>_*uI(j05KSu&V_=6p%fi$L=e`TkbAqj6H~n;Z%dgvcUd+=0av+)`Mlw(pJUndq&-kIl|D+rO4e?H^3~GIAl$=PW?7NY-GQ|1r1)K48I*vkiG=L+wQNH7pX!DS6Tlq2{P$$44B`C%fA&~Dw7Th3ZQwDsm4IbBfR0eY*} zEwbano*k)nAK8)k?&gByab3uU!jFO}35=|Uju|6L$O-!@ zh2)m^CByC;RA#H#r4Bv;AxV^+BZ04`NOS`|h5V?#UnenFgK2Q?b zBL{S-O_6=O^kKfx2Zj=RTFumA!~cXBLWA#OkWkzd4AKRo6-p;H2$RdXE2~9d{+?UN z6s=?977f10xI_$9qS`h0?W*ogfe)FT>4?;~&zV@oUm#>t5wU5*>b5eMrJBh#woV>+ zLhI3n(`HaliB)ys=P?_z|5g^lqeFFPWpLt;(rvC3?G}O`ju?YYt@-UIu8I_dBMQrA z)}KEv4bY$S|2?WtVc9Ynp3Q)C5Jd=7hyaI{`HDFd#)6yZk-(JKhT zSq~i+3*;s*p|QvFLcfXg?aW#H$)*h#jvn75)!oK{KU*f*x;*NELQ?Zd>;5uf=tuABqLA28+=1(k4n=u@E+e%+O%(4a;-k(ymk<_VE_I=_^-D80b!QWYk2$7gM!$s*W($3 zdM@ApT%#a3E6GXxeyi~6V&V@D>&_sd(U0{HLqpdNz!eUC=(&?1Y|VqJVCE;5E4^Df 
zb(5KNhRvwiPK|Go5Q)NByyS=9dZNnB?fXxkR?i5UIW(%MGg~}p_6!+29NQo9IIA`} z$sDdSk}N7>aQ>0Kh}(cmR!oV5IQ8b=fs-aJ-G$3VKs5#1Bt}(KIj9h7pkjX4|3+0I zCNcA@EInTG{+GfiMTcw+hn9)_{SU7*Ui**L9+ZTe+9fz_RNPA(tWjPJ^q#wbw`K5N zx$-UKadV?d{+A*%O=;7pTqUfrx0bo}@C~bhXUIEW#kCD+&o??QY6m_%-jEQ3D>=m4 zn8g%nMZPmeTKjr`oqlTQk*4GQc|!Y>7st)w61NwMdw@1ahemPzAYU4E|%Od zn;`5_VT7@)OhE7-MnW+yj56;^5zL~pfoD+)G&pO}>M91?4!bD4meJ>;!Hd&P)n0XI zdAjw2(%*BSb7g`Bh1J@(x&A@yu4I6hhG$|wzZtjlD{DsK=T-KQuUo2}@7qezFFgXS zQ?7^G=n@`Oe?y;Rw#~XLTjI|agz{_I0$}u5kA9Q=!njF$&CA;)Sm}q3u8#`3rx>_P zR7LD0-K<@WzrCmvb@2=|TeBU>thc6QwVDe!5w!H|7(*QnX2oEgX<1mZqRV97;)VUw zzUqb-wY@YNX;+;TbDnfPN-%;7-Z}p=ERA?|U9NQFdQRfCVS|xN;XVIqU$WnqnHRK_ zOLUzu*z{9_ZIXw`&F&V?*auut7346Nzm#^c`9(z|rc$c}VRW&0BfxFmPuFE`-)1(f zj!*a%`ewwVzQUT80py2%v#K(9$Z$Xh?j0@BOZ$$U*mx6?vY#?n(X6_aq9q->T2w5& zDA=ccWm3MaslA>$I}jd{(j=8X;I6eg`6QOsdxbsQHtFW-VnGe{X5G}IcOrpunK#uY z>{9aVQanb5R`s@XPD|N}W<)A#UO{3cF6hKe_-k#2R@=W~>A1(6j}^>N!q*fsdi*3N zVk=nItUW3M+=K&N+brQtl8UC67gYjmtl!9Q333@U&DvcO$+p$8tI>vS-_|3G)vJYH%+niRhjrd80k< zm_;x2-}X?Xnzj%e`zJzI_E4jg%D373&sS}KF-7`8o~XvZoUuO}ZRoczW)Ytm)ZECE zw7qxZ>ZEIWU2=9c1Mc1hMkCbQ9}@PO&u*KYXwe(qFM9l*&1l}+)$ndt+3|>aL+K5p z>43i78u4^qv7!a|#Ng6T+}_alW#8V9D~bPl$^Y{W(L6I}#CDN$*dS11&IGYi2$iH$s2=+9aSX z`fqK<$3O09J2|;Shm_tzBqAwfRa;J=D2raGqV`f@#&0Mlf2ie;Y5NxsVRZU0E^g&tLNe2@qTT84vQ?AX z=2?2yuwI->Ys6=bY#)7I(~4i8z9*`{7c;WNyC>~9eq!A^dXKVv(bJj!ccYOEb}JEA zh{j4pbyXXXKm(B;&aZ7>&CUO0k~uh&;=Xg-b>>#_Rne6DRWrMX?!4Fk^+~{YyAhY@ zh7I0yL}#MMth#-u66CJ;-6sP{S7D2T*n5Nc{qqWxn?D=MOlC=m`xb6EYi$Bu zX0F3a2bFQu2q2X8u*7<~aMoj^E)J?I05mH3?$5a+n(#q}YwRgrPGD_`4I(HIZwmC4 zTpH)3w8*BU`e^Y>GMZIzn!CkxF`_Z$iL_QaDt)Ea|dPk;b1Vj>YFItD>WdH(O-t>^()EPQ2JDTRF2!Qihq2{U+G+RqVO?r?qL?Z{5@; zU;k3f?b>d}?_U^&(uvO_W5exZt~&Sp=wzGXw7ly1H=2up*JWX3Pd~~m(y}|kH?#Lw zzqAD>A+Ly0f=M?Y_OL2{4xiE;2&+q#T)G`mZ0(mV~JOlmg@$;_5ct(LS=9>O87V_ z1!;CgAn}wuYSFw7h~s>WeG+fROR`rjnQ>B0kL=qO7NlpudP~R+WSzC=9mc39AouIW zbRlT(-Q+0MVReI48hg$?o`q)7`nr$8RHXggj={$Zc|)fiYEg<0~u*&rBkIP z)FVy;IPjtnzNz<~-SJ9;5q(RtSB_=+na9rUW#65iZbZDf-fwo7iUnU8xxnQ*7r~e< zW-GI^k5`^neC&N^2Sw*8LQPswF;Kq7^q9MYsf;&~gUd{gykl)=g75Xz$=S>QDqcv* zh+HAv%#9zhH-El@h1pzS|0BM>^+Rg3_>|KU^E2laYl%B9agMx?6Gwjx-@TX+iGdBv zsdv)u(=YqImzb>?_%-B`Jmq#fB(Oy0?QhC0%I%0TRG{uz z%Kj*3vdyC3^@sY~PvMdDs`j>CgEN_NwlCN;8Wedf%(?Uoq0K#IUk?1ieg=*EEfU}C zqkxNZmnkP1rEK?pM7~g4d%@p0_bNWt4)uGG)mbOE#{5+dH@>)KTngN-C>;;z_PVJ3 zbID)N8i-uIw$5=?Ul#d0x&NXy67%QDhATJ8;c_SIMge47X9zw2xGJ=|^pq`mJxvU) zD?y(jxts)KZqMi-uGU79jLKs-!z16N<7}2NPw=3PW__EU*OR2u!cTn$SO#bZ*5zhvzs_%26+0Mvbv7-8qY6tzjlsvtlG)ODujaq8~CASTqv59 zE4e0j_II}Ii?-Owy0cWtw(K7x$QER>-nN4WE^Uu&A*P*Cqn*M+e~*?V`Sa+nvWz#c z7*>Q#i-_yz-#QJ9-tB5Weu0I@*N?>_$_o;l?1s5XV!9Uey1Kqy&wU*J zT<>ofg~9uX0C=#$@AonjYx`4-<*Fbyfz~_m>pxoc;y6DZhNBTMJz9>1oo6+4qibkD z=#?RxD84#%9(4R@`28Jq*}YboK0SSf7U^~dlb5ZsUjb__%=o~*YWV6&OXd4JtXu5ypm3dmQlN}PQdG z=~Y3bo!8fR(y1-$|Kg4~>|80go-4Wde8}M2=YI(pLi-idpD#YfPhO4a6ABxp%}YQ2 z2U+5WJRR@-;)!aZRf>3{KR3)C8A4;1bJaMHMw>%`E2HU2euAw}$dc#ZaN6j&D}X&Y zR7}=U-4$g!y%ixiE903q*}5e2Aj!oC zLBGYgUtGkT^=-_)Oj??MElDMs6>pgONo(MdvC4qkaBIC3ytAc%$+q263*GvGcR_k) zrdXhLK`Tk-3?Nf}6o&e7yRqq~IDt~`cf}wpP6jRdOZ{xIPYAt3BD4G->*pkGjrmC^ zVO_iIqKjwzY_eRaw`UuyE1D!kzCJOpuSFP2#4tIK<@{2o3sF$wb>-FKkMx^U&5w_61;`qS0-qgJI?Vi9PQ&gN57EVa`Wq(nFdXV#MPP9Ns z;`kkGPl6%Sx+l=pKYF}Evk|kdlOP6@f4m?ik&@j6_9jJ1zWpq6&fm^@YjHLs=S1mk z<|_X*WnZ4AfF0ln<|8fru8S-zDx$!EvJOrYyD19q_+P4L`xJB~o7I|o91z!X5%bN= z>y*R=%ouRbe1Z3VeY|;YHvePRNVpC&g7NJUQdf7Ne>cREx$foyg{GH?9hay0nbcI} zKgN&Ripd1shh7SuE%~~#;s8*K?}*f}zs2CcuUj^R)FnUCLe?nC-!>uFQb|jB9YnYM zjz~+^3oStcT0zq3&7uU1rL7w5_TCu0FX6xb&ps2CE-P#AgDBHk)qLFI0*^hjzXxYq 
zunKdqBJ5BI%|_2$!N>^xd+2`e+i6`6uCB`tlUrJ7nsihTlj4w&4Nwy~qpkkQ+c_vum|0SG~i*iFZN>$wAVgJcho%EKM`7;eUm3qMP_rusod+*R`XEr#=1o*2UU z-L~!Tm0QfXP#j4|!m2%VW~`(gpzZp8xSk&|Z=M&EYuUhCJ^R&MD~2pAXU3!M1AAti z>vpRG>7VDVu4K1&cEl@I%6cYX8<}D~mjxDl-w-g9UTB=gn0xwd>_C>dtw&GX!db*N z!kPx?<7?=DW&OUHItVBV&igc-Q$T+$IX|b1{Nz-ieDXNCEYRH?IighD)6$E?L$tjN z%|bg)gHp&(V;UBu{okf5pQp^NX7G@7M3w%#FC~-v``sO6_dq6J9?4gONmgRw_Mptx zKMM>9F{B**!$Uf4e;UoJGTcKILMh`e;OAei^ahHD?5a<)*$vLfk%vpZI1wB_zMAnA zGRVh?iErJ{hP-ZKmKU}0&yvE1Nj9dEzbg*j6Bk}J&$f%Y1ueZp>yB+UjeoCb{tWi{ zd|7%~JiVDc9&|lgk$t6dJL3~X81#;ueJPL4@;^(KRv5{&W5H&7ahD%HUX_49Yj^)9 z@8<*k9uYaFSqvUdeE8Heka-W5x`*Zl^2ZV@4vGY4x08?*l~uEek;|7Tkh)`tEvB$)li za|i9NBx!oN)wTyXenhkPn0IeDck7$l!>^{qw{} zN>&K3eUL(h+(m=s1vyKVHD2}%B=_H^(66B;rdt~+l4!W8ot|9E|FWF^AmH={5qjjD zcD=d(d|B`Z!i0nU6ksVp`>~8$L*^1QzGFm5-eUCI>*BeHTAFoZX1q%FgO)5PCR04k znrwH18Se{Ox>?qm5+RDiTazg7%b^fn1Kl5#0F}(0#E~XZoPB$jRQC)kZYu*rAbRGn zjp{rg{3Z*9s>2uPplmJ#e$(NuVb+^Q`Z>Wsg?HDEdL`gnv_*y`C=n8ka!L4zW*zlV z6oLC;MhMGK-W}8=t8==`kK_lq)Ch$Eu&4nX%FZZX#|jCgxZ617N!YbLl7QO_JBWKq zYSI7I$YV&DNq$7O2-0L?#m@`d%02P-vNf&Lo_**5^dq34YslThM6~-Wh#s<(XmT}p z-HL%MnW>iJA%hs`_IPa7@{RV_oA*<^!GaVVjIQ8xs^eJr`S?Csm)ZT1B)oWr72nr` zn8#!%&^}q57ocRpp{$eM4t~CGh5x7w3*W3(|(=HNZVG9NrlXSCy zLwTg^Td%Menl#w=j(I{@n7AWT3JnXc;nx?K3Z z8@(<#6Ax)xv*V}Ui(rn{dB0VdZep>Kosq9wg9dBfP^vs`7#d_CLY?aV;%U zHqrwG)`&<_Wh=FVUiMWWVJlM!Ei2rPVJSJVz`^tr{x1 zVU|cz^LjDvP7qBj*-k;Pz! zz5Q6v8Y(TLCZp@UZ4k6#zo-XhHZhi6x?l6V(YkhETqQ5IwLBDWc)Rv37NIj$Y-y5zsgynf3u)MR?+Elu$fNLQeiVMKp$0j*$ZwJs zJzwd>B%{2RXmcZa`^#<=Oy>+dmRQYa!rooJB-;ykW7*)J@f?@j`RwioG}gl}nX|`- z`iYCG;?d+iIo!rBUlS~_e2WB{ozYcu=dCT>3W)c33Nu754#IhsnS7!XF zR3FL#o7fWVqzj6)YxAU6dz-|NF$60ATn05kJKlM6^>IvY=eC{(lMe@@51o6f%5>Fg_H09gD#%Hvs2=P)Wjhw*O1+mXsQ2`OHH#|sO z2>IBM&rYfZMf^XI^`%tdGkCBWlbIAqljNpEPrB}xQQ*Lpmtj@bNmS%^^*(=Ou_^vT z{Gen99a}6pAGH|Jj52qs`ePz^SWEqKwDhB1GnVd6dRCLvKU?%Vf*9l#X?QCa(OlT` z%*O7K#$?-)wOBe(V(c3s)==HYGP6mhgP3KfPO`dkL;hFm=GahB`qzLRuP+T(&;B8^ zft`U6^X7FqscX*z3~ntt2X=&)K1-_iUfr^di0yduw#1F=`ES=ywo`Ys z>smcre<%JAr(X&s!SI+HTwX&pWq47x%jVJ7#SMGbnyU1$J`hE<6$XDS_=$+Y_r3eo zoiJ5*&Agv=BUHrB?w_Rqsh@(nW6TUo{$(XcucQD`q>0Bm)wJVK&OR}Mv*AMf>4Bb!kShE-(gh%g+c><9LO;D+5oN>8@8y#-RA*CA zUK2fcs8>n-vb+()jVY?v*=`s*e|&quk)E`IDywU>v4$Vvot)LXfUwg$ne3;zm?*92 zg#7e~azD+HV;=NVm~kq8a7BK4&)CgUvGTdikt*EFiS4gu^*MB=xL44k;Y^$TLLi1a z&LlPY6lFR4{}ik7tf_YS zGq0%9le6YWsej9!AYAnl4Gx}`-#?G(>lN_dE_)`xmF0e9o*VzWmUULa6IwIh(<%?{ z&LNrpzM7xhf6LLgy;Lmu+NFWUoUc`rTszX8C?TF_>(5)6A1v+xB?%_?vzq;1u9K!mtJV5#-gx;RAWeGfP~>e2q$|)du_+A; z_a{b}7g(+{R%Et?t$UaIL2g1*vj=aOyhM`w`KVdPI9}5h`C{D08A0>yt}cX}cqp5k zbi1Cq@?i+`?hv;XBL$6vSG?o=e_WFTI_=C??}{D`%Rc4rXJdTk-1*IfPQ~j0@%O@m z;&a6{4OX=FicEjZBe|=2hQ~Z#;`aIKFa9u+!st@=K7DzaNu;?{`iJEN$Kh-?Qdo#{ zOnt2o_E&D+e*9%VDmfOil&kD6#*fQktw9H}EZzf|8faoBL`5%)t7?Osd2G!{BK-!8 zJs&MNj|n9KZWihoW{Vi)?vHxdcl_hckNFshq+zLoVM#Y#oac+lU+Ec^M|Q5Hp?B_< ztP3TTCwpjHVUbeU9>`vE% ziO#Ob*?X_et16O=6i4lT+ao9K-=YI3uDIFyC_eh~AYqJmLr@hZ8fnaqFAloKh8In& zA0C|Q^1gRr_z~B6g~Au(u--%sx9+Vk7e4=vg}BoVjB=V8q&4S=cyz>J`e4qwUZZu7 z4q4dpm##2YmzJacf_%T+9BE51Io}od9(EgXoGV9{eIE0k?YsUo60TA%dW2&0G}+q! 
z{QU`wmCVGW$*!2IVIL(rb0R%{S6RBHe|nB)d*3;Z9n<@7(YjcTO32BDQF&TpF5}_r|x-MNy|a zuKR`oCl*GnP15V>i*gBy&X9aXKjFTScF*q$72)srz`4f_f(=cs8q%CRG`-(}s*m?C z{7sG(hbJ8qW&`AxTz_1x*myX^m3*VyrCaUp^`_&f>aMHe{0T<(&Aw4HzTvhe4YQAL z<6Yk3BbUGN?lsYUNq?XCSjT%nwrJ&lXFkX*B9R zd#7f}R*K^k-F05`fqDf*`_^dWAA3uExn=%;djTvo+i0DxU66P~?qe;%!$rqqVpX{~ zaFHq7pf+LR#Omdrqy#=T#;1}lZ|D{?sB0h_raHCyeG(SjQlRi&j4R#0ZFS-c)~!oF zJwC2K{o{g>Tvygrq{x=(If2m@|Jn1sld6Oe-x-UuqawDR{ywwihm3Z3J6jnAkwTff^_FS ze7^VlyZ65C`2!Bi%ro!j{XA#RoU=3U@mJ}1b9ekbQ`*Xw;UE9&l^;DD>QY>_yZAVY zkAHn&;1o|5Qs%8TEWI-~Pqy|btEReyHt^2$s;7JPFll_4S$gHC{`hsW^qW_cvObcj zgw)|UB57HdNl!k;m|wp37jg4Z$vI7_>%WBe)353$e(OG^Y0fD&prT8{+{iL ztf&7SU+#%=Cz-nB6}(kd62G$1c49(+MP%d+~a4 z)NG;a>Hj>~ut3-vU;n_=%snBqI!0^@JLnRakIOqUDcutB7>E-2!B*aNbIwnGvIcSB zXVLBm-}O};kq5@l*6}bD>N+s{&8>$!X}X*Yps{P(z{yXDb#Zc8LF-2PsgXrH3eMe7 zMyyPn}lg2qn6cu@(K`yi~j zv9U31QyFQCIM6u#ftT>qs*FSZB>ZgeaPcsH@n8P$Jwu~jln0_ST~xGsO73iT)WVg0+WbM?c0oBoLct!8y$o5>qSrB8KM{EKCYHsZAMv=Z08DVd}6Aud!1cR&a+IOz#wPj^YEsFFz9d<$w0lBF?R)!~GCk$6k;ZYW_ z!3*+9`E41qF=)auS~^fMyP4PThXr$n6ZOhA+c z7P~+MOBN_&5+?BUVf70&cp*Q6kVtd|R(NP+r|Uz&Q!o>gcl8QO0Yd!3)CiK057Y2B zoS5^0Ff3Y7EeeZgJSo)%|KQ576HhI$2^vMg#Eb=!&<;B+$DrwPn*)}{_yq?8_Jpg% zvB?69{s-#-NJ5Vs_%Otr`r@z|ZNT-y@w8P73(w$`z3~{)IgIKz!rCHS5QG&Mj>3Ku z=O742bzn&b*k%!cRWJ(bOpQSW0p-B$VFDE82qg^m8y@8=iwmIvVFNgvFd?5mu5V~@ z!U)MPYQ7PG!Q#`PY0yta;YL@XM-L1q+0nMGc>Te^hlEiZF4HL_v7f7nqwztpBaJf5e--Yd0feYrBot&yHpm{67{iUDRVo>tERLL==D-dI??=pp1tY zvJ$?~SV-uroC*dJItG74Y+)BXZFp$79`Mm1bS9zm!@ht`9TeiwPY2d-V|4qAus;H;^_z!92D-X-STxcNT` z!~6N;LwX5)ci6cBwF?lb@YR3K<3d@0UgG}^@_%inzx~R7!SeqD5+;^94VIihPr|U^ zDXlLY;iVrDZ(tLZe*ed!$VKq>KqTDGHGdjdM4|@6Rft#;w9Q$&WAI;WZVv}jaKaR1 zpq|6@{OJ?5%^AK32b@N^2y_1iKpywOtXZdj56@c=W~2K4&RJg>?WcbqR)RtB4I=}H z?HMsD0e@|UG+!<(FXx|Qr~=t!7T8mXA$FNIgoPvvqL5zpH3m)M;#qh?G!Fgp& zQ~X=RoxQvm;u?%=7cSFJ`I9PGJz~dBVF@Y6;$v(Pno&IH4(NKiSYOK2DkUGADCRBV z3N_sx80fD*>&#?x`5C|6UY*syOk~R?l*3|RhtRxEuKVGO7yYNX>3MyMNl}amGDNOP z68T6BzIHWvD#8M!L^BBxCAIQ4qraqyGDR^>GiYP>goi3pX+JsK-P?Fdh1)AcieO4Qr$ zoRj>zV-TJjK`gJ7W*Sa2Ibj79eI6Q}SvelPJwLs<=Q}K}hAP*LQ8#|{-ObS0Kyt|1 zg?HpQ-^ez9*lStI6JmWzY%41Cbt~^~yn)Zh`u=`h>}F^n`KspaQoojbQ4G=mYGkB) zZT>dWg>=uEf`r7TIi%^(EcMai{Q93>@+!LyIe!Kd6sg< zq#Gz`Y->!@aULQ*ph|vWCBmzI+u_Bk!%m3OUsr-1jqA9}b|?Kz zXW^mWo;s>rP-LwMFTv^eZtJFq(rLzHXVv>+R`nipQrclnz2Vf!XN?){KGjA)vwOe( z)%^PK_xozMBQq>`I3+LP_> z6eDHhG?loO&!u_>70nF&a%1nipKNg}{di887y0yVs06Fot>a^`szJAG@*!Rgm0)d5 z`8yPMx=A{c>QkiI?uh$%KpRnVj=Qjq1;b(-{{h&YOGmt zA+RNdO=d>et~?Rkrt}mN_LhkT#@ut#G1c@>KE@paDE8+fJ}=QH=ViLuLUZ`b`bleA z?d~t)+O)9${z=*0ud;$*O2r@#8F*o@?pp)f|7iiJsE^-x0nc*G5d1L@o4j8i|;Lzv4`0|515S z-|27R;Qzzq21!!Z1vU|Ahh73;;Ds?53MbYJB2q7y5&t4;3!f*NEMHVLy`ysh6h1b% z>Zg?9|9oU5fc=nu5^)hTVGICik~D<>i;;!sqPhQ^`md6T+6onHxiJ8?jE2`BVF{!! 
z5PI_dSC2sgE^K*$MO#keg1@93qKj&{`|WpF0C0H!k@@0NGsMybr2o6&JQy&ic+MKBhC(WNIJ1TN?vC2V?z`;YAFa^JW= z8Cbxf%E>H}(n0+;iTVaWCtAA-lNf#8UdXIW2q#B`k4Q|AOYGZ>=O`pdNO9Vdh4Dwb zmo1(b5ahvER7mBB-rz3()iVOHQP&k#^kK7hVt28pEc-}z1%QymRL_$qxkbs^;!UWN zOV3R%ljX5e)(*-(A$dH_X*E;$;ek)S9?36nbLPb#Me;2>X=k^pFl;H`ygP{(+GRK= zO#Fkzj`e3Ad{nDQWJ|7`ty@jd;Hnb~Hs{uK zl`?g@c>&zbGBZ@7nY&G0lL0U3n7+W(28SX9k!}Iw-4qZKj1# z;)EBwb^~f&5DyqsJk!DG?Z;JzGum!vf@rq`Q%t)miFXJeR__>!s=97-^VEm+)u*AJ zTY@iL7TiGe_UPJ$IlRdLb^~H26@9#1QXLUD+OqA7Wfa!_ zOUcSiiR{juio-vqQ?w2{2d~^Njf2?gd9#wo4rBhDj5ZF5*>oIM1$_R#xst9kofp^Dz(8A9d?|t-E65rzHOVo12*%OA2VcgZWoj$v|a^H&W_RX*%w0 zkhymSdmxXUQNh}WdY&(DnA{Eac?LuayukCSdb(YF@2c9#h50a0&E&k0Os~eew9*2K2I@o373h}RrpS{t`LK_xp z@+SAq1o4Md@sapD5mt+&NrN{`kJ=@s$)XxcB~?Ru###4nFmc}3bJ;=SP%{#$c6c>c z#5o#MzG-KHMls~-jo*Rhc{Yt{mBX;qP9M_2|J$0K*xo+#!~2-fJ1H5s65Zd&TCnzx z(RVO+cb46^KcC3-RGm6 zKD_ksxpS*h1DE_*W1YV*QsUTZApY`k!wb9w(N>2yf~@zZY!ZP}pE-JUe=rSwu!>jy z3RJ%J{}d=qja~imoq5q=NmdgacF)W;ZZ`GJ5fIrfG&uZ2_nmpp&P&SfQlm{j$H{tC z7vBU>KpIYhqQ&0k7arVU$NN91d2+h=;>i)pKSoUc3kzFkQ{Q*FL??vZv4mGMfvMs* zoXfT~bD85}7_VFo%k;7=a|BtQ54b(ZjK9!J`nt7+M`3m5tK|6G8q7;@M*Iy*1Xob# znm>Nek)5W9CFj{De-krMrTWSpZ%?XNaoiG8LmaTEA+O#7ml5@3)aWz+=0Efs={_il zeXD^IC4UlwvyKI?7`%iRG&k60C#li*0JeDO8)2l4Po9(Az=W5N6Sc+T6a)!!7*=DDn+!|#mCm~vgM6KUN?XdDOOO+?*Nz<`+zp#eT!1`v z2%Q1H7s3H}KN^+5>sR}#!GUEY9Q_Vm+;USXVX1(NlflRoG>#A!f&};ekmlH{vD-`d zK%HE`h3rMnAJhttlyj(yYz!;^ji?4gg#oE#nqO{l>QjUQt0)zWny(>1gGg3FvS+|>DHp0;b9CqZjjCqRS@+spOxp2J|7GdJH!hwzP&gk*c$1g>vAqr zJw$_?d$oVO3oOd{Rd?yW`{}w++_TL|li=#RZZ)OJ!q_4Ihq`kP#Tf;oe)|7GErh`U zJOxTjKQDMD8%KoZd!$L;PhO4r^SB@qJi>=MIptBIfC}>tU`GGrS_|A|^8Al$_U|+z zAWnh>ANuDl z=FJzPAVeq7Vh(5v7%=8fjQR-@H2XNw7VtQWIhTS{h8X5q5dH(wB!->s{9@z{yH-J3 zlPpAs{eHt2ACh26h$7}3qRupc9=KnVN6Pz=;AHt(Uv}BcV!$JRJg!~74!AHH07`qU z3Mac|v2(Riz zS|hH4N?r)g5ZDPy%&d8lG2rL;&V=VebmPV!6r47qREYixqCcd#q5D$o&X4v)eCA-_ z$pV{6GkaolNzw!QTO9^tw zz6eqf*0o1&sQN*jaNhjulw@(?yIoO+=jR)hf@X!^))AsJE?-<|6No2#kme^JZ6L*^ z=8N&)peB_0dh24Xd57V9#8cp)g>XXp!k@wm!Yhmmfv(CcX~KSbAxu9h9U68GF@?yI z|Ks-k)gzvsCszR;=O-@t&8M=uSK)^!Fem@M4=I7KRO$#nT(Lrk3#9OY6>O9as=Dr; zoS(6Uz?k9@Q4lJ~@@RLqp*Vjl!(757kDukQQUVDbWl~Rq*E&#%a_|^!?ABuRD0T%v6dVU#t2qY_vK1jz9X9vW8pIleqr(YFZmZR80tG_9fq%Qu%A6L+ur>VfBZFBdf87^QPyT%t|7d2rBKP2 zuA#|}La1+6s$|TwPi|4dG~B9grQkk?TX z*)um;P6GwcV}jeJC7Czuru`le-a@@xrwZ>VAEjvO>nhKJ&v_`c#wU)f7gYjcPgugM z6D-oz2C7L{7l zq4uuzqksR%@3PNn=5k*QwR?@?H*+m2?|Ub4w6^+uiNPA?FWx7rI4%XpY!A&**iiX+ z&8n&FS~`-K9M3$Q9E`56%?C_B8W+6n<-rv6JgK*VaAMd#xv^edQw} z<9LDjlusXTRSil$^6FhH!ys6lGT+mmMk7K@PUDTNibnhmk)(HxaQem-f=@~yjl$8l zQ);xf1k$%n67;>eQ?BBhJ74v+AzIKEjrVwH3D-C2IZ1Ze=dgl6yM z@s_)n(aTGiv)77*<~z-OQn3xDX2?(03|jKXEq{F5n)UO-Op zEG1pd{3`A0c~>U#)9F-QwnbFKF#YiUaC{p{yOYk_oVd&fG6y_cqR%2VP%bCAM~Q*M z11EYLd{5~zzx(>vo^LL6O-~b-R}}~99ACD`NWU^X^ZQhpxLxSb_R)P5Ym8yFQ#l{q zlfCd)iPD1g%HgW?K9^(Ghu5#%A|(KqhrtUvT^>U*P~1z$OVWOy8dWm)jYSPOS%PF0 zd;tcw$hmwn+~l?mWn@WPoAvbSHrXk^QoM<_{N4TKh(z+%Gf@<}{Sp>p7a}wy z#>}khh|sbb9@`nSe6*J*IJoNJ9~N#Ok5s(VTCdgg-aJ#4r*y^f-mCYCc-my->g&~! 
zoAV)E#qM2Uakez#5VI<68cbl{4Z%vwGebr9*0AB*cZX{)mmFXIM$+oYnfRFIw+Vy4 z5?8de9BQ^u-lRM&RWIkz(c@x6VC@lTDLP7tS z=Dce^w4`ci0Pl#wQ(Rn3WnJdc?!3JvLm@5A?-jE7rkJ~%Ew8!^@eKP?OSg89*8k4T zxSbGhY^=o|P~-yeKYIj7nh-K5(?g@sT4em5;FZREe8}mgpH^#~L(Rjt=M+sx>Kg9l z4A{$^pTst#_cgaI#>U3pLiLS>Nplkkcd@!7Y8!mM=&P7K@x8ASHoRZ}hy~CDKWJNA zgwmbE2PZAV3bp4wPm=G3mN}ysnXDsLFlBY(RBVX!7G9^b(M^Z_6Pw^;dnO#&7^USvo4IhJv5IOD$=A8SFGx@FuX4HtQ zvfR88Vf6w`jIDk*o?7yvZw|hJYL-duQonq^=7-4q*5AS2(gty#;v7?trg9t9Qktl5 z3DS3(<|+C1x{j^DfmY&K*a_Eu%lo-Ml@`#_2wQ?x9+)`c2Ex&kFYrEQ>>j`1c0Tn- zr8cuX1C3)tk5A%A=Y2@bTe+|;&1{50!DRh)&nx@xh2&?{P5u-AuvPb(sHwpi)_`u7 zy{j_+nL_6zyQ$dy1*_RxId8e;d8a2#o!49Mnk(2+tVTVelw}u{j`)6lu2UmAZkQzE zr}QsAkxikh;kEzxWNh568~M6P)R1L9@!ZNRg6Ht5J9?lZL2h%LJFiOBJ*CV1d@y=; zEm{FZnd#hqg;c-mI`5vyQ>O;4mXJ%bfm#b(x#vE3lt@>)tPk#maUijpGyvVzNVkZ* ziZFL(6yB3720{9_9W{k7QNXf2E!p(3g0H({22`*s)K~?c!+@ig1rx*_`(^PDS(Ka~ z{%~kIz2_kI?bczvp4ZmRh0&{t^<;#&iJ_^KmhVBw>GUY(?aJ<30|YUen5XQjddZ;g zj?a{lF_`2s_u~6vr$MIg>fmZThnFNwoD(Y;&WEU7k-}Yaiq0QwX)!v;tvT~6T3%yo z9`i#41dYuo@K{a+W`Z}E$)MdQOPO&icpHP3h=3`0#R4RQmK^YT38<9M6i(OI?4I#% zAnzgMttCz6A9;TtDB3DQ9GnEyJU7oE#3|BX2dV$_Wg^^N0Q19h&3aQIIAlr^vK+ZR zIk(EF@{beBsC#~3uScAt2NbK=pY*XZ8;AwU`4Zzkenz+=3Q)nZU{C)gNhr5?N+U}J z?S?EhE|eQnOoc^hPzz&%fBxr&HgyhK{Wgn zgk=?iS9I!#-UE2DNViAgNjg#Dbt-fL=!=otp~D6cGh&9d`z$oO|CT;vxv7v>4E}22B`jvSbQIswx*Z*)uyNBm&OkA2 zYRK?={h!g#fmTCO4=g;bQ^<7u9}(gj9r6B%=A~Ev>`nziXhG$iCC|BQ4bn>z`a38A zLo3YJQlNf6>I0R2PlsS-`c+Pp_qRKC1nk%vTI_b+hQ@D=r&w>fd$mm09U1>^F+;RT zUyLY)>>Lb@KurQz&mPd=ToJ~|#m~)}p!AI#Z_hOh!zP**S2x!~g)s}47+L}s_q`mi z=FEM6Ydm9_6q-%>fWRu{`gwwgm&KQAJ2P6xpDnj&)dho{^^YH`&CR^ITl)=fV_ONe z!S9&U_#XI+L-Ho-s&5sRjto-Np@_Fs!k_GWw{o4mqvObf7RrJEv6uqHaO>4}{V3^y z9i(?Kc}9mN*AF^LNP%?=Zzo=hl^SvEeBuA?FT0RsQoFv#TQO*vI(duB_>Z2rh_S%% zTlC_HEVZp6FcU`9rK>6`Wrk%Kv0&$G@~;!)g2b>5i&I(c zwRp9zuIF98qE`E^l!N*_Lm4?-Oe-2Jnwam%T2tbKPi?CbzmNxFe2S+HZ#c-@XUrkX ze+t19bp#t9x100AgVm?nX9kS#J5;OZxaH4QLv8bs4X-;9r4PW5jsuED239M7L`*sJq2AL8+ z#P=tCC#4GXBZX8~wzn)VKPHzGkGcgb{?c$-i;R zp0Tx_g;uLpDEc$gRc(qYR9%H#U}e#Koy_P&=x2?$WM za{U4_-sf@!fZNRsxo7^xft{a-)&^>{5>bgH=zpG78D3SyLCVHQttqf6RL-4*@!n(Y zGXj}tONgj^nL7!;llKP5e(Aa6olT2H?m&&9{sCj#rrvM*Qu=?>VLxY8bu|Kpo!*Q8o3<@&K4=g z6eF~`Fj05-m44&|OFv0)-IuzflS9G@^}Hm)n&iu|$I-R3M_SWu{7Y-PE`Uj%Je2nx znkG>%^}vf3eYj80QAeU9@=osCOD!Q(NKFZm1Qk|C2_x4>ew}WkZ6NmFq+gI53;JeO zTLq#P)>|8IBIu{a4j7qEcvy2%#W%>!9RG@9(L6T69^5MA{8LQ0;7-qrvGHGrjFDMF z&$<77uzI}1&NI1*Jr$C%XW)8C*?1#}j8Is^%k0XUFecl}3~13{U5Ra#1wy-C<5q~@*W0JaBWb1`F#lLixg!x2zzrK>@PqthwWHAyF4eJ z&J9o~zPclR1OAGM3X5pXJ^t(vVE2{ji&R2(7QqIWAZD@e=o!1OnfKHOlA?Y$n&}^Q zgWKR5*2xx%JmCAWe0Z~2nIE;0N{q{AVNx3-2^du>Cl|uFQ(+hL-eEd|gT@$$S{4JcwTpl7)e7%^*9CpWcr!|vz?$?92~VfFO1`(i*K3C3som)lgbT`bme)ho^&r}jF;0dSyZD$E z8=WVB=~9`KC~!cg?-S-+^Sc^nAE`cn88MPASrgsgQ_dfO2Wb<-H=bD&Y*5Mr?ZE@v z&~{`hEmjz>d~NaMr@)wp7?=FU&awhg-M-t5Mc zquBK^zN9O%EM|=C`|yi)2|QYO45LaSi0N2D%2Q(bXpOc~5dxM05hahz=c}42p&>~J zry+QZB!GDa26n->BFyf3WvM!x$OhMH{R(ex0bPDf^|Z#Xe?^HEXEqC}>Y5PV2NBnO z{NZxf6=PKSG37D;dyJqk#uV4v!P9XiB!HL`7xkemdvv`HNj$;N_XgoloD=o_P~zJw zf`m$hyk_HvG=c5%S2kv#A+~1`%3XQ9T)~$ac zmQ=|=tkobqBVHkM8V&I7(xElj1a1WZihcrbb7SkmuP=hYvHuLc(Dhblm0UvJxSn_5 z!>j~5wnX;4DVOJu?7%ebR2nVT6|aBK*K-E1-xzY2FS9`LHU2t&B!n}d>SQOxeUz1O zI2X_VhgxmRmiqfK-V~xp|3rQp?ZZxkO+}cGc}6~pe@U=$h7`txxcpVg1YgNtO*a2T z8so^-yaX}x?1XT{UlT|eqdt0>H4MvaEeDD>+fa3Cuw$DTBFvo`;t5uSiEUe~<#O+A zy$qo-12^swP?Ul-FL7MZb2hqx!x>`Eqoi9TwCz6@zr!zqsg~K#-wD}1&$^p1$6Xy) zORzDq`*gW6i(JU42t)TJQ&mIqk{~=<2(W^*@7dAgzFL?->+sk1il09}*X|(1bs|Sv zs)U$?jf0K3Wr81u|B8?%mP06p5eH1Oqmi_~3w3l&K=k>_LgX8u|5lx|RP_Bc;hPVL zfSd3A?SV+siktYtSI*DFG3fvI#{^NMCx@xt; 
z31_#Z&BBh*M}Y`hZ%t@bmigzWh^>la5M+Yw`n7#bpl_`aRSce|$G>6$eZ!YdI!BEoc5FMC_r(Hyt0+%g1FaQ{$5 z{;=xN@Pkm}rmAn*dAQ3jKItTNjWsj>{q881_=VHAF+F>5{VPtVBv?tCfKk|JFwusa@t?m~xbHBFi z-e_9K+uFMFX5NOM63g(cHmWfKI(xNSM)3!-+@&0Sx^lXka=XA074Tl8)kIdgr7Juh zER?M~xoRs$s1fEA)#oK!L1k@+wtT{UbrL(_{m8sf@Paev?^#?Dt|bqgWjDEx`_bKa z`!{-cHTn=!qNX%bQt_kpCE9bHb8O)GrIYh%nGs7%{MYD68VUd6eJ#QoUb-rC6L%9~ z4z307_X;VK@AqVlA&wse-mC%>p$d7u`suLmIvlMR(p$-8Gt8iP1Zs2K7r(qFVR;ODIVED!5u-On5+GBE{p5bkbK~@(+}zfp zJ2<2ia7sxPTgOB2(7af`quT>Ki9r(AMxx-JQI`Zt^vz&Oc~IU5;u4K59;mZ*pt7|tc-w!TKJB${zO3f8U*7*o zaXp*XOGEv7SB*m>HhcjR_})fz8&Y}pn^GJFOz6;GrP!CLbx~D0 z{a8OIxt||Jn{?${yVCD^4m&PxLMOA{moc^Ko^!_X{x2ZeSh1>)}s(sWy@TrfkVb`WmgEoYl8cR9bybAmPzFtMtwjNxvLIN&y$!m+h<)^w(JV^s9@Z z2{~i{ziC^mzTg@vZ7G|9mhz!afq5RHVke}&_1|{ficXK3Ies~7gyNWLZWxfiOZM5Z z=Lji>+j_VvguDF9bEc5A$O+s^X_>uaO5kmwO#MiP#kFq|g>iWT8ISVdOQaIyRQVc0 z(TxWW&3t8~rsQhKO_Vs=6^yZ7oN;_&5^ZH1s;dX<@ZW!K#C1HgXzt#^nHgezBr%4I z_cLLmB;|50PXz)L;t8wW|TMp&i9=s(1X8E{(!*+X%Yas8kF1Omo< z;seo$$ovTEpC+dwO_){+S;<2|^8P7%HYDR!vYZEy?H(a`ONAVXtGvoQkQB%$W?*`q zO~8`^6`NAe_MLhEGeS+FA>s1c%FCUKikGyGPrH@jol}glEQqUQ{Wqhg-OzwJ)GK=2 zSB4K*Y%{7jfmmZCBM-9n#3?h23}o@48aFwu0V54E;FUFYz0QmgLenyU=Wn14EokLl zfwQD<<@YOTJwc5wm(Z1uW9$LpFO^yZVWN<&uTSBso7!4AzBiHsEtyZXsil$lXY12i z3Nid;Gb|WU70M?3bQ>15Npcoi3sMsQ`n3>W;(+qOYSo)Twku_8C=ix?MStQB^!j&f zfhkTz1>*%_@)VKmqdN0nOt6*Azri){hkB@0$#75l^V(;3e?eE<+}>-C{kFZ*LY+{f1a zR@F`*bi?$cbWd9rFn;7fob@nti5aVd!GZpQpJ77-@k;$eIva7@ zkMsZ=iQHZ;eTgRsyo*D2PRy2H)$Pe0Q_KDfWF4z3Uhc3MV`pw((^e{i z8m(__=kSBa?e_x6L2}s;h@q6la2H}!vnqe`-Ksr|_w|o`F?Ul1!-aU<#vt&WKtjJ@ zW9ix@V!Co^TO*2~vVPs9cl@M5N4j_J{E&Lk)BQ_Jla#uq>ZwN1uSbT7mu-P~N(qDu z-+PM)`-=Y|A9QoQUXjAMJug=x#nA}i2;ISttcqtr?`XZ5Ntm#6$kcCAfRO?DR8w@` zkq9|*>Fx?W9`|LZ)h;_Dq@=6zUwUpkV)I*!Zi*rfyfpX{j9{XgVWT)>S}C0G1Xf`Q zH(!pnK$;)8f$l&K^q+OS%8YSZiw6o?q_)h#C=9c~Gh6QGPuSc2Dz2L0rItMn*8hdx)SpXHKX>yk|`>3*cRqc!8 z9kviR>c0cH^Ghz-@afZ8(_3h z8Q(~escvA2X>BoK`82S}e%vV*MBWst~)yK0YNB)gp;dHYLO5 zSz~9|*@F<=CIHCVmhi7&h-Y|QDb_dYjrOP=}S98hqUOFRI#vczD5zG7HOY9NrN3#5RR zmbfkWd|gJC374Ig=ND<$L6GnR>OCgKO&{9>=OBoHB>`$IqBabf7~H*X?&$4jWrh^^ zx}thP3ju@?M6vE_Hem)w#E}P^%;@hgtB98N+^=8-U#z)lo(6Uu*b%DpD{on0e5o1k`N4bFiHO}(`v&c0YqSE z)Pj_fQ*%{z_C`%|6qD^^cZw(X-M}t;IPVhPwYmAL{0U7*psxU9mh;7g`Hver@D(_G zUzZLvYF1{L(aqn}O8tmvpF!I8HDKe%Z$|VN!O#zv=<{SW2Pb0;sX6mS-)xIUg(Rup z|EgZAIr@?U_j+9>TspVkI76vR=ltnEy%Hv0 zuw0yLm#v5s`YzEK4F;A$QkCgDdldaPqCu|_zP3V zfxqZC%`amvIj`_be>+(Cjf>gbB0Tmez29ryo(|z|3E@(jg@2x?WFJzdKfB!7zB;T; z$rZ!wm-~$ z5o+HC%S=u9|2<9??-eG*+$1xQy5pYW+Ai8BeHnAJD30PpQfKKC)n`{J?RiUaoVO6J zOwS8@Nhin?}{Pzh9N|9)Z;L$y;B2T zbBu;%KgGx8Ag(6ZlG>iRJz6CLPq0S3oS+AOG+;Xf^rOvylLQHN>57Scnz==QQV@Sa zAf1Gaa~5Gx-OAXiKp2^-i_zC72sJ{Zn`{M|_4h=%M&QxC{}B~ODSm%9h_TZ;e zc37YJAwIqYto^o@kH0|&7!ms(oxt?@B>4{z)_w>1G=XEQ5ZXOe;rJvbc$p%^A!yVxrTK^@43^%0ieb3iS#EtO;?Ql{G-?-E` z14Br0|4bjFFmhJ;L5$#_hyq8>g#Pq1e|qB8*vdTte0@A!XdNEr#LA*9BbPd(4C+N> zfKmV%Czq-3&k2zY69&WFd1wPh&*U2ulgrlc1B`-(M6X z8WWV04FxZ~*W;0T8K-HrwqIW2UG-`z`=o802;NH4Gil%3g_&|uQStNRy~t=osH5ZJ zF<5xik4^4vFb0#cgNBjJ9w1YsCE*o|uybY+p{Up3m zs$c#HlKo)K1`*h%$DWb3socUJMiSuVlN4-_^RG%ZYnMckCDe=*xBdg}7B@+A7=-|5^ASPb4p}Ewdv`A zEQgqkrN>=x-{w}#lZR&ehqt6K;^cr#XCiB=yY&kWBn>?f;>|9v0OoV|UC z@^e6n)Ve$%jomJlD|2hAmjIpRalU=xf9boMRX+W*O5yj6_#8+Tg@g zFxrumBs^jrc37OJxshf2(CJ~Av2ed}I&RzuY4y{)(EIedl z1YJM;wwH`sgDWD-G+(1E*Ir}uJN|Y?h-cXAB|i_OP+5DlyH}0^n^10+>D_Eciih2e zF{1Y;;~TZ4kp)&BTS)K++YI3oe9{~#K_6?zc1HBQ`}M(}-&#Kn zGiPh-AI;`h%NI|o*OqcwgPQmQDyw2J^6 z>-!lo19?x_I0gdz+0a)E5#X1t z`oO@K2c;{w=)25DtZPOPj}%|zT2cV+-l`(hRA++~zfn;xNv&xbB@O$?410&7IYTb2&^xYeCZT)onnKh8P&)_ z^0=cu+Rp%a)>%fS`h=X+%ZwmFNDBd;`5{heO$6>)F`-{nGsQ{}A&27aeB`UqO>PUc 
z2;`7+9~mRIh_%=>4vXlicTxWN?omHNr8&ZYP08#LFZ1g$ z`_dS())j?HvStK!Zc!}d_lX6PD;1cpY8$y4NVJg?i7(1_6eB~&sV7&EZBvK<>@3iqG;r&4#qTWJ$I z9H3~1?b77GFV1ZAjtu~lKk3qZ3*M`l_(zwW=)anRrAn_Kyg#o0)HC2IG4-(?|5lun zNr-f*5HEM_xn`fcd?~oucfE3^`$SXl!xtZA*P+4(p}) zq4B|*+6}i+O@*`8!16?t=Kn?2TL8t;MD4<}i@R%rJAtqScM=E?+}%lVf)kvD;K4(1 zcX!vtgOlKH!QCD1zTbEMTlG)X*3L|A&*>xG+h@A_d2oojHvzTDcLF1qTM0(D=6%_3 zja>(j@dg|7)6!VQ%?#D4o-nB zO9USJdp%)rekrvwzV_yfOyn=~3luxLnY0^ZY+*j+35JH!S61Ff9}Qx2_-MuBCt5n* z!TT!rUfjK@U)QQ%VXt#Ti6>nOgTe*`-2ZIX`Tr$cWL7Fw4K==OV5Vd1ZWakc&fmnY zJ&^i?i=Z8xz1LrJ>PbJ}swMXIyzYm@`6u`oA+(K29pAm=!kQ;kCs@fsth;?ms_^5u+Hqr;T+*!(7%Xgd<5n= zF2+F1pF8M(T4HA|I+!z>@(Mq0H*c`_mDc{gBF(_FeI4I7$RawT4P#!Fq`dnA+~%{b zy&PJ&;E}#r#jd@*KE+gU4J@5gn^A9QVc9MFn|Rg(tiXpY2eXxp zfqpn}GEEuxqxJ6YuG_O8dxNua!8jj+Dk%|uA3D}wB^(r@VXtb8sy~F3Lt<~T}NB4*kuS!M%gZOqxNVV6oC=V zLO6N^B#_d+?|KmlmUTRzC*N@rwpB%tejSe|sHwZejmuiw!>@}|pb%GjpDKjK@b{=I z?)CeuY7_PBz^z#VKOtMOQuA*-f0Z!Tlk8$?BzH9nkg}+@yV1(=F(pQdQ9=E<7Tz--Mw`}sv_&=n+Yh6<-IR8k&6OWl{!y-{ z)P70$j=#-~tK2`>U^GN2npD)2+wej>r(mbl|h@Kog4c;gWDRw#1AnC8Kv$Q+L=G4^@!?BHcu! z*Mgr-bU%_Kcg=i%2jQ7$?M)uEOC3{Yn$TWyvLcPxI)1;H3|~TAsVRJ+<`Ddf91=B( zqRi&F$a|~EiChKZfk=EV0Ay^aowT*WUk!?4G*w?fNt%sw1;^PDEQ5V^eI0u5C!bJ$ zC)1%K16WAV61n7-T*h-XGaxm#n zAuF%|VDgXA0drFzeb0r)pO#;}ir&0JZ!K==i7nfES6})lLMK{^<>sw|d^(P!>ordb z^IL)e!J9HQA&GI%$GX#cI6h}^NOCh=zzt=pc&q6*h)AH0vstY=t=v#}F}^Qbuin!fwn=*8MPV0}(KU49L2i zuE*5x0CYbz^9;_ZTBAZr0A(j+WhmJI9>>>44L@rNc=yupzmaE!Icz&7@q}%yOmLib zKQzXZC%Iv$?{b#z>C3-MCeAWPWaQp7aem#%1@#w{+yAiNtYR_fql#$w@;1Y4U=xc| zJK)GeXMCE!^oQZU6?=AHQ@{@w5Qbpzdz0OG-%Zl2gZSB&ZYCTCv{zX%v^nV-D2|8;Bh?nh2jUnj-!3Zg~v8 zJtjiA9JS^dK}r_B;bE*NjcVcU5lc73%^|kWw1Va7w z+z89^s1b=sP$>L5ygn=yvjes|)4?>o`Vj}1S*xFc^}2P>FyfAniV#EwNySwF;~8iQmwv@k5I6a`$;I!QVPaqizSyvS=K>F*2?^NI zJ+*_V1~7W^jDfOx*o;AYQ=l5j5mda_0MHqO^w_vM{S{zVDtyJZL=3pFAy0&J0uho! 
z-THdIp@k`rk-b3hWLD2Ia0*9`PS_;}gzX_v^2rl~*H|6cKqeW&t=M%%h1OTXFvYy~ zWTC-B>k4ih_DD8^*M0x60&z7%rjOeWUtZ*FVB$o^-xdoNm&3|sKD8|JQX##@)+i)Z zWx?hdMfv02o4LldAv1xCR7S9z07QS#3N|n$12$s9g-IZCzc*mJ4J2f+Vtp9q;TyRg zGi2bJ8kcc7ws%c9H;Cx48L_l7Q_b+CTmCiX+J!XeWFMtzxY@2RZfO|B;S26>fiFNI zV4RVu00N56E&3>0GYfk>tnmOh_%joX04NEW_=j2nZzLf83-Sco6k0yw6)P-I`C;9AqjvL)o@ZClF4$QBJnk&4lz)P8}Nf?wC;dw_@~B(PmLgy5D>@G zRv*IcY`I@hLvh$Zz2#odNI<;X9!o(&j_DB+P)}wfHFH8tvl*UD1AW%vkiP|nJdl;g z2%Os7NwZE7(djm=`q3u)p0h)6*1CYVk>Hi+f=ND(pbGWLJF_j6XmG#31ty|@*8E2j z%Vwyz@Uf`^F%sH$G1FaXD@6s~T`~ms{sB5)r=>##W{VvaUEP@K3Dgm!L$2O>(+z5` zt$GVq{;=0#WVm~8(4q}6!74h04um?q?IaJmn2q@HvC^Q)Hv!_rQlOrue`God=d+}- z)|Yg6he6Xi)8D2+@=F{NGb{?^W{-21T5Ux~qBU z!X0Gg$Gua&fm_acBJ+WKBxbis?~oGWhv*G~iZ$Y_xe%VtjF8`H%4oYTaZqdYYW1=`X={wJBX6pS%enJ*A~6gd{!yQ!exqDs{Z`ZebPm zCi9vzuD5XxLr&rcW*H}8Qb0cq{ViI=_2KV8DTDXOQgl?Xf1Z>O?6|6W#5t@zIs>Y0vi;;jr zxrr+uP%Sf5iQ4)Fz_lf z3N5pv&VU1zST@Gub1{x_CN=IPAgq@MrQEn>SDeKy^bF7I?9WF`2o(bW{W645tFQpW z%7F90BV}PlVzwsZQ94G+>VX9)MK{LJ4>jra|t=K{Q0Kg;-70-)HLB<7Y`G-y%5}n$kN1p?Ie1e%W#X z;_oP69aVT6-fpc2Lgclxj<(Eksimxq`|Akqx4_H=Xo@I1W`!`O^o^>Ar<7WYE*#)_ z5HWyu{KH$Ok=jc$Y<)sal1Y2~wN!%69sW;xIDIc`*q%+L%ou!@Aw<9*|*925XaKa|QJ&FPQk3HpSpvY7gm3F|(f&ctKrl6-G18b>2g zhLAf{e9*$60>hKck^hq|iL|P9vmX^QNNP@q@`Dci(kP>EX^A(hB1HWOpMyjA&fV~V9fm_7OuFA#YG?+^Y}qvEsO59=yp8o zxN?<)?W;H?GT3l{nYX3Eo@Al*0EE{S=ziHr0dM16*A;aQevlq{?Ud@)wnofbVUkjL zb6Q!9aNA<~19!$`B{*pK_V8|uw>f$9T)}%WEjLmU2tCfloyM_a*G=+_4!{aQ#XjFw z+d9e{tS!C7tFuA+;e~|oJABv|lPZfkFn63_{I@o@GA5cs_7y&sAT%zKyfvi1_9xC7 zIfXdM3XY8|ZS#p;fZ7@s`Ku}%{C8Qo6Z{e>>^|?Z=L$^!7*t?o1~-?}-6a=;`%6&c zX*=Qba5?s}OMN-`gOB3c0YfNncwBLaIVAj#JW$>!L#s6w`wE;sg9W?+zK@_2`h^-l zVCVzQ?!XMl~9n8Wd^G~spWRrA_OB;r3rB=g|`AK0+ zKfs2}6*+u@C)1&w%M|tdJEMs2j=4kbdQ=JiR4$nCemLAQ2n83o-MM!C$mlr;P|h+) zdr}BPedD&f|2X;S-}e!9!4nwgMl07NoOkShXI4P#jGr~RU4;m zq)2SgbEg7?ITBg*tY;654jg4o?rd>dYz-B~7Hon^C`|o*S2rwpRVq z$3Ipy#BKr$j3dNtCAPD)LmqS7iG~hj*zAys^9x&|r3a~i4Jn4Xl9ZysdFX0##TxKI zXcY6>cTid+xPqastnxXigM4^zpeJt}?%4wU_j(^j2PHTV2dEki^mmpbdbds;nX*z& z9Q1BG-x;YDyX^D)1O!(W3o(R$46k!JbD)^ALaJ0%|Eg9&SZ{@t!wDYy93o%BFFbMV zN36pQt=}xkUY2Xt*d3`b-6(0W??q14s{Cw_Hdxs!KM7t8*`6*$0iYZwHpWFZ-zLGo zKP1^9&CSz|sTNSNSaVL|i0n#xQ4NbQp55E7rtx&xs5(RYfB1-c+b*`DaAtss+c;7Z zxrh?xiSDDhPO~Km|gk>Riy*msGXRAlev^th`6 z`%%=Lk2dfpR>3G2_5QMmWI$R~b1HA9-h%M7`>N9*^~C*PblmOAUGh}Wb#)fSVGl}E zDsYB2+DCP56vY_Mt*P-3;MaJ*hU2{k4#5 zJN)_Obz(0@6PjCjkml8FBMyA{zJk@WRWc@a%tK8oO#5$!p8C4@Wgad=#ooS)vke{9 zGwKJ^=Rz{G#RK0Pd1@4(jR-+|(|dRG#Hd4z()VvF{cXX!pxsv=Mj|}QHnTlEI((np z9+E9Z5a#Sh9Zqkx7k=RQqHsF2d zX#>dc+M*-r_>i8tB(~rIj}AM`YpX_`RF+4?z^Pg~b<7Dyc1*_oRRMR$z2eByIU9}V z$1gAU2aAOd04&vhPDxL7@h{bjtrD#c`V7yNCiC_4xeYtr{o{QTP<5s1Cx(EKRP@0~ z^-HL?-$N~&=xK5IA#$00OivMMSLq*H7P;`-`vPQ@=ZrMgr40DoCID@0diM1z25r5Z zZRr`zU5yC(6AeEMAc_R3opeRImfZ_w#I7jY0aXSj%O7t;Q7u? 
z&B%WmaIkFjZCBp7Fj%l=x_37&E&cRHnD>g<01NQD+iVaD!jgu4FD}~NaU^!wFA<0_ZuQAs+I4V(qGQX$H_Sz$bS9{$MDF`LXqA}#} z#UF!s;obMkWhKnNPI)`$H>-ZF6<5RQeMBWk+n)_MEKP|l z+6kcprN}4Go4C!XUb^~Kz2g7CscS8^nkR?V}go@Ik_AMd{JUhXbMMN~q;Ar=xl!+ti) z|1>pbekQYnNa=Nk=krL>ocyG`rnRKpJOHB2GE z?~yY^LF+=^wHg_7_LQixkV)tj(x1GQR&is}@&J1Upuh#)NuV)YXbypNqzSDmh-mFs z`hJ%A{or2=@%!5XUcJtP8lIh}J>=1A^72n{TlBnP8c()neznxB+XDWyKg$)5&|tb_9NO0TkOOOb63)!CS$DAi?1StLC;YroqUWC=Mf(j`0GMr9Z-IwOGVi zBPG5IlHBODJfC>Ap|qv~AcNH#-nC`c6hqjgC1R(zKf>&$`%IVGj z-&z3iWB~>t50UrM*lW!4@3>Q!<$%I#slRejtw5`alSihe-z&fBefIDWw!NZ8}`E2ad|mvyS*T`6R^B5?NV7g;%9Q*a%_{h*Ya(3k1cYw^_`xD zQfn1X8TAX6`C(EZS<}X50VCimrR2|`mD(nz(lQHA%|EQS3OBZ~%KrrVS|Za-m%JIt z&?n-a7h2i&d^7UVwD?drK51?52ZxAfwEGH+t7trFa+F*e zE%9fsfzW>t8f+1MV+4kW0Trm*rIEQ0y+-jkPPtrh8^x&kS;B6=5v$ned-MyoB!*Nf zdI6e4zF#7NNFaB0s5{iD#8b1rd|0aG6G25J;k4S<5{>UcX-@v&&s4@27gS;{H8o;! z`QRmk7Nob09S()NVbp_@7lJ&QcerUUBv;K#0@39PGCQ4f3iR1FlK$@|r%V40S&vDU|p z8zeu*3v2I3@5)M^adrObMwE9A&>X|&p2oYe=UMJH8GKzNyzAA5-@J}Xnk{Dqf@Wme zGK%x^ll$Ul(#zR@C0Cy%!MrhkMtWt~R>jnUQx+aidiv+YBtfqpkHUi=Juk{G-FWpz z(oMeYe^K!pn5u4wq*mZ18}^de@im$XxJ;KhW3NNc9hA4h508*CIg7I1-NikvEHKUf zDIx?q80yS?v2mq;`tbe`N6TUV+%3UDVLauZ+fsiI@1NOpi}kwSb3&@u##~ow4S#;z z=%RYPb>ehB`kgFC2=c$>v zAZ_<$61dB}@O$HX|4+=`==?A-?WJR=(&xT!O!PxvW#w()poL%JWT(pXj>3>f!fWZB z9*>!2#UI8SA+__SoWJ@_XG-@aHkhg~{um238-uTtt6PC>NJ3Xknl6|-ndxz@OD?PvnE(80TtbO{ z0QA5gXv=r-em=3cV`OJ9XgGgDhUGbIQ|ZIU`6I%pY!)8H{!LeA?m};JCJHLhT|Q^V zWI7YiU7hl4(Q@o-Db+p>KWe2FemXCa6bpZjSO<}e=-EELj@qIGL8R>UcZQx7QnHdC zID7RH?DAV#)lGq(s6&tE`sZo-*$qGl^t#Se}OrUGz@ z|5i>weevEXO39W<&8k8}11O{EbGZ&Z7{?X_?){`eyCG)MA~0T4nIbS-E24e-^=l0% z9$Meh%Y`gxmt#31-q?jl+x*bk)z~FiWIQl(we+a*t$J1|rLhnvz1GX!fh!*5?aWnZ zpp*aeo4(AG);*A+$_q=^13G=g{|d5xG<+0WH7bvKImkhoQ^owW^uu||Z>74Tbr;lM z7f-U+NvpiE**JED5Fs@ynaz2xPeGb)GS2lL?SXIjgjq}f63{!K0Xw2kcnL763qr9y z;T7C^_q0=)@iWYs(iqb6&N4#u|7yi(@^b#NqAlP?%>R`$IL`yh@tk=wuEpUr!Gc7} zf?;Tui=0HBDB$HxpEYs~w6^?KFYLh*$2*vnn+?X!e3^z9AZmlXnyu@HhWjron+& z3oQOVLu`|+-iZQ7NIgL#p^>HEO_>sx zja6^40Vmn$e^j)&Cv+@w@0<9?!JKrYLJYtr|E^1B6h0oay`1}aAymmv-Le9$MUGeY zV-0JA92gQFpz@+z6fs;XVL?|6mMWBzI-bYGpszBN@_z&0YM<9T7`wF)Ypg;aZ|)BV zA1f?UiBE`|=+5k`cB5ndnu2X_ZQF44vQD+vJneJRU=fHA>VkT4DZXSC51UU0??Hn_>ROCb_aA)Tqqo6+gcEe8*v~w+v=~Ab&3f9p4iybN!{?D zFawSN2=L4-nV&)G2gkHv!`JJSk^)&bAJgTr`oXv;xbjH^%y40*+BHT1oUhe@5CFu{ zSvRnZ;5TWr3J~C5O+mV29;kqEefgyaG60B#*cii-i0p5{1lwtF^zmIGeMGDRcvUbF zf*t|@;2tiVwCC!pF)=f$H~{Uq*@tsqVV;1I;Ruk>m=w{0DbQp~6auQbe>WH0A_6MC zTmXDwD=PfQ|EVdC#5vFh2j=X>JrCx9-}&!=cHxa^!L3l0qUqj64~Va>Q;I`^8H^#O z9sip60rAfNu5pO5{h@eEaQMy%F9)Ejt%45dVpDd2>wk|0uMq(?&?OIS0AS#f0+CvI zw#zAN?Z1oki5)U1*&zr~@IP#ExO@%0!7WwzsVU-GIG;DXCjWC_;>c6{cQT;uY$29} ze-_LEv3|4jnFxSp^n9E!4%fwJGY0<$zwU{10|a;&?7*2SkkGNI0rBxJ5Tcqp0V*lF z_t!2y4y-!-#Bzx|= z^%ZRdDT9nr1{$OiZMAR^3pIG2hyf6}p=vXZ|3H9!bot5WjW>Q<8GKx^pt zGjK>#_cw67cwd3?I!!pebR-<%-opti)P)dEyb(VpHPO4{x(Lh41@!&Vsn5nt4Vj36 z5&#^ph!jJG2+aU9;sg84K>qk%gBzVKPbC35paFowqXC7zz#Uf*HQr=dE&$VrLs9|} z{KcvO0s0KUpm28lKXv1xbZ=8*wNW9Vpy_O1yhM zpb#2{1a0sKG5aXOA$xP+Wf7skESw$0Bi-nj`5h8;Bg9&-6)z7QDi2ztMP7vAX3Um@ z=uQ8s0Bi5qNEvd--jINRiW~|;H((9DZ3zQd8y)$9ihDa<=j4C|44G4jZ=~nj8z6!i zqkq6P7KQfKB3%gL{U_2-kp#fcm_>r30-(3=|3_ou2)JFFz(nFXZD@hUKoJ;Kxn<_U zGimm2IqE-)ms*Y-^!^>{QdlB+$b&G^VcztlZ+7 zP)0C0vh7MM{1#hTm`HpBlvN1D1Hk$Lky&9V&~4Wbpe%V%)}?>}AR`QL|7YDV=s><6 z>nwD)?brae%M6?vY7&Wmu-f`Z4mY#B&kijBe-XlW4Xokcul(m*rVQ{lN_!sz^30`#eY=vQ=vks<)xHGFj-Z2X|%n}jr|3Gx4@-=LFSHz>;^ zUHLzCfmt-Ti^E*v`+VROdJ*%lIzo5bIFYk1_1i1;5#b@XL4&)o7a%7tS*ebxEC}~U z)XNf>v3K&r|45?*QfASR;DLk8oQ_VdlKwyLg?sUDF@T>rOXN180{mxb_XarI3^Sk( zLS*(WYN*I`uuz$M|6j!rc-A{iv8c17-t9314(WhH0tYT6q5>qozR<-(O%CY7JA=eE 
z7Gt<6g}fxy+u`%{w`p$y`XRVq;ZRUn?hXE3ZDSnY2$R9$dvJ|aL6QZ410E~%zc_Er zhR1ope{r7k|D-Es{@a(_aSftyA%x12qDb&?Hu%%bs4&8jUvYtRLyA68D6kbhor_N0 z)ltLt77XZj)yU4EAww_r`}`%<)8Q4kj9;m&ziGukWLIO?qn=RdpN2dCT1I5nwY)@P zMAgeYZFZr>@D~6YqAb?>DcEJ3tvh;vC^)2AS-+J7Fr@BVX&?LvqMp**Zb@W6{m?k$ zbL8&`4dA*eSqbR;;)atn(I%=zNw$&Sk6{IY>z{3;g#)>I2TRnY%=DWo0HLI< zs+`8Y;nntQHlkLm`%Gc#Tjo$}vl~g$&eP}ld#Q_tGS!_p{M4c-E6wjth=I0?v z^zpd;NR0$nkHM{l?H4E5AUh&N6e2~J9ZH?v5N$6!EiQYk5f@--e68OLqHNY!7ONQ@xBHrh(0Z|? zXOp}w!eXB>cIj%M^rO+|tk`)*i8z#3lfJ<%W$F{ z*Pkal^3A`#-d`2h7tFp_zgyfJTsIM`8b0V%eUt7ph*AJ&Gyd(d7Ek;438wzWauk!n z=3LLT<%>rghizDI4!(AS@29_1j=%Kw?45s?i@wxbjAP5+?c8p`(nyY}_Qs?sGmuwp|k!s%)(^>aA9?m$iT+->mOpr<PjiYnXgBku~V|0TXv!u*LMwzdJp#h z+>HGwwbN0sy=-Kbx7U{zPxemi{a&fo#QKGq{o)qLP-?lb*=v>fnm;!GPt!LkPf+Y# z1{K*@w97d5Kc-l(Oy8g$Pq`=2Z9QF&&%t{MzJA-Go5`BKWfspKuicfD8o3&F{Fe)z zslrx`>W151ga)Exb%&2OM>EdHMk!g2Up8U~HFw5aU*9lYzl6AnRS9{TP=08=fJJ9( zq#=hlV(Gqo(W-aINyT|onzYbmof8)MGyiz6_B!l#Cz^Y-{cguAq_nu4GoXt8Owy)H@LTq*JTdwdL|` zJka?!z3+%nf5zw;N0ln+tLx$B3$@6%B?>X^pMNpzX`@QEMexZd<6`Jq6w4i>D;mv9 zmU)XcwqSN6(9m!E;cDDLG*z)2Sjg%j6j$a&Rt~f!r`+75?gpTP~>~Xh$l?}V7 zS1#5PiY(a+WPHOjFE*#VFit+|?9#8DfC%tIlb<><@b?@W%S zH{X>F)azt+oX?GzY}an--&`DxR%CvJ(o834cGze7u@c#?+aa@BY}6qcc1p znAlhM8_%)t#n*ur^oTBK?%8^6!#fZ~^nUT!pB`V{PZsNHNTBucg=ekeypdzy7r6AO zrGsv;)jwqH)bY&nBG7XemlF9sB-T|t$9Imt3T4 zvM(2b<1;f_kPG{ipJeR^VHteI``(Lk>M~R>pFd-Wd!($$_{@4zEtNj!q{Vs|?tUDh z6BT{=%vOs$^g8`e?aJ?P+0MR{mo&st1qt$+;IwB+8>e{yHpXpR{1 zB!1fz>t14cGt!$&4j}J_lekU0V`07}9JLJMO*K^ct~L9b6bzAZ0eD;hN1EuwRy+U= z3fM}aDEE9j8w)iz3wPk&?xz9p0nl6oc2+PBXTM*G`DTI*86w#> zUPess%P5L4m(Mx&@?b3 z045a_wam&!a1S0Bjy6KnLWFGtGALF3UH}M_04~D_EFcu`Sl_lH;EkRq0Q?1&{Q)6D zCZu_A3dt#8&Y)!AmNvx@4|tWy>H@6Fr!s?~pba~7d&UFxToLMVW@QQ|RHyS{4qnhi>`Ap#{=Yz8BruSQ5v>prK}=n(#|#&Fg8x&<~p;hN>H z^FV;$FnHW?_}9I1M$UtSk%6f}xX`{`z}mfDuXQjUUKaSQZ;uJ}Mw3kCb<74xbBqP>IiZWHBJ0AC2*U_exP&F92A_#$$u>&zD8)5FPv3BJd zB`g8>ZAD{OAPyk@*Zk_eB~YrC14ZII|5qCqzy>Z3lYl~8;R9fc^*ashO91QvyvYBz zO<{(QB#+W=cJN6e_4}SUzEkQ?(8{em_-E<)<$6cQ0O!PdW8|0V^9QuDw-J94=!xeUq~Kd#Q*J7}eCxg{cuL&BZ#w^vV3moamS;OHfr7 zY4bf#5S8ERZ&B5JJh%9Q7UjQXt4d#xrhVv+n(hDoOV!Qw|AG@=cQW`PKCh!{FS$F4 zXbs2e`O9>zl>uQ$kJBmkO<%gF<%5oq(xJ+sT(THpbaZQ~|KbrPRU-~7G;jW!aK&^{ z1#-x=funs_kP|=Qx6;jtl}(?YB`;kix6LfWeXw6!=A9a5tyF%}-wIjfE6^`+f&^Sg zOZn3(e>{weDK++7U%JeZl(wY^JF;Qi5^Q9~f9w0psDXUxiR5J2N56z)Zq8G;icyQA{DQ zatUih=T~t&r?)_@Z}*)$_2Ol9v^~<HI`0{K-KRE3 zJZH?%t@hi2>xFtsy;!#oNv>HnCid80<_$=b$(L`4iv+f|XAG^QMs;vu3al^l!etzZ zA@!jOB2~HS!iQ{QetcrrB3Hn7))yTgxJEQs*8rEEmcYRmm@W+#s?6 z)2mBgk9zJ_UA^n7=TZ&?3{NQZ$2%*JXEihkNpJGb({INPpsu7P|0uq!RT7$Hg?`oq~Z4C`;!JxiZ)`@>$#ol8mx)kCd#{j z?p^$9PuA3Pjl1=6rDu)F%*@_;MX^d-oPcf593c1)5RtA8h<0E~<(Uk`ikxq4M!uqy z^tOY5km+KDoYL8xlOu%^85?dzzMYWxKTRz8`cpd>Lkw@a5sT+;s`F233XJBT0Eee> zz1nd?;H5U!#a!QLCG4^$jVrM}AtdMr&d{KCLuV?(;s*H$8iO{dLPn0u_6P zmWN8~P8QXwpJ(Sx z0Mw-2fDF%E4U>x2v9yl8vaeQ$m(uf>5*oDEf@9Sn%wVRO~;+tcACY1G%Wm&F-W+6@sN87_BvROLxi)^}u1tS~VspXV0XT~VR%FpZ(Tjo}mRUaRH4N~-Cbv42#I_^Sh)Ped>Jb%pUFDB==B=6Z zxhQ}*9VnX&nUS=snV33S-dpQE2$?L*Sg!ikZ`NSW?*XU7oyAf9F#t$9?=6p8!tmqV z{N4VA)xRB*-gbk+&UU}twXxr;qEG2BA`KiL=ytA!6E&emy6|qiPmE$n;K$O<2d+)Q zAP;HZXS^mF-}_B{(()PT7AIav8WQp$$(v*v6T<$5YT7oNt!!m^tKQiJY1*T{yace+Y!-Os|CHP%+2JxCsLvQ zol;RqFriqN-@?gDE!7Vv&GWZQI6RYcj;sJ>M(vpviW`sP{g&#O#|ry|kG@urjxwo& z(*juAm)*r6W_?I$%s)QyrR{tZ$I3^;+0;BA_`e(hqZfp!AB|!D!0yb@%K4VZQ@y6t z_Jo{qRH3-yffi%IK{Y?WYc}dji=35|)0XsKN5?Z2jq^H?+W@dR=gew7eM5x7*#;xXlDV!pgEm78-XQZ54qRor_G zZ#eOdNg!=BP*T#; z=xDbu*V^A3@ldX10dt#U@XzG(EHP1qKtOE%3(*AFVeYT6w#EUv%b51JPP-xZ%Wm)8 zG=y&qSb1s@IA}j1otjqHF9Dh3RCpj$<2hQOQ->8mNz1w>fZ4{Wu6=qFE`s 
zCqv_@+oL{4JK1D_(+g?sw{sBFF!8qQVYjzcI>Nj61{_B&Kttdr{~&kRdS)Cz3+{z! zdkN1;124EJ*IkGXQpevcl8TVlI{R70ZP3=LHs|EQr@MEM9CghUL}MGrw2y!&=1XgG zCO}E&Y=RC4>CpR`s6%2tm#O>_Xa!NSQ#GNqAZ#Bd=dBGL=8r$H`CW#yIj{gddNr`R zj~1$2ND)CWQc@WpQ5HZEuN3kCYFj_OX9r$LAEfRI%PC@Zz*(}|0n`h4^xf9?ZsH& zPyu7ih-nOQJ+!r$@q=M7L@$U1VuLPD4>f<4BqsL4DfLQhNqQ$%y2egV^gcbo?vW5SEd$}XGAV@} zT)+H52IVCd(;Qup?Gr*$08U5@@b(vgecN_S51qg2?>IDnN>Xqtwi^xvPl)2I^(5sd z#{&KL8-pSGKNwJc+=Yu%GCE?6$N)Y~kk9NGlM(>Z#v3DNBO|BRA700?uGjx0Gxa2U zaBcLm!WCaAp|z)jTK*Rvtw3D_;5)-~$oG@tBT)OEhk_5I%l5C=0Ta|rtnBCtz$Sp! z;)#P@#)gZh1l~>w6ehc10|rWR^iYlp8lW^tU_XeakAm17mG$@(_<*#Hu`$p{AGPVB zTwbpOaO^io72IC4u2rlSi$FIc`$$PiPtw2*2>>>KOnEh)%M2YF6Crd4PkamXx{jL* zUmWKvb3GuiOyUw*>$V&MoCi4`S3@ezNVa<}Iys6;VR*et+Q1E(!_yztJ*iwK59M6+ zr0YQy&-bXllZd!T)X2#Aupmz$GnfEogSxER3j2Hnb~p^F@1um#fF0iA7#r+Byi$w4 ziHxDOJd>lfY%@X!$U&d`sH`~?AL-55hF})DiGR4XnppAZpMoK_! zJ&`{(s7wQ7gR|C+YOFsgpy-cSRUDyJ3n3|m3S|6n`53AsN3<(LPnuw=yd81@Ro@!( z{orpg!NMC-;+ckciK(8^B#2hL4$5OW5LP>;Q^9_3R+Vk3GLYm#+jk)ZavI?hup~ z3@GXictw7<4?oADhwg&vQUIxgGvL{$SgV%pGQaI#?%Ev3Ot<_*7lN9{^IKFAM#Mgc zn9;CLt>iu#z0IwXI zxK`qaY<^kyZaxi0n7%9MGydqZR)t8xj<5poNuMDCqu@^RILR}GU|XM@k+qPmvG(An zKAJ)_E(c44)rwwi043aoKM<+$VZLDgPb++__GbgH?Tp#zpyL=<0KUWyq+zR_8f1$r z-a!1!N?cHnK1Zl&J10GjF31oy{A`g9jMFl(ZTEhZE&J6d7djQhG{<|N6}Pf4x~ijY-`)CL02U^0+; zDR|~e1DDBSh~8B;B@>d7l8_=yqHL8$1ljB&!1(3xJ?1nlch7}+6c*t?pjq>w;FF$% za%9wM4bo#c3EEbRy`K47<`uYKO2Z%hf6L|#G~yc9Zn5x;kw_c^G*7L>Al6|XzFt4X zkj=0v3N)+31JrJ*Y57??8cmc^cVQT)&SWc#4>)Ja7nQ7 z^OuAEOE{Kx&F3x-WAwIYpcH^IRwO@s4<&|F03x#9&Mlt@L0^za47Zqz!j7^W$!K}N zy@#;OGQg0-TShxVpRrHun=m4zE>Jy%G=gL%&MqZh;+!sT?Q_BTX=rH<)H2SdS@Zrs zyJbSQMq%@$d>1l%nw~Tt1(-6RW+TSzGW$5Ss6MnVE?`(c>K`e_d0mcua8%Z(`3o8j zdrAK)3HXc|`_xz@wRkr7J=uXFdUk#xb{+yVRbf&`x?t{7TZ!Nd>Wzl;9b%*q?kWk` z@16Tfz{bIIwt<8RX`2dN_So&U(yOk8vCGo{Lq`9Zj*H)Rp38#UUq34V8%Mw(R)Z-M62^$E zItAyFNc}W52!O0n=(l@PMPxh^S7tNxc`u%;5u=6=fUVc4h!HxhaP>_v$FJFV7y}I^ zAA!_xAl+!_;==dtSBn{Xw(ZK*ANh+XMi=t6c<`m~z6IzS*69iR#NXhxuLmUD513L0 zu9R#q*bMV#$wH4hu(b6{bCDBkO@^4okrE1T@{ptvG-~$noOLiKO_!($$;t67iPuRThom9XFnY>1Q%G}g(vpJ38hM5h-G6F;VwP% z1bmc{I}EbBBpP@kYDJYg@3CL?@XS2cK&1j2RRqz$!Tyu;MF+^V9xNKYvKY7sk;m^c z%cFPt+xhJlU%bD^=h%cz&@#6>DuR41Zo3jAT4=v6W;@j+31l!&c}&#kiOlTK*e$|G z8@RnP#E|j*5dV{AFC{p`6ZZ71m{T!gjLoD|?panOrnTm?6_Js2;Bd5doJ z4{W?~*6y3H%x|(k1+`wHv|MBJ8VHm}ClOEczm{bl0vg9?h`1JmI6VeMy4SOQee!fR z_(AmTaPXPSqW?!|`9u4w<~TPN$n|dl=+e)!lGp~>aV}t{wAuh@X(k0zjY)d4Ci|yi zJ-t@`TsoblyDpAbDk7RQw?m*#ZuF>Dz0l8IUP{kFIf;Zs_8KV`SN5ct`$z=wjmcZ} zUG;4Y9Qhw3Zxo!^BvAR>#;#SR3f#&r@pH|;2zNAy(ru>*RCcPa#gIE>X`ffXk(cS;u39948CK!(a1nWZN*{4=sPTpRo_{Nv znn-W%k+FMu#86%b22VW;aPd8`%{M&)R(GIcuFZhrO2ZiVii>uDlq@<8Xtb);b-S-F z`bE%Ngf_(Ip6ZVq1ERyP!bXDlcE@0j#;9;9Wr(D5YI8Pe&$aKk*;)JEi(%A)B7*Ec zlUD)nXwm=yeLsD$!72$N(68SlD@x=m$HmR(&~VMMbof(t$F7M-!Ejm9Jh6iRtgb;R z_L@o_s46mF3Mxd_xoIy>b$Fg^Lk=E}>|?se@-I+cjbKXKra(ysid|kQ0l~T4(S?w` zveM~8Z@zEelaLP%BD?z?_-KCn2}BtFTKl1F^&(c`rzFHK$G zi2wN}2%vSXldhGiiGv6@o{f-?>skKW6w6;CjLk#8xTj7mfxU&&f+C@@X`G$M_d zBMyR%0rQM*CmnV`0bhDJ0EWaf4*Po5+OmHDRIjbofwPd8v6*i@tfEjL?43ol4iFXUcfsU%CUjykx2ep3CzLRuxZTbz={W8eK_!@ zs9xizYC49_6IXY?bk`KPT>J(oM#SBOpdi_h4)aW*bTo0@=Y~faOzA!Keu4`J07^PX;J!qoR^x zPh|m_edj`_zGLm&F`{Ya>q*h01j;LL1?m#EW9DUWoGE#*uo95(F_#p{kb$`}jRZSl zXtxJ9E^ef&8;y3)zMR!-Q3g(!jh_}lug7#gql*@JMlM8OLfE=rKjS76MJH0eui*&2 z>BJv-gqBeTNV_d+1@v2=J^%t9ae(d2A~fVkt(L;Nlz@kzf}k=0UxywXVET8tuiQ&J z1pIB3P^EIFg%Zw&3_Z0tOJ}s80=Mbn@D{`=J4GTE*c-+;ZzZ2~n@RFb2?^avXfX~; zk>KHRR|D>v<#DGe{u2D<8pw!O2&~-(AFtWvOgK*za@}P;P3&NxMNoHil2uo~Xm{NnEJtNp3p}Sl1 zKJcS6W%*eD7Xj)!O2>G>(2Y@5pGK4ss2S^WCdDB^X&(W6(gcDPtRb9Z;V@Rom4Ayt 
z_Zdk^EWz0s|9BAx93PK~B8M)~;TVhj0io&}^pBxL=V`glC+ufm+`w)%CS+d&((TGO z_euh2=vw59v?4`O)1!_>IkT@}qR8g)gL66&WWTux>|XMb9pXSPs$qvk`CAKLw_LaPgNFci;@5{d(wkVgtat$DQyF2p_{tR z11x?g;C=7}E`*sFwuO~O{+hxw?{7Zm&(P&r8OC)%r${EIx0gp#Nfc&sktFI6BHOEy;@ep^LzC?Kz!h+3$~K9pYg z1{?O$K-yThDwrnt0E@TZD{Rbc5_>8WK4x_h{mZ}z0(Y3h`ql7jc6MY1 z1It^^%EWs@Reth~aiZGqX!+*^|K;G(et0O06p5v_!8`7=I5X!*k+2G~Vd?I$8|I0z zi;4(VrJwJKP}Aiw&m`b5i<`nZ`#iSW%dL6)r4M?(rj<(m3?o?FC8l?*i}H#~wO8Ug zi}3W2Q@|wNz^J54SWAefn3N<`lWg@_vEcrT<06x_-x#WBA+|E zCj<0^FPr;VKE2A&cyO|A2Z_i)&*rUlrN$P-)@rrWU}jRM4O|ei;u;%HWi|hMo+;lC zLf7^^1Wg<~YtOeuS$HsYkk68ymNFf5 zeLK0l%(Ix~LGHU0!meb6aBHlmWxQDH^ytmXgn2?|{1mAx+i&lq5pwyD2f5>CfF7pC z=a2bwLlI;2;qCqQ0_@(@!gHg+!V2+CHWxc>A!fa+y{FV_5ETp7tpOM-!rltKe%OUsj9D zv}d@t@jGGq1*KE&;h_0uVhJ5P*K2|xIkC|`yel($UTh+FsUZwKn{;{g^r&`?;+Dam zM~`A$kB$z%UB^Ekn$5u;&al7SX2ojZv#4WYQQ2U!>=Z`Yr7e7D!B;LFVSg?MH#?}_ zq+zxrbT|Lc+1rBWW|$z()PB@PTC>t<LwTiL|n9g`;#zlStzQ6ye ztRY#@s9QG3o*1w$-JULvef#B)$M-B@gG7Do!HNc!9F-R(&%M2Y z;T|pp1s@p=|H&Lb)fcy2uyghTRhmW-o5+Om&oSNyFE0LapNtHQ>A!j;752}yy2}6o zbZ2DnL!hhfpuX(zZgHr+nb9AF)Mt5yU?151Dg0#P@N*|}l9TA8G1)iTw{68D5A6$c zx8j$W`)qxZrb5QUu8t;kO(s3B(aQG;`NYls3GbSGu80X%grcUsX}Cq>{BIH7Noi8E zdYF#*3fyw5E*X9*?>+E7r{5SVZf@#;=H6ZJzWZiL6w1&$roh4>CSc7*TMmBx7g38t z4tDqba8W)aD9QY0wSJO21O>KV^pmGQtY~4FX*3W1 z_E7Ks9Uxdzg#ZlR{93!wKl^SRn|<<-`eaHh0kFKR!(%b6s}PuPW$^eN12QEE>&q=lGX3w=3c`nvgF0VhD`W7Dv_SwPOrTVBm`xb?J6# zD?PJL_OMGiPp51nUP>&^=z7P-T=(Py&p*I!j1)h81wmJ%cNHaAm|*g7w)^Q&Q{gVT z2nK4&3ej+Auu@)8YP@lw$NBp0cTj;^vh->to9{cEU#x-3%J(EE3h#?Kj!^0{r|u4b z#hYYq-K3hWZj4M__Koi>EK=Rx)b~m(;GqO~OJk}0h5S~zma+o<8%mmYV@wDxdwn>< z6DJ7hs-=SaE3C8EjS)qQ#3;GH&PoV;)i;sdPI6SIr2PLP{hbtGuNdmfik=T7Q_5>I za`h@;<^3RdeItS-|4%pvz6{y4}!hLhFo&!h-4 zIKnxktvHrS65^pUI67|%S!i@Uh%8H@x*CZXL6g(6dp~mHoFGwEpnBkcNWlP}V~8T#p-yYx1lyn9S?%lk+r&4+|B`AD-(8S_w>|z+nY&ycE;-hYG^aW@f?SKsG})Zx!rdH~z2@j^=$8Qk+E< zsh6dueocBfR>VO40jLQ4&L=Bp%!i3@@wmu)AjchKU8H`bEY=HK2#=p?U$wMEY{2yJ zs<`uzxp*!M>`Q}gEryemx$=|s^PP?`UBJsON^alky0-5~86Fbth#jVQiC##Y5HToG zKJp%!#Hea-PpF-*#!#{$;nLJ0uueNXZ^2G0#^4mEHxslo-TPiQTitG9SUVL~|H3?d zBWX+vsA`*jZ8CT0vzX?6tz**E+kw*^ZIAv6MHjlgLsNM>BOs7gVwQYde;YElZf}tI z3pbW@tNgT{`ZnaE-ge0{g1Zm#JlI=QJUW?7qAP*62>FGdHGQN00geF%87i;M z+_#48WVB7l*TmDZKkvxJPYD3X-Yd#TY6Y*f<=mN)h4N-WHdUoTJ4PS3@I{~5$$8$f zMX}q99!(&o^C6vdz;^4Uo{vSByiMMPSEubeFLQzOiY%!c!^nSc*i0^w3MthFt`7Ma z>4)-b=aq&rgwQE7?U>?7K{3~tWMj=b_8_wP-0 zf&0y`z`0Tmo+vlO08_az4$Z&K`J|-^dC0IloohzW!7MspRlRFrfXliRUsNp5{G_F@35*jQT*% zB_+k|U14PPw*KtN4ZfzFRle&m=ea>o>yfW{(h(5$1bQIk${Va+IPskOg^J{gmG}UE6bkPg~zQ-(T`!A!Wl(p79!=N@pYb_Qk;D&}(6=O=ho4 z@$7DryvqjX#4OayULD{@-xHQy?pT`*_ZHA`r<|Q=tk_;{ zs|%c(Krmy8QGyZk4A<&f05OghFoN+dr%I`V{xR)#1C|TGx(td?t8(h5?TDMW3AC}V z%X-2y(T!1MV;DOA@%PVOMZR*X;c-yGkFMfNW3m7EFxtxtzI173jANXB-W~1tYIa_f zCAek0-=|q%4$xNx^Ba*SNygJ%m*0$e2)xlE2XK}yk$A>ckZ-*$^1ARe6?GX*Cv!}m zZwIN?xO!<8t0aaCAsLv&rq)ERsIjW@*`Oryw zJHAj%89Q`y3>L=l@Lk6s)gyaV(&W$`i+ILFWo>zk&mv{ zt9e2zzeL}?EpgUKihY2*KZOC`f{hK|TZ~x(SCdPslVQ@fG?k8j;RH2iZxTfo@?-d5 zhzETRfBEh$fsa0ny_HqCB^M~RnQJ}_7N|w4gE9!$`{)A-G7TW~^_QR=YsNBBbSK@= zHb_R)5T}zEmjFH83VFXHCE?3x+qqj@u9RN6@wg-IL`WEuMtrX{5t0<0TT9+W_~YwB z4|^FwWQUa@jY@~84%L`Zts00uLM9N1bH0~!vv1wT*(zV;X`VN0D78Dyz63(?5RG=) zzFFtpqfaBFea11+y@NRKby44dNe-mu$mvrXgd}`PMii+GMqfNHG|I)mw**A369^Jg zg?D0vdm>J^!(N&tSnHZdM!y|(v8P8h@%$`YyxGg);Z3O-sN_-fHT5%*;xf%=i#A9# zeZFi9<8KMaKF}8L^6+-nPv<)geeY-2`(dv%bzi8G0l`0U+KbpED=QA67M$awOU9GA zdy*=H-Z%-&10_#Dk+pM65E#{@W2!tRb^*Gz0JE=VdEW8F$&o%5iUlIh@W9-`_`iMZ$Cz?&Zq%a6clgH0$a< zcm8_qr>hSWYMJB`$mw`8t-axAb5+}LNx{Q`#ObJch)k|Qe)a?+au}zS{yYd^AHm!< zM59Ib0EnOPQM>p}xqi!UsxQ|H-EaS_PbC|=J`dP=>zw?BJ36XV5Ss_5w=Kya6!-co 
zk9#gpr;dtx5;>dl5!t~sy#z;UPuoHhfxNf&q}9xP&0z-DrHsdn7LQx63gXz@BD^L` zdel1d9lBST=#oeRjgt#_OHaB`Xumef5{qyZoh-nA=+EECIG8)-Ahl;r)QNoeHMZFp z2jA`|^P1N=G7XOJTE-Deyf7?ikhqRW9su7qT|u)#pLo zZ-y`-kFDkmQ198p(=2e`^b+BSlLR(FtbKb30;35dHBmSrrBEVr30XXZu+me+$E=Jb z451dtw6f77F#<=e=tfw*E>%E*lF4(-1E%vbE}U|MFwOQQj&AEK$^L)F+leoVR6%#5 z=J$VeQS_zb4#iulr$@a?ejt?YOx>g`58^XwX1C4tIzYU(l4;ey-XEoyVz>j)J`T!( zG?D?CC|})#(@-c5V|ebVAV%?w$7-v%%K+6v9&WsRBzA?+)656Tm}U8Gut0V@FdGss zu_*<>H^E|UV5nzpFpFmC%DlzW4KoY#9!g|ENJBv&Glc+~%s?)1kwSxS56ZdO2kGTm;A zPHBw()vlBp(?2lyyrN!Yf#Sim?78tSwm_{u3PuF^7PTaa)~@@zYaFn%3`s|U#qj$SdRZK#$CA!|HG z@s{eosAiA}(V6Mk>^*tohE>@rcu9ff8Hr7??$F{-thQpSPcy1VnWGk|+Zv10A{|5U z9~errdfcf^gY<7(TbhlU-doRV#!}+&eai|kxE;IP4HlE+xyB9+Ah>S!nhaa_?YUhz z5M8peXF_>khJN?!4>qQmHqZth@9It#g>9*S+;HKf__s?%``}Q`N8qkFu*!D&#=BMr zZe_JP8qZKtcA9ck&Q${V`;2eIw}DR^u@YEVI_}UWI(3j8^azZG9hIt>pk||fgns9p z%+P3FOfRG-SNv%6KF9{|bJjkLhcp;U6j<-SdGs!g62P7>+uwUH9Z)Gyz437RHGK!o zPa1z6Z3&=tzzr>)iuU*StF7l#;8Vt2+)S}hTrrC0crHeXmCucQ`b17*Gvu7~>K)5_QB{D}-6xs)CeFA=xTDdKTPc~(xL({44<}Q=GVnaOjerSR zCmrB;{Gx%4J2gPtS$c#CBaFNM1;mrzs0rVGbaaxlwOOEBQ3qB<%6mV(pgA8w6#kJ{ zuefIVs3C0d_D6==KIM=`N3?zi>GZ|R4_&l{`qCC$gz%_f`qz?2Um{26sC3Qgv4@(R zxXQNY`UyB2R(kP{>OF7C42P9EgFzd(c$U+#i%ZHy=Tb7B0vcrKoXSG0kKz-X!WPo^ z=~-G zg?saM2r5Ur)d>6T^{9~hu@-g_`RR(s0~NRkau0Fi>vo@t__d7?Z^jp zL5X#pzm`AetI)EIG2YfJZ(PVxKQrmv=C~s9%RY*2HsEl`EY5FNhd)KV^KuFN5>hah zQqyt*DtTZVa#k+JErVxtzPn70f;7vLg|1R?eSNvs?{&Qk4r-~f>0Pf+>qY7b^xShu zixRm7LtUGx#^QZdwaZA4g!>@Xwvm}=Na)ySchg&yB!SN@< zv_s1VtP4VS`^Uq|ZS`K^)g!Op6^hYSYfg?0gA^4DEcu^NOo=ngzt}xaez}yCwY1fV zo$m39FILX-<-1Z!Y{vX%(>kT11T(B&i#|(h*r2}r{Z*fSDw^}md*d+YmE+X2L?g=A zyuJ56bXneyaw#M#oL;Mb)urm-*;fC$!}D2^+jon+;R7v`zfUGn(J64{9`@H4_ETSf zdB^10HQ#aGECh_143XSkGOvDOvf<4KGVEzNu6Fh}XIs7e>eg2;ypsv@I|rJ4=Vp94 zQ}mYg1nP`EJyPro3e$eiDgTRh344B7D{{JDN6rbW6l+wl{EI97hz&|fJ_J;b+k^hT_V zCI95MEYRn*esrX1OMQU8fQOqyiZ(X)ofm$tZ6A#`%xXKNH{GIY!bPN8F^MIFJYy)wHna!ss z>h>87Tq@3j1K^FHm%NA8P*dD~ce-3ss?-FtJ>?R0wSm~#h1FI@mTQGamQgRp>r^LG z3bt*;N@bFT*WA4t(|M-vqSYo`;W|@Haz+&q-rP?zv>m0^#$LsONCfg zBJ#}~Q}7Sr1h+zza+#^{i`zG%Jbya-K5AU2Yi@nFP^uN?0O!YVJ&%6;TJSBw8Oyu; z9w30sDwFz*UnBZOH9y;-F@4Y4BUx$6W&(VrVtkfO98~d)#(u8UMCh9-sT6-C;&IkR zU!7cDh2BhF{F^!au9fYc2tUYIyg~YG5KhLtZf-{^>fc z=kWzwFk9EaGwHk<`{pSW6Qt^+CYCtA!tU8#tMJCYhunu-xc3Z@$FSOl^Ek0|%;flo zHK(@P*BIsOm`Xke==uTS3stbeJV|Nl=I12m-bBX&xNei}vr^zZHigRJE53!_#>%2# zGX>l;4HFwRuz_FB{JajALNBew&`m8JgLlD(9!OG)W`+^#3qt~44`e%q?utqOO5Q|4 zkbAzkaOVT0B<#HV(}{JT_JVhzs+m3btH9Kzc!kBWv9S9dl?!WMz(GEBS5k$Z-T{r= zm%<_5mujrvIVf)G6^EQNag>iam%QHM4F^*2XisEw?!;FOjGO6U+WFKxeBw|W#@vR= zvA3F@ej9lTzs;oQc^p_K_;YLY-GFl1#>S>cSi8X)>7LdM-VdQOo2P%A_9`sl8fau+uO)J_^pV|a4vizzpm`aBCq^B>6lDCZiO Date: Fri, 25 Oct 2024 15:40:55 +0800 Subject: [PATCH 102/102] docs(s3/param): new default values from s3 params --- docs/zh/08-operation/12-multi.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/zh/08-operation/12-multi.md b/docs/zh/08-operation/12-multi.md index a5608ad5fa..bb3326cf3e 100644 --- a/docs/zh/08-operation/12-multi.md +++ b/docs/zh/08-operation/12-multi.md @@ -76,7 +76,7 @@ dataDir /mnt/data6 2 0 |s3UploadDelaySec | data 文件持续多长时间不再变动后上传至 s3,单位:秒。最小值:1;最大值:2592000 (30天),默认值 60 秒 | |s3PageCacheSize |s3 page cache 缓存页数目,单位:页。最小值:4;最大值:1024*1024\*1024。 ,默认值 4096| |s3MigrateIntervalSec | 本地数据文件自动上传 S3 的触发周期,单位为秒。最小值:600;最大值:100000。默认值 3600 | -|s3MigrateEnabled | 是否自动进行 S3 迁移,默认值为 1,表示开启自动 S3 迁移,可配置为 0。 | +|s3MigrateEnabled | 是否自动进行 S3 迁移,默认值为 0,表示关闭自动 S3 迁移,可配置为 1。 | ### 检查配置参数可用性 @@ -108,9 +108,9 @@ s3migrate database ; | # | 参数 | 默认值 | 最小值 | 最大值 | 描述 | | :--- | 
 | :--- | :----------- | :----- | :----- | :------ | :----------------------------------------------------------- |
-| 1 | s3_keeplocal | 3650 | 1 | 365000 | 数据在本地保留的天数,即 data 文件在本地磁盘保留多长时间后可以上传到 S3。默认单位:天,支持 m(分钟)、h(小时)和 d(天)三个单位 |
+| 1 | s3_keeplocal | 365 | 1 | 365000 | 数据在本地保留的天数,即 data 文件在本地磁盘保留多长时间后可以上传到 S3。默认单位:天,支持 m(分钟)、h(小时)和 d(天)三个单位 |
 | 2 | s3_chunksize | 262144 | 131072 | 1048576 | 上传对象的大小阈值,与 TSDB_PAGESIZE 参数一样,不可修改,单位为 TSDB 页 |
-| 3 | s3_compact | 0 | 0 | 1 | TSDB 文件组首次上传 S3 时,是否自动进行 compact 操作。 |
+| 3 | s3_compact | 1 | 0 | 1 | TSDB 文件组首次上传 S3 时,是否自动进行 compact 操作。 |
 
 ## Azure Blob 存储
 本节介绍在 TDengine Enterprise 如何使用微软 Azure Blob 对象存储。本功能是上一小节‘对象存储’功能的扩展,需额外依赖 Flexify 服务提供的 S3 网关。通过适当的参数配置,可以把大部分较冷的时序数据存储到 Azure Blob 服务中。