Merge remote-tracking branch 'origin/3.0' into feat/TD-30268

dapan1121 2024-10-24 16:49:38 +08:00
commit 4f3484b823
1318 changed files with 92756 additions and 39943 deletions

.gitignore

@@ -100,6 +100,9 @@ tests/examples/JDBC/JDBCDemo/.project
tests/examples/JDBC/JDBCDemo/.settings/
source/libs/parser/inc/sql.*
tests/script/tmqResult.txt
tests/system-test/case_to_run.txt
tests/develop-test/case_to_run.txt
tests/scripts/case_to_run.txt
tests/tmqResult.txt
tests/script/jenkins/basic.txt
@@ -118,6 +121,7 @@ TAGS
contrib/*
!contrib/CMakeLists.txt
!contrib/test
!contrib/azure-cmake
sql
debug*/
.env
@@ -135,3 +139,24 @@ tags
*CMakeCache*
*CMakeFiles*
.history/
*.txt
*.tcl
*.pc
contrib/geos
contrib/libuv
contrib/pcre2
contrib/zlib
deps_tmp_CMakeLists.txt.in
*.a
*.ctest
pcre2-config
pcre2_test.sh
pcre2_grep_test.sh
pcre2_chartables.c
geos-config
config.h
pcre2.h
zconf.h
version.h
geos_c.h


@@ -1,13 +1,13 @@
cmake_minimum_required(VERSION 3.0)
project(
TDengine
VERSION 3.0
DESCRIPTION "An open-source big data platform designed and optimized for the Internet of Things(IOT)"
TDengine
VERSION 3.0
DESCRIPTION "An open-source big data platform designed and optimized for the Internet of Things(IOT)"
)
if (NOT DEFINED TD_SOURCE_DIR)
set( TD_SOURCE_DIR ${PROJECT_SOURCE_DIR} )
if(NOT DEFINED TD_SOURCE_DIR)
set(TD_SOURCE_DIR ${PROJECT_SOURCE_DIR})
endif()
SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
@@ -15,13 +15,11 @@ SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
set(TD_SUPPORT_DIR "${TD_SOURCE_DIR}/cmake")
set(TD_CONTRIB_DIR "${TD_SOURCE_DIR}/contrib")
include(${TD_SUPPORT_DIR}/cmake.platform)
include(${TD_SUPPORT_DIR}/cmake.define)
include(${TD_SUPPORT_DIR}/cmake.options)
include(${TD_SUPPORT_DIR}/cmake.version)
# contrib
add_subdirectory(contrib)
@@ -33,8 +31,8 @@ target_include_directories(api INTERFACE "include/client")
# src
if(${BUILD_TEST})
include(CTest)
enable_testing()
include(CTest)
enable_testing()
endif(${BUILD_TEST})
add_subdirectory(source)
@@ -44,5 +42,5 @@ add_subdirectory(examples/c)
add_subdirectory(tests)
include(${TD_SUPPORT_DIR}/cmake.install)
# docs
# docs
add_subdirectory(docs/doxgen)


@@ -306,9 +306,9 @@ def pre_test_build_win() {
cd %WIN_CONNECTOR_ROOT%
python.exe -m pip install --upgrade pip
python -m pip uninstall taospy -y
python -m pip install taospy==2.7.13
python -m pip install taospy==2.7.16
python -m pip uninstall taos-ws-py -y
python -m pip install taos-ws-py==0.3.1
python -m pip install taos-ws-py==0.3.3
xcopy /e/y/i/f %WIN_INTERNAL_ROOT%\\debug\\build\\lib\\taos.dll C:\\Windows\\System32
'''
return 1
@@ -422,6 +422,14 @@ pipeline {
mkdir -p ${WKDIR}/tmp/${BRANCH_NAME}_${BUILD_ID}
echo "''' + env.FILE_CHANGED + '''" > ${WKDIR}/tmp/${BRANCH_NAME}_${BUILD_ID}/docs_changed.txt
'''
sh '''
cd ${WKC}/tests/parallel_test
./run_check_assert_container.sh -d ${WKDIR}
'''
sh '''
cd ${WKC}/tests/parallel_test
./run_check_void_container.sh -d ${WKDIR}
'''
sh '''
date
rm -rf ${WKC}/debug


@@ -5,8 +5,9 @@ ExternalProject_Add(addr2line
GIT_TAG master
SOURCE_DIR "${TD_CONTRIB_DIR}/addr2line"
BINARY_DIR "${TD_CONTRIB_DIR}/addr2line"
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)
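
A recurring change in this merge is the addition of `GIT_SHALLOW true` to the git-based ExternalProject dependencies, which tells CMake to clone only the tip of the pinned ref instead of the full history and so speeds up the contrib download step. A minimal sketch of the pattern follows; the dependency name and URL are placeholders, not a real TDengine dependency:

```cmake
include(ExternalProject)

# Hypothetical dependency, shown only to illustrate the GIT_SHALLOW pattern
# applied throughout this merge.
ExternalProject_Add(example-dep
    GIT_REPOSITORY    https://example.com/example/dep.git
    GIT_TAG           v1.0.0   # a branch or tag; a shallow fetch cannot target a bare commit hash
    GIT_SHALLOW       true     # clone with --depth 1 instead of full history
    SOURCE_DIR        "${TD_CONTRIB_DIR}/example-dep"
    CONFIGURE_COMMAND ""
    BUILD_COMMAND     ""
    INSTALL_COMMAND   ""
    TEST_COMMAND      ""
)
```

Consistent with that caveat, the cpp-stub hunk further down repins to a full commit hash and is one of the few dependencies that does not gain `GIT_SHALLOW`.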


@@ -11,9 +11,10 @@ ExternalProject_Add(aprutil-1
BUILD_IN_SOURCE TRUE
BUILD_ALWAYS 1
#UPDATE_COMMAND ""
CONFIGURE_COMMAND ./configure --prefix=$ENV{HOME}/.cos-local.1/ --with-apr=$ENV{HOME}/.cos-local.1
CONFIGURE_COMMAND ./configure --prefix=$ENV{HOME}/.cos-local.1/ --with-apr=$ENV{HOME}/.cos-local.1
#CONFIGURE_COMMAND ./configure --with-apr=/usr/local/apr
BUILD_COMMAND make
INSTALL_COMMAND make install
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -16,4 +16,5 @@ ExternalProject_Add(apr-1
BUILD_COMMAND make
INSTALL_COMMAND make install
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -0,0 +1,15 @@
# azure
ExternalProject_Add(azure
URL https://github.com/Azure/azure-sdk-for-cpp/archive/refs/tags/azure-storage-blobs_12.13.0-beta.1.tar.gz
URL_HASH SHA256=3eca486fd60e3522d0a633025ecd652a71515b1e944799b2e8ee31fd590305a9
DOWNLOAD_NO_PROGRESS 1
DOWNLOAD_DIR "${TD_CONTRIB_DIR}/deps-download"
SOURCE_DIR "${TD_CONTRIB_DIR}/azure-sdk-for-cpp-azure-storage-blobs_12.13.0-beta.1"
#BUILD_IN_SOURCE TRUE
#BUILD_ALWAYS 1
#UPDATE_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
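
Unlike the git dependencies, the new Azure SDK is fetched as a release tarball and verified against a SHA256 checksum before it is unpacked, so a tampered or truncated download aborts the configure step. Roughly the same verify-on-download behavior can be reproduced with plain CMake; this sketch reuses the URL and hash from the hunk above:

```cmake
# Sketch of what ExternalProject's URL + URL_HASH combination amounts to:
# file(DOWNLOAD) raises an error if the checksum of the fetched file differs.
file(DOWNLOAD
    https://github.com/Azure/azure-sdk-for-cpp/archive/refs/tags/azure-storage-blobs_12.13.0-beta.1.tar.gz
    ${CMAKE_BINARY_DIR}/azure-sdk.tar.gz
    EXPECTED_HASH SHA256=3eca486fd60e3522d0a633025ecd652a71515b1e944799b2e8ee31fd590305a9
    SHOW_PROGRESS
)
```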


@@ -9,4 +9,5 @@ ExternalProject_Add(cjson
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -2,7 +2,7 @@ cmake_minimum_required(VERSION 3.0)
set(CMAKE_VERBOSE_MAKEFILE FALSE)
set(TD_BUILD_TAOSA_INTERNAL FALSE)
#set output directory
# set output directory
SET(LIBRARY_OUTPUT_PATH ${PROJECT_BINARY_DIR}/build/lib)
SET(EXECUTABLE_OUTPUT_PATH ${PROJECT_BINARY_DIR}/build/bin)
SET(TD_TESTS_OUTPUT_DIR ${PROJECT_BINARY_DIR}/test)
@@ -12,193 +12,251 @@ MESSAGE(STATUS "Project binary files output path: " ${PROJECT_BINARY_DIR})
MESSAGE(STATUS "Project executable files output path: " ${EXECUTABLE_OUTPUT_PATH})
MESSAGE(STATUS "Project library files output path: " ${LIBRARY_OUTPUT_PATH})
if (NOT DEFINED TD_GRANT)
SET(TD_GRANT FALSE)
endif()
IF(NOT DEFINED TD_GRANT)
SET(TD_GRANT FALSE)
ENDIF()
IF (NOT DEFINED BUILD_WITH_RAND_ERR)
SET(BUILD_WITH_RAND_ERR FALSE)
ELSE ()
SET(BUILD_WITH_RAND_ERR TRUE)
endif()
IF(NOT DEFINED BUILD_WITH_RAND_ERR)
SET(BUILD_WITH_RAND_ERR FALSE)
ELSE()
SET(BUILD_WITH_RAND_ERR TRUE)
ENDIF()
IF ("${WEBSOCKET}" MATCHES "true")
IF("${WEBSOCKET}" MATCHES "true")
SET(TD_WEBSOCKET TRUE)
MESSAGE("Enable websocket")
ADD_DEFINITIONS(-DWEBSOCKET)
ELSE ()
ELSE()
SET(TD_WEBSOCKET FALSE)
ENDIF ()
ENDIF()
IF ("${BUILD_HTTP}" STREQUAL "")
IF (TD_LINUX)
IF (TD_ARM_32)
SET(TD_BUILD_HTTP TRUE)
ELSE ()
SET(TD_BUILD_HTTP TRUE)
ENDIF ()
ELSEIF (TD_DARWIN)
IF("${BUILD_HTTP}" STREQUAL "")
IF(TD_LINUX)
IF(TD_ARM_32)
SET(TD_BUILD_HTTP TRUE)
ELSE()
SET(TD_BUILD_HTTP TRUE)
ENDIF()
ELSEIF(TD_DARWIN)
SET(TD_BUILD_HTTP TRUE)
ELSE()
SET(TD_BUILD_HTTP TRUE)
ENDIF()
ELSEIF(${BUILD_HTTP} MATCHES "false")
SET(TD_BUILD_HTTP FALSE)
ELSEIF(${BUILD_HTTP} MATCHES "true")
SET(TD_BUILD_HTTP TRUE)
ELSE ()
ELSEIF(${BUILD_HTTP} MATCHES "internal")
SET(TD_BUILD_HTTP FALSE)
SET(TD_BUILD_TAOSA_INTERNAL TRUE)
ELSE()
SET(TD_BUILD_HTTP TRUE)
ENDIF ()
ELSEIF (${BUILD_HTTP} MATCHES "false")
SET(TD_BUILD_HTTP FALSE)
ELSEIF (${BUILD_HTTP} MATCHES "true")
SET(TD_BUILD_HTTP TRUE)
ELSEIF (${BUILD_HTTP} MATCHES "internal")
SET(TD_BUILD_HTTP FALSE)
SET(TD_BUILD_TAOSA_INTERNAL TRUE)
ELSE ()
SET(TD_BUILD_HTTP TRUE)
ENDIF ()
ENDIF()
IF (TD_BUILD_HTTP)
ADD_DEFINITIONS(-DHTTP_EMBEDDED)
ENDIF ()
IF(TD_BUILD_HTTP)
ADD_DEFINITIONS(-DHTTP_EMBEDDED)
ENDIF()
IF ("${BUILD_TOOLS}" STREQUAL "")
IF (TD_LINUX)
IF (TD_ARM_32)
SET(BUILD_TOOLS "false")
ELSEIF (TD_ARM_64)
SET(BUILD_TOOLS "false")
ELSE ()
SET(BUILD_TOOLS "false")
ENDIF ()
ELSEIF (TD_DARWIN)
SET(BUILD_TOOLS "false")
ELSE ()
SET(BUILD_TOOLS "false")
ENDIF ()
ENDIF ()
IF("${BUILD_TOOLS}" STREQUAL "")
IF(TD_LINUX)
IF(TD_ARM_32)
SET(BUILD_TOOLS "false")
ELSEIF(TD_ARM_64)
SET(BUILD_TOOLS "false")
ELSE()
SET(BUILD_TOOLS "false")
ENDIF()
ELSEIF(TD_DARWIN)
SET(BUILD_TOOLS "false")
ELSE()
SET(BUILD_TOOLS "false")
ENDIF()
ENDIF()
IF ("${BUILD_TOOLS}" MATCHES "false")
IF("${BUILD_TOOLS}" MATCHES "false")
MESSAGE("${Yellow} Will _not_ build taos_tools! ${ColourReset}")
SET(TD_TAOS_TOOLS FALSE)
ELSE ()
ELSE()
MESSAGE("")
MESSAGE("${Green} Will build taos_tools! ${ColourReset}")
MESSAGE("")
SET(TD_TAOS_TOOLS TRUE)
ENDIF ()
ENDIF()
IF (${TD_WINDOWS})
IF(${TD_WINDOWS})
SET(TAOS_LIB taos_static)
ELSE ()
ELSE()
SET(TAOS_LIB taos)
ENDIF ()
ENDIF()
# build TSZ by default
IF ("${TSZ_ENABLED}" MATCHES "false")
set(VAR_TSZ "" CACHE INTERNAL "global variant empty" )
IF("${TSZ_ENABLED}" MATCHES "false")
set(VAR_TSZ "" CACHE INTERNAL "global variant empty")
ELSE()
# define add
MESSAGE(STATUS "build with TSZ enabled")
ADD_DEFINITIONS(-DTD_TSZ)
set(VAR_TSZ "TSZ" CACHE INTERNAL "global variant tsz" )
# define add
MESSAGE(STATUS "build with TSZ enabled")
ADD_DEFINITIONS(-DTD_TSZ)
set(VAR_TSZ "TSZ" CACHE INTERNAL "global variant tsz")
ENDIF()
# force set all platform to JEMALLOC_ENABLED = false
SET(JEMALLOC_ENABLED OFF)
IF (TD_WINDOWS)
MESSAGE("${Yellow} set compiler flag for Windows! ${ColourReset}")
IF (${CMAKE_BUILD_TYPE} MATCHES "Release")
MESSAGE("${Green} will build Release version! ${ColourReset}")
SET(COMMON_FLAGS "/W3 /D_WIN32 /DWIN32 /Zi- /O2 /GL /MD")
ELSE ()
MESSAGE("${Green} will build Debug version! ${ColourReset}")
SET(COMMON_FLAGS "/w /D_WIN32 /DWIN32 /Zi /MTd")
IF(TD_WINDOWS)
MESSAGE("${Yellow} set compiler flag for Windows! ${ColourReset}")
IF(${CMAKE_BUILD_TYPE} MATCHES "Release")
MESSAGE("${Green} will build Release version! ${ColourReset}")
SET(COMMON_FLAGS "/W3 /D_WIN32 /DWIN32 /Zi- /O2 /GL /MD")
ELSE()
MESSAGE("${Green} will build Debug version! ${ColourReset}")
SET(COMMON_FLAGS "/w /D_WIN32 /DWIN32 /Zi /MTd")
ENDIF()
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /MANIFEST:NO")
# IF (MSVC AND (MSVC_VERSION GREATER_EQUAL 1900))
# SET(COMMON_FLAGS "${COMMON_FLAGS} /Wv:18")
# SET(COMMON_FLAGS "${COMMON_FLAGS} /Wv:18")
# ENDIF ()
IF (CMAKE_DEPFILE_FLAGS_C)
IF(CMAKE_DEPFILE_FLAGS_C)
SET(CMAKE_DEPFILE_FLAGS_C "")
ENDIF ()
IF (CMAKE_DEPFILE_FLAGS_CXX)
ENDIF()
IF(CMAKE_DEPFILE_FLAGS_CXX)
SET(CMAKE_DEPFILE_FLAGS_CXX "")
ENDIF ()
IF (CMAKE_C_FLAGS_DEBUG)
ENDIF()
IF(CMAKE_C_FLAGS_DEBUG)
SET(CMAKE_C_FLAGS_DEBUG "" CACHE STRING "" FORCE)
ENDIF ()
IF (CMAKE_CXX_FLAGS_DEBUG)
ENDIF()
IF(CMAKE_CXX_FLAGS_DEBUG)
SET(CMAKE_CXX_FLAGS_DEBUG "" CACHE STRING "" FORCE)
ENDIF ()
ENDIF()
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMMON_FLAGS}")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMMON_FLAGS}")
ELSE ()
IF (${TD_DARWIN})
ELSE()
IF(${TD_DARWIN})
set(CMAKE_MACOSX_RPATH 0)
ENDIF ()
IF (${COVER} MATCHES "true")
ENDIF()
IF(${COVER} MATCHES "true")
MESSAGE(STATUS "Test coverage mode, add extra flags")
SET(GCC_COVERAGE_COMPILE_FLAGS "-fprofile-arcs -ftest-coverage")
SET(GCC_COVERAGE_LINK_FLAGS "-lgcov --coverage")
SET(GCC_COVERAGE_LINK_FLAGS "-lgcov --coverage")
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${GCC_COVERAGE_COMPILE_FLAGS} ${GCC_COVERAGE_LINK_FLAGS}")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${GCC_COVERAGE_COMPILE_FLAGS} ${GCC_COVERAGE_LINK_FLAGS}")
ENDIF ()
ENDIF()
# disable all assert
IF ((${DISABLE_ASSERT} MATCHES "true") OR (${DISABLE_ASSERTS} MATCHES "true"))
IF((${DISABLE_ASSERT} MATCHES "true") OR(${DISABLE_ASSERTS} MATCHES "true"))
ADD_DEFINITIONS(-DDISABLE_ASSERT)
MESSAGE(STATUS "Disable all asserts")
ENDIF()
INCLUDE(CheckCCompilerFlag)
IF (TD_ARM_64 OR TD_ARM_32)
IF(TD_ARM_64 OR TD_ARM_32)
SET(COMPILER_SUPPORT_SSE42 false)
ELSEIF (("${CMAKE_C_COMPILER_ID}" MATCHES "Clang") OR ("${CMAKE_C_COMPILER_ID}" MATCHES "AppleClang"))
ELSEIF(("${CMAKE_C_COMPILER_ID}" MATCHES "Clang") OR("${CMAKE_C_COMPILER_ID}" MATCHES "AppleClang"))
SET(COMPILER_SUPPORT_SSE42 true)
MESSAGE(STATUS "Always enable sse4.2 for Clang/AppleClang")
ELSE()
CHECK_C_COMPILER_FLAG("-msse4.2" COMPILER_SUPPORT_SSE42)
ENDIF()
CHECK_C_COMPILER_FLAG("-mfma" COMPILER_SUPPORT_FMA)
CHECK_C_COMPILER_FLAG("-mavx" COMPILER_SUPPORT_AVX)
CHECK_C_COMPILER_FLAG("-mavx2" COMPILER_SUPPORT_AVX2)
CHECK_C_COMPILER_FLAG("-mavx512f" COMPILER_SUPPORT_AVX512F)
CHECK_C_COMPILER_FLAG("-mavx512vbmi" COMPILER_SUPPORT_AVX512BMI)
CHECK_C_COMPILER_FLAG("-mavx512vl" COMPILER_SUPPORT_AVX512VL)
IF(TD_ARM_64 OR TD_ARM_32)
SET(COMPILER_SUPPORT_FMA false)
SET(COMPILER_SUPPORT_AVX false)
SET(COMPILER_SUPPORT_AVX2 false)
SET(COMPILER_SUPPORT_AVX512F false)
SET(COMPILER_SUPPORT_AVX512BMI false)
SET(COMPILER_SUPPORT_AVX512VL false)
ELSE()
CHECK_C_COMPILER_FLAG("-mfma" COMPILER_SUPPORT_FMA)
CHECK_C_COMPILER_FLAG("-mavx512f" COMPILER_SUPPORT_AVX512F)
CHECK_C_COMPILER_FLAG("-mavx512vbmi" COMPILER_SUPPORT_AVX512BMI)
CHECK_C_COMPILER_FLAG("-mavx512vl" COMPILER_SUPPORT_AVX512VL)
IF (COMPILER_SUPPORT_SSE42)
INCLUDE(CheckCSourceRuns)
SET(CMAKE_REQUIRED_FLAGS "-mavx")
check_c_source_runs("
#include <immintrin.h>
int main() {
__m256d a, b, c;
double buf[4] = {0};
a = _mm256_loadu_pd(buf);
b = _mm256_loadu_pd(buf);
c = _mm256_add_pd(a, b);
_mm256_storeu_pd(buf, c);
for (int i = 0; i < sizeof(buf) / sizeof(buf[0]); ++i) {
if (buf[i] != 0) {
return 1;
}
}
return 0;
}
" COMPILER_SUPPORT_AVX)
SET(CMAKE_REQUIRED_FLAGS "-mavx2")
check_c_source_runs("
#include <immintrin.h>
int main() {
__m256i a, b, c;
int buf[8] = {0};
a = _mm256_loadu_si256((__m256i *)buf);
b = _mm256_loadu_si256((__m256i *)buf);
c = _mm256_and_si256(a, b);
_mm256_storeu_si256((__m256i *)buf, c);
for (int i = 0; i < sizeof(buf) / sizeof(buf[0]); ++i) {
if (buf[i] != 0) {
return 1;
}
}
return 0;
}
" COMPILER_SUPPORT_AVX2)
ENDIF()
IF(COMPILER_SUPPORT_SSE42)
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -msse4.2")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -msse4.2")
ENDIF()
IF ("${SIMD_SUPPORT}" MATCHES "true")
IF (COMPILER_SUPPORT_FMA)
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mfma")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mfma")
IF("${SIMD_SUPPORT}" MATCHES "true")
IF(COMPILER_SUPPORT_FMA)
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mfma")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mfma")
MESSAGE(STATUS "FMA instructions is ACTIVATED")
ENDIF()
IF (COMPILER_SUPPORT_AVX)
IF(COMPILER_SUPPORT_AVX)
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx")
MESSAGE(STATUS "AVX instructions is ACTIVATED")
ENDIF()
IF (COMPILER_SUPPORT_AVX2)
IF(COMPILER_SUPPORT_AVX2)
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx2")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx2")
MESSAGE(STATUS "AVX2 instructions is ACTIVATED")
ENDIF()
MESSAGE(STATUS "SIMD instructions (FMA/AVX/AVX2) is ACTIVATED")
ENDIF()
IF ("${SIMD_AVX512_SUPPORT}" MATCHES "true")
IF (COMPILER_SUPPORT_AVX512F AND COMPILER_SUPPORT_AVX512BMI)
IF("${SIMD_AVX512_SUPPORT}" MATCHES "true")
IF(COMPILER_SUPPORT_AVX512F AND COMPILER_SUPPORT_AVX512BMI)
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx512f -mavx512vbmi")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx512f -mavx512vbmi")
MESSAGE(STATUS "avx512f/avx512bmi enabled by compiler")
ENDIF()
IF (COMPILER_SUPPORT_AVX512VL)
IF(COMPILER_SUPPORT_AVX512VL)
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -mavx512vl")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mavx512vl")
MESSAGE(STATUS "avx512vl enabled by compiler")
MESSAGE(STATUS "avx512vl enabled by compiler")
ENDIF()
ENDIF()
@@ -206,16 +264,17 @@ ELSE ()
SET(CMAKE_C_FLAGS_REL "${CMAKE_C_FLAGS} -Werror -Werror=return-type -fPIC -O3 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k")
SET(CMAKE_CXX_FLAGS_REL "${CMAKE_CXX_FLAGS} -Werror -Wno-reserved-user-defined-literal -Wno-literal-suffix -Werror=return-type -fPIC -O3 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k")
IF (${BUILD_SANITIZER})
IF(${BUILD_SANITIZER})
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Werror -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0")
MESSAGE(STATUS "Compile with Address Sanitizer!")
ELSEIF (${BUILD_RELEASE})
# SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize=undefined -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -fsanitize=address -fsanitize-recover=all -fsanitize=float-divide-by-zero -fsanitize=float-cast-overflow -fno-sanitize=shift-base -fno-sanitize=alignment -g3 -Wformat=0")
MESSAGE(STATUS "Compile with Address Sanitizer!")
ELSEIF(${BUILD_RELEASE})
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS_REL}")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL}")
ELSE ()
ELSE()
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Werror -Werror=return-type -fPIC -g3 -gdwarf-2 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-reserved-user-defined-literal -g3 -Wno-literal-suffix -Werror=return-type -fPIC -gdwarf-2 -Wformat=2 -Wno-format-nonliteral -Wno-format-truncation -Wno-format-y2k")
ENDIF ()
ENDIF ()
ENDIF()
ENDIF()


@@ -7,7 +7,17 @@ ADD_CUSTOM_COMMAND(OUTPUT ${PREPARE_ENV_CMD}
COMMAND ${CMAKE_COMMAND} -E make_directory ${TD_TESTS_OUTPUT_DIR}/cfg/
COMMAND ${CMAKE_COMMAND} -E make_directory ${TD_TESTS_OUTPUT_DIR}/log/
COMMAND ${CMAKE_COMMAND} -E make_directory ${TD_TESTS_OUTPUT_DIR}/data/
COMMAND ${CMAKE_COMMAND} -E echo dataDir ${TD_TESTS_OUTPUT_DIR}/data > ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo firstEp localhost:6030 > ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo fqdn localhost >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo serverPort 6030 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo debugFlag 135 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo asyncLog 0 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo supportVnodes 1024 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo numOfLogLines 300000000 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo logKeepDays -1 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo checkpointInterval 60 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo snodeAddress 127.0.0.1:873 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo dataDir ${TD_TESTS_OUTPUT_DIR}/data >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo logDir ${TD_TESTS_OUTPUT_DIR}/log >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo charset UTF-8 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
COMMAND ${CMAKE_COMMAND} -E echo monitor 0 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
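
Since the first `echo` writes with `>` and the rest append with `>>`, the file is rebuilt from scratch each time the command runs. Under that reading, the generated `taos.cfg` should come out roughly as follows, where `<TD_TESTS_OUTPUT_DIR>` stands for the expanded test output directory:

```
firstEp localhost:6030
fqdn localhost
serverPort 6030
debugFlag 135
asyncLog 0
supportVnodes 1024
numOfLogLines 300000000
logKeepDays -1
checkpointInterval 60
snodeAddress 127.0.0.1:873
dataDir <TD_TESTS_OUTPUT_DIR>/data
logDir <TD_TESTS_OUTPUT_DIR>/log
charset UTF-8
monitor 0
```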


@@ -144,6 +144,12 @@ option(
OFF
)
option(
BUILD_WITH_ANALYSIS
"If build with analysis"
ON
)
ENDIF ()
IF(NOT TD_ENTERPRISE)
@@ -151,8 +157,15 @@ MESSAGE("switch s3 off with community version")
set(BUILD_S3 OFF)
set(BUILD_WITH_S3 OFF)
set(BUILD_WITH_COS OFF)
set(BUILD_WITH_ANALYSIS OFF)
ENDIF ()
IF(${BUILD_WITH_ANALYSIS})
message("build with analysis")
set(BUILD_S3 ON)
set(BUILD_WITH_S3 ON)
ENDIF()
IF(${BUILD_S3})
IF(${BUILD_WITH_S3})


@@ -2,7 +2,7 @@
IF (DEFINED VERNUMBER)
SET(TD_VER_NUMBER ${VERNUMBER})
ELSE ()
SET(TD_VER_NUMBER "3.3.3.0.alpha")
SET(TD_VER_NUMBER "3.3.4.0.alpha")
ENDIF ()
IF (DEFINED VERCOMPATIBLE)


@@ -9,4 +9,5 @@ ExternalProject_Add(cos
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -5,8 +5,9 @@ ExternalProject_Add(crashdump
GIT_TAG master
SOURCE_DIR "${TD_CONTRIB_DIR}/crashdump"
BINARY_DIR "${TD_CONTRIB_DIR}/crashdump"
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -16,4 +16,5 @@ ExternalProject_Add(curl2
BUILD_COMMAND make -j
INSTALL_COMMAND make install
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -9,4 +9,5 @@ ExternalProject_Add(geos
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -10,4 +10,5 @@ ExternalProject_Add(gnuregex
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -9,4 +9,5 @@ ExternalProject_Add(googletest
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -9,4 +9,5 @@ ExternalProject_Add(iconv
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -5,8 +5,9 @@ ExternalProject_Add(libdwarf
GIT_TAG libdwarf-0.3.1
SOURCE_DIR "${TD_CONTRIB_DIR}/libdwarf"
BINARY_DIR "${TD_CONTRIB_DIR}/libdwarf"
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -13,4 +13,5 @@ ExternalProject_Add(libs3
BUILD_COMMAND make build/lib/libs3.a
INSTALL_COMMAND make install_static
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -5,8 +5,9 @@ ExternalProject_Add(libuv
GIT_TAG v1.48.0
SOURCE_DIR "${TD_CONTRIB_DIR}/libuv"
BINARY_DIR "${TD_CONTRIB_DIR}/libuv"
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -9,4 +9,5 @@ ExternalProject_Add(lz4
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -1,9 +1,9 @@
# xz
# xz
if (${TD_LINUX})
ExternalProject_Add(lzma2
GIT_REPOSITORY https://github.com/conor42/fast-lzma2.git
GIT_REPOSITORY https://github.com/conor42/fast-lzma2.git
SOURCE_DIR "${TD_CONTRIB_DIR}/lzma2"
#BINARY_DIR ""
BUILD_IN_SOURCE TRUE
@@ -11,5 +11,6 @@ ExternalProject_Add(lzma2
BUILD_COMMAND make
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)
endif()
endif()


@@ -10,4 +10,5 @@ ExternalProject_Add(msvcregex
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -11,4 +11,5 @@ ExternalProject_Add(mxml
BUILD_COMMAND make
INSTALL_COMMAND make install
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -10,4 +10,5 @@ ExternalProject_Add(pcre2
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -10,4 +10,5 @@ ExternalProject_Add(pthread
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -11,6 +11,7 @@ if (${BUILD_CONTRIB})
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)
else()
if (NOT ${TD_LINUX})
@@ -24,6 +25,7 @@ else()
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)
endif()
endif()


@@ -10,4 +10,5 @@ ExternalProject_Add(sqlite
BUILD_COMMAND "$(MAKE)"
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -12,4 +12,5 @@ ExternalProject_Add(openssl
BUILD_COMMAND make -j
INSTALL_COMMAND make install_sw -j
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -2,12 +2,12 @@
# stub
ExternalProject_Add(stub
GIT_REPOSITORY https://github.com/coolxv/cpp-stub.git
GIT_TAG 5e903b8e
GIT_TAG 3137465194014d66a8402941e80d2bccc6346f51
GIT_SUBMODULES "src"
SOURCE_DIR "${TD_CONTRIB_DIR}/cpp-stub"
BINARY_DIR "${TD_CONTRIB_DIR}/cpp-stub/src"
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
TEST_COMMAND ""
)


@@ -2,7 +2,7 @@
# taos-tools
ExternalProject_Add(taos-tools
GIT_REPOSITORY https://github.com/taosdata/taos-tools.git
GIT_TAG 3.0
GIT_TAG main
SOURCE_DIR "${TD_SOURCE_DIR}/tools/taos-tools"
BINARY_DIR ""
#BUILD_IN_SOURCE TRUE


@@ -10,4 +10,5 @@ ExternalProject_Add(wcwidth
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -10,4 +10,5 @@ ExternalProject_Add(wingetopt
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -15,4 +15,5 @@ ExternalProject_Add(xml2
BUILD_COMMAND make -j
INSTALL_COMMAND make install && ln -sf $ENV{HOME}/.cos-local.2/include/libxml2/libxml $ENV{HOME}/.cos-local.2/include/libxml
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -1,14 +1,15 @@
# xz
ExternalProject_Add(xz
GIT_REPOSITORY https://github.com/xz-mirror/xz.git
GIT_TAG v5.4.4
GIT_REPOSITORY https://github.com/xz-mirror/xz.git
GIT_TAG v5.4.4
SOURCE_DIR "${TD_CONTRIB_DIR}/xz"
BINARY_DIR ""
#BUILD_IN_SOURCE TRUE
CMAKE_ARGS
CMAKE_ARGS
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
)
GIT_SHALLOW true
)


@@ -12,4 +12,5 @@ ExternalProject_Add(zlib
BUILD_COMMAND ""
INSTALL_COMMAND ""
TEST_COMMAND ""
GIT_SHALLOW true
)


@@ -1,15 +1,16 @@
# zstb
# zstb
#ExternalProject_Add(zstd
#GIT_REPOSITORY https://github.com/facebook/zstd.git
#GIT_REPOSITORY https://github.com/facebook/zstd.git
#GIT_TAG v1.5.5
#SOURCE_DIR "${TD_CONTRIB_DIR}/zstd"
#DOWNLOAD_DIR "${TD_CONTRIB_DIR}/deps-download"
#BINARY_DIR ""
#CMAKE_COMMAND
#CMAKE_COMMAND
#CMAKE_ARGS ${TD_CONTRIB_DIR}/zstd/build/cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY=${CMAKE_BINARY_DIR}/build/lib
#BUILD_COMMAND make -j4
#BUILD_COMMAND make -j4
#INSTALL_COMMAND ""
#TEST_COMMAND ""
#)
#GIT_SHALLOW true
#)


@@ -10,39 +10,36 @@ if(${BUILD_WITH_S3})
file(MAKE_DIRECTORY $ENV{HOME}/.cos-local.2/)
elseif(${BUILD_WITH_COS})
set(CONTRIB_TMP_FILE3 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in3")
configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
set(CONTRIB_TMP_FILE3 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in3")
configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
if(${BUILD_WITH_COS})
file(MAKE_DIRECTORY $ENV{HOME}/.cos-local.1/)
cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
endif(${BUILD_WITH_COS})
if(${BUILD_WITH_COS})
file(MAKE_DIRECTORY $ENV{HOME}/.cos-local.1/)
cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE3})
endif(${BUILD_WITH_COS})
configure_file(${CONTRIB_TMP_FILE3} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt")
execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" .
configure_file(${CONTRIB_TMP_FILE3} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt")
execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" .
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
execute_process(COMMAND "${CMAKE_COMMAND}" --build .
execute_process(COMMAND "${CMAKE_COMMAND}" --build .
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
set(CONTRIB_TMP_FILE2 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in2")
configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2})
set(CONTRIB_TMP_FILE2 "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in2")
configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2})
if(${BUILD_WITH_COS})
cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2})
endif(${BUILD_WITH_COS})
if(${BUILD_WITH_COS})
cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE2})
endif(${BUILD_WITH_COS})
configure_file(${CONTRIB_TMP_FILE2} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt")
execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" .
configure_file(${CONTRIB_TMP_FILE2} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt")
execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" .
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
execute_process(COMMAND "${CMAKE_COMMAND}" --build .
execute_process(COMMAND "${CMAKE_COMMAND}" --build .
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
endif()
set(CONTRIB_TMP_FILE "${CMAKE_BINARY_DIR}/deps_tmp_CMakeLists.txt.in")
configure_file("${TD_SUPPORT_DIR}/deps_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
@@ -59,7 +56,7 @@ endif()
# taosadapter
if(${BUILD_HTTP})
MESSAGE("BUILD_HTTP is on")
else ()
else()
MESSAGE("BUILD_HTTP is off, use taosAdapter")
cat("${TD_SUPPORT_DIR}/taosadapter_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
endif()
@@ -110,19 +107,18 @@ cat("${TD_SUPPORT_DIR}/zlib_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
cat("${TD_SUPPORT_DIR}/cjson_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# xz
#cat("${TD_SUPPORT_DIR}/xz_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# cat("${TD_SUPPORT_DIR}/xz_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
#lzma2
# lzma2
cat("${TD_SUPPORT_DIR}/lzma_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
if (${BUILD_CONTRIB})
if(${BUILD_CONTRIB})
if(${BUILD_WITH_ROCKSDB})
cat("${TD_SUPPORT_DIR}/rocksdb_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
add_definitions(-DUSE_ROCKSDB)
endif()
else()
if (NOT ${TD_LINUX})
if(NOT ${TD_LINUX})
if(${BUILD_WITH_ROCKSDB})
cat("${TD_SUPPORT_DIR}/rocksdb_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
add_definitions(-DUSE_ROCKSDB)
@@ -134,9 +130,9 @@ else()
endif()
endif()
#cat("${TD_SUPPORT_DIR}/zstd_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# cat("${TD_SUPPORT_DIR}/zstd_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
#libuv
# libuv
if(${BUILD_WITH_UV})
cat("${TD_SUPPORT_DIR}/libuv_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
endif(${BUILD_WITH_UV})
@@ -152,17 +148,17 @@ if(${BUILD_WITH_S3})
cat("${TD_SUPPORT_DIR}/xml2_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
cat("${TD_SUPPORT_DIR}/libs3_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
cat("${TD_SUPPORT_DIR}/azure_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
add_definitions(-DUSE_S3)
# cos
elseif(${BUILD_WITH_COS})
#cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
#cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
#cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
#cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# cat("${TD_SUPPORT_DIR}/mxml_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# cat("${TD_SUPPORT_DIR}/apr_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# cat("${TD_SUPPORT_DIR}/apr-util_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
# cat("${TD_SUPPORT_DIR}/curl_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
cat("${TD_SUPPORT_DIR}/cos_CMakeLists.txt.in" ${CONTRIB_TMP_FILE})
add_definitions(-DUSE_COS)
endif()
# crashdump
@@ -191,9 +187,9 @@ endif()
# download dependencies
configure_file(${CONTRIB_TMP_FILE} "${TD_CONTRIB_DIR}/deps-download/CMakeLists.txt")
execute_process(COMMAND "${CMAKE_COMMAND}" -G "${CMAKE_GENERATOR}" .
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
execute_process(COMMAND "${CMAKE_COMMAND}" --build .
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
WORKING_DIRECTORY "${TD_CONTRIB_DIR}/deps-download")
# ================================================================================================
# Build
@@ -206,25 +202,27 @@ if(${BUILD_TEST})
gtest
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cpp-stub/src>
)
if(${TD_WINDOWS})
target_include_directories(
gtest
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cpp-stub/src_win>
)
endif(${TD_WINDOWS})
if(${TD_LINUX})
target_include_directories(
gtest
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cpp-stub/src_linux>
)
endif(${TD_LINUX})
if(${TD_DARWIN})
target_include_directories(
gtest
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cpp-stub/src_darwin>
)
endif(${TD_DARWIN})
endif(${BUILD_TEST})
# cJson
@@ -236,15 +234,16 @@ option(CJSON_BUILD_SHARED_LIBS "Overrides BUILD_SHARED_LIBS if CJSON_OVERRIDE_BU
add_subdirectory(cJson EXCLUDE_FROM_ALL)
target_include_directories(
cjson
# see https://stackoverflow.com/questions/25676277/cmake-target-include-directories-prints-an-error-when-i-try-to-add-the-source
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cJson>
)
unset(CMAKE_PROJECT_INCLUDE_BEFORE)
# xml2
#if(${BUILD_WITH_S3})
# add_subdirectory(xml2 EXCLUDE_FROM_ALL)
#endif()
# if(${BUILD_WITH_S3})
# add_subdirectory(xml2 EXCLUDE_FROM_ALL)
# endif()
# lz4
add_subdirectory(lz4/build/cmake EXCLUDE_FROM_ALL)
@@ -255,10 +254,12 @@ target_include_directories(
# zlib
set(CMAKE_PROJECT_INCLUDE_BEFORE "${TD_SUPPORT_DIR}/EnableCMP0048.txt.in")
if(${TD_DARWIN})
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=deprecated-non-prototype")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=deprecated-non-prototype")
endif(${TD_DARWIN})
add_subdirectory(zlib EXCLUDE_FROM_ALL)
target_include_directories(
zlibstatic
@@ -274,9 +275,9 @@ unset(CMAKE_PROJECT_INCLUDE_BEFORE)
# add_subdirectory(xz EXCLUDE_FROM_ALL)
# target_include_directories(
# xz
# PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/xz
# PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/xz
# xz
# PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/xz
# PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/xz
# )
# leveldb
@@ -291,24 +292,27 @@ endif(${BUILD_WITH_LEVELDB})
# rocksdb
# To support rocksdb build on ubuntu: sudo apt-get install libgflags-dev
if (${BUILD_WITH_UV})
if(${BUILD_WITH_UV})
if(${TD_LINUX})
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS_REL}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL}")
IF ("${CMAKE_BUILD_TYPE}" STREQUAL "")
if("${CMAKE_BUILD_TYPE}" STREQUAL "")
SET(CMAKE_BUILD_TYPE Release)
endif()
endif(${TD_LINUX})
endif (${BUILD_WITH_UV})
endif(${BUILD_WITH_UV})
if (${BUILD_WITH_ROCKSDB})
if (${BUILD_CONTRIB})
if(${BUILD_WITH_ROCKSDB})
if(${BUILD_CONTRIB})
if(${TD_LINUX})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL} -Wno-error=maybe-uninitialized -Wno-error=unused-but-set-variable -Wno-error=unused-variable -Wno-error=unused-function -Wno-errno=unused-private-field -Wno-error=unused-result")
if ("${CMAKE_BUILD_TYPE}" STREQUAL "")
if("${CMAKE_BUILD_TYPE}" STREQUAL "")
SET(CMAKE_BUILD_TYPE Release)
endif()
endif(${TD_LINUX})
MESSAGE(STATUS "ROCKSDB CXXXX STATUS CONFIG: " ${CMAKE_CXX_FLAGS})
MESSAGE(STATUS "ROCKSDB C STATUS CONFIG: " ${CMAKE_C_FLAGS})
@@ -316,22 +320,23 @@ if (${BUILD_WITH_ROCKSDB})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=maybe-uninitialized")
endif(${TD_DARWIN})
if (${TD_DARWIN_ARM64})
if(${TD_DARWIN_ARM64})
set(HAS_ARMV8_CRC true)
endif(${TD_DARWIN_ARM64})
if (${TD_WINDOWS})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4244 /wd4819")
if(${TD_WINDOWS})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4244 /wd4819 /std:c++17")
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4244 /wd4819")
option(WITH_JNI "" OFF)
if(CMAKE_C_FLAGS MATCHES "/MT" OR CMAKE_C_FLAGS MATCHES "/MTd")
message("Rocksdb build runtime lib use /MT or /MTd")
option(WITH_MD_LIBRARY "build with MD" OFF)
message("Rocksdb build runtime lib use /MT or /MTd")
option(WITH_MD_LIBRARY "build with MD" OFF)
endif()
set(SYSTEM_LIBS ${SYSTEM_LIBS} shlwapi.lib rpcrt4.lib)
endif(${TD_WINDOWS})
if(${TD_DARWIN})
option(HAVE_THREAD_LOCAL "" OFF)
option(WITH_IOSTATS_CONTEXT "" OFF)
@@ -357,30 +362,32 @@ if (${BUILD_WITH_ROCKSDB})
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/include>
)
else()
if (NOT ${TD_LINUX})
if(NOT ${TD_LINUX})
MESSAGE(STATUS "ROCKSDB CXX STATUS CONFIG: " ${CMAKE_CXX_FLAGS})
MESSAGE(STATUS "ROCKSDB C STATUS CONFIG: " ${CMAKE_C_FLAGS})
if(${TD_DARWIN})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-error=maybe-uninitialized")
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-error=maybe-uninitialized")
endif(${TD_DARWIN})
if (${TD_DARWIN_ARM64})
if(${TD_DARWIN_ARM64})
set(HAS_ARMV8_CRC true)
endif(${TD_DARWIN_ARM64})
if (${TD_WINDOWS})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4244 /wd4819")
if(${TD_WINDOWS})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4244 /wd4819 /std:c++17")
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4244 /wd4819")
option(WITH_JNI "" OFF)
if(CMAKE_C_FLAGS MATCHES "/MT" OR CMAKE_C_FLAGS MATCHES "/MTd")
message("Rocksdb build runtime lib use /MT or /MTd")
option(WITH_MD_LIBRARY "build with MD" OFF)
endif()
set(SYSTEM_LIBS ${SYSTEM_LIBS} shlwapi.lib rpcrt4.lib)
endif(${TD_WINDOWS})
if(${TD_DARWIN})
option(HAVE_THREAD_LOCAL "" OFF)
option(WITH_IOSTATS_CONTEXT "" OFF)
@@ -406,44 +413,44 @@ if (${BUILD_WITH_ROCKSDB})
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/include>
)
endif()
endif()
endif()
if(${BUILD_WITH_S3})
INCLUDE_DIRECTORIES($ENV{HOME}/.cos-local.2/include)
MESSAGE("build with s3: ${BUILD_WITH_S3}")
# cos
elseif(${BUILD_WITH_COS})
if(${TD_LINUX})
set(CMAKE_PREFIX_PATH $ENV{HOME}/.cos-local.1)
#ADD_DEFINITIONS(-DMINIXML_LIBRARY=${CMAKE_BINARY_DIR}/build/lib/libxml.a)
option(ENABLE_TEST "Enable the tests" OFF)
INCLUDE_DIRECTORIES($ENV{HOME}/.cos-local.1/include)
#MESSAGE("$ENV{HOME}/.cos-local.1/include")
if(${TD_LINUX})
set(CMAKE_PREFIX_PATH $ENV{HOME}/.cos-local.1)
set(CMAKE_BUILD_TYPE Release)
set(ORIG_CMAKE_PROJECT_NAME ${CMAKE_PROJECT_NAME})
set(CMAKE_PROJECT_NAME cos_c_sdk)
# ADD_DEFINITIONS(-DMINIXML_LIBRARY=${CMAKE_BINARY_DIR}/build/lib/libxml.a)
option(ENABLE_TEST "Enable the tests" OFF)
INCLUDE_DIRECTORIES($ENV{HOME}/.cos-local.1/include)
add_subdirectory(cos-c-sdk-v5 EXCLUDE_FROM_ALL)
target_include_directories(
cos_c_sdk
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cos-c-sdk-v5/cos_c_sdk>
)
# MESSAGE("$ENV{HOME}/.cos-local.1/include")
set(CMAKE_BUILD_TYPE Release)
set(ORIG_CMAKE_PROJECT_NAME ${CMAKE_PROJECT_NAME})
set(CMAKE_PROJECT_NAME cos_c_sdk)
set(CMAKE_PROJECT_NAME ${ORIG_CMAKE_PROJECT_NAME})
else()
endif(${TD_LINUX})
add_subdirectory(cos-c-sdk-v5 EXCLUDE_FROM_ALL)
target_include_directories(
cos_c_sdk
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/cos-c-sdk-v5/cos_c_sdk>
)
set(CMAKE_PROJECT_NAME ${ORIG_CMAKE_PROJECT_NAME})
else()
endif(${TD_LINUX})
endif()
# pthread
if(${BUILD_PTHREAD})
if ("${CMAKE_BUILD_TYPE}" STREQUAL "")
SET(CMAKE_BUILD_TYPE Release)
if("${CMAKE_BUILD_TYPE}" STREQUAL "")
SET(CMAKE_BUILD_TYPE Release)
endif()
add_definitions(-DPTW32_STATIC_LIB)
add_subdirectory(pthread EXCLUDE_FROM_ALL)
set_target_properties(libpthreadVC3 PROPERTIES OUTPUT_NAME pthread)
@@ -451,16 +458,15 @@ if(${BUILD_PTHREAD})
target_link_libraries(pthread INTERFACE libpthreadVC3)
endif()
# jemalloc
if(${JEMALLOC_ENABLED})
include(ExternalProject)
ExternalProject_Add(jemalloc
PREFIX "jemalloc"
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jemalloc
BUILD_IN_SOURCE 1
CONFIGURE_COMMAND ./autogen.sh COMMAND ./configure --prefix=${CMAKE_BINARY_DIR}/build/ --disable-initial-exec-tls
BUILD_COMMAND ${MAKE}
PREFIX "jemalloc"
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/jemalloc
BUILD_IN_SOURCE 1
CONFIGURE_COMMAND ./autogen.sh COMMAND ./configure --prefix=${CMAKE_BINARY_DIR}/build/ --disable-initial-exec-tls
BUILD_COMMAND ${MAKE}
)
INCLUDE_DIRECTORIES(${CMAKE_BINARY_DIR}/build/include)
endif()
@@ -514,12 +520,13 @@ endif(${BUILD_WCWIDTH})
# LIBUV
if(${BUILD_WITH_UV})
if (TD_WINDOWS)
if(TD_WINDOWS)
# There is no GetHostNameW function on win7.
file(READ "libuv/src/win/util.c" LIBUV_WIN_UTIL_CONTENT)
string(REPLACE "if (GetHostNameW(buf, UV_MAXHOSTNAMESIZE" "DWORD nSize = UV_MAXHOSTNAMESIZE;\n if (GetComputerNameW(buf, &nSize" LIBUV_WIN_UTIL_CONTENT "${LIBUV_WIN_UTIL_CONTENT}")
file(WRITE "libuv/src/win/util.c" "${LIBUV_WIN_UTIL_CONTENT}")
endif ()
endif()
add_subdirectory(libuv EXCLUDE_FROM_ALL)
endif(${BUILD_WITH_UV})
@@ -535,6 +542,7 @@ if(${BUILD_WITH_SQLITE})
INTERFACE m
INTERFACE pthread
)
if(NOT TD_WINDOWS)
target_link_libraries(sqlite
INTERFACE dl
@@ -545,36 +553,38 @@ endif(${BUILD_WITH_SQLITE})
# addr2line
if(${BUILD_ADDR2LINE})
if(NOT ${TD_WINDOWS})
check_include_file( "sys/types.h" HAVE_SYS_TYPES_H)
check_include_file( "sys/stat.h" HAVE_SYS_STAT_H )
check_include_file( "inttypes.h" HAVE_INTTYPES_H )
check_include_file( "stddef.h" HAVE_STDDEF_H )
check_include_file( "stdlib.h" HAVE_STDLIB_H )
check_include_file( "string.h" HAVE_STRING_H )
check_include_file( "memory.h" HAVE_MEMORY_H )
check_include_file( "strings.h" HAVE_STRINGS_H )
check_include_file( "stdint.h" HAVE_STDINT_H )
check_include_file( "unistd.h" HAVE_UNISTD_H )
check_include_file( "sgidefs.h" HAVE_SGIDEFS_H )
check_include_file( "stdafx.h" HAVE_STDAFX_H )
check_include_file( "elf.h" HAVE_ELF_H )
check_include_file( "libelf.h" HAVE_LIBELF_H )
check_include_file( "libelf/libelf.h" HAVE_LIBELF_LIBELF_H)
check_include_file( "alloca.h" HAVE_ALLOCA_H )
check_include_file( "elfaccess.h" HAVE_ELFACCESS_H)
check_include_file( "sys/elf_386.h" HAVE_SYS_ELF_386_H )
check_include_file( "sys/elf_amd64.h" HAVE_SYS_ELF_AMD64_H)
check_include_file( "sys/elf_sparc.h" HAVE_SYS_ELF_SPARC_H)
check_include_file( "sys/ia64/elf.h" HAVE_SYS_IA64_ELF_H )
check_include_file("sys/types.h" HAVE_SYS_TYPES_H)
check_include_file("sys/stat.h" HAVE_SYS_STAT_H)
check_include_file("inttypes.h" HAVE_INTTYPES_H)
check_include_file("stddef.h" HAVE_STDDEF_H)
check_include_file("stdlib.h" HAVE_STDLIB_H)
check_include_file("string.h" HAVE_STRING_H)
check_include_file("memory.h" HAVE_MEMORY_H)
check_include_file("strings.h" HAVE_STRINGS_H)
check_include_file("stdint.h" HAVE_STDINT_H)
check_include_file("unistd.h" HAVE_UNISTD_H)
check_include_file("sgidefs.h" HAVE_SGIDEFS_H)
check_include_file("stdafx.h" HAVE_STDAFX_H)
check_include_file("elf.h" HAVE_ELF_H)
check_include_file("libelf.h" HAVE_LIBELF_H)
check_include_file("libelf/libelf.h" HAVE_LIBELF_LIBELF_H)
check_include_file("alloca.h" HAVE_ALLOCA_H)
check_include_file("elfaccess.h" HAVE_ELFACCESS_H)
check_include_file("sys/elf_386.h" HAVE_SYS_ELF_386_H)
check_include_file("sys/elf_amd64.h" HAVE_SYS_ELF_AMD64_H)
check_include_file("sys/elf_sparc.h" HAVE_SYS_ELF_SPARC_H)
check_include_file("sys/ia64/elf.h" HAVE_SYS_IA64_ELF_H)
set(VERSION 0.3.1)
set(PACKAGE_VERSION "\"${VERSION}\"")
configure_file(libdwarf/cmake/config.h.cmake config.h)
file(GLOB_RECURSE LIBDWARF_SOURCES "libdwarf/src/lib/libdwarf/*.c")
add_library(libdwarf STATIC ${LIBDWARF_SOURCES})
set_target_properties(libdwarf PROPERTIES OUTPUT_NAME "libdwarf")
if(HAVE_LIBELF_H OR HAVE_LIBELF_LIBELF_H)
target_link_libraries(libdwarf PUBLIC libelf)
endif()
target_include_directories(libdwarf SYSTEM PUBLIC "libdwarf/src/lib/libdwarf" ${CMAKE_CURRENT_BINARY_DIR})
file(READ "addr2line/addr2line.c" ADDR2LINE_CONTENT)
string(REPLACE "static int" "int" ADDR2LINE_CONTENT "${ADDR2LINE_CONTENT}")
@@ -583,7 +593,7 @@ if(${BUILD_ADDR2LINE})
file(WRITE "addr2line/addr2line.c" "${ADDR2LINE_CONTENT}")
add_library(addr2line STATIC "addr2line/addr2line.c")
target_link_libraries(addr2line PUBLIC libdwarf dl z)
target_include_directories(addr2line PUBLIC "libdwarf/src/lib/libdwarf" )
target_include_directories(addr2line PUBLIC "libdwarf/src/lib/libdwarf")
endif(NOT ${TD_WINDOWS})
endif(${BUILD_ADDR2LINE})
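
The addr2line branch above is a compact example of autoconf-style feature detection done natively in CMake: probe each header once, then bake the resulting HAVE_* variables into a generated config header. A minimal sketch of the pattern, with a hypothetical `config.h.cmake` template standing in for `libdwarf/cmake/config.h.cmake`:

```cmake
include(CheckIncludeFile)

# Each probe sets its variable to 1 if the header compiles, else leaves it false.
check_include_file("unistd.h" HAVE_UNISTD_H)
check_include_file("libelf.h" HAVE_LIBELF_H)

# config.h.cmake holds lines such as:
#   #cmakedefine HAVE_UNISTD_H 1
#   #cmakedefine HAVE_LIBELF_H 1
configure_file(config.h.cmake ${CMAKE_CURRENT_BINARY_DIR}/config.h)

# Make the generated header reachable via #include "config.h"
include_directories(${CMAKE_CURRENT_BINARY_DIR})
```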
@@ -592,31 +602,41 @@ if(${BUILD_GEOS})
if(${TD_LINUX})
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS_REL}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS_REL}")
if ("${CMAKE_BUILD_TYPE}" STREQUAL "")
if("${CMAKE_BUILD_TYPE}" STREQUAL "")
SET(CMAKE_BUILD_TYPE Release)
endif()
endif(${TD_LINUX})
option(BUILD_SHARED_LIBS "Build GEOS with shared libraries" OFF)
add_subdirectory(geos EXCLUDE_FROM_ALL)
if (${TD_WINDOWS})
if(${TD_WINDOWS})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}")
else ()
else()
unset(CMAKE_CXX_STANDARD CACHE) # undo libgeos's setting of global CMAKE_CXX_STANDARD
endif(${TD_WINDOWS})
target_include_directories(
geos_c
PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/geos/include>
)
endif(${BUILD_GEOS})
if (${BUILD_PCRE2})
if(${BUILD_PCRE2})
add_subdirectory(pcre2 EXCLUDE_FROM_ALL)
endif(${BUILD_PCRE2})
if(${TD_LINUX} AND ${BUILD_WITH_S3})
add_subdirectory(azure-cmake EXCLUDE_FROM_ALL)
endif()
# ================================================================================================
# Build test
# ================================================================================================
MESSAGE("build with dependency tests: ${BUILD_DEPENDENCY_TESTS}")
if(${BUILD_DEPENDENCY_TESTS})
add_subdirectory(test EXCLUDE_FROM_ALL)
endif(${BUILD_DEPENDENCY_TESTS})


@@ -0,0 +1,73 @@
# lib_azure_sdk
set(AZURE_DIR "${TD_CONTRIB_DIR}/azure-sdk-for-cpp-azure-storage-blobs_12.13.0-beta.1")
set(AZURE_SDK_LIBRARY_DIR "${AZURE_DIR}/sdk")
file(GLOB AZURE_SDK_SRC
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/credentials/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/cryptography/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/http/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/http/curl/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/io/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/src/tracing/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/identity/azure-identity/src/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-blobs/src/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-blobs/src/private/*.cpp"
"${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-common/src/*.cpp"
)
file(GLOB AZURE_SDK_UNIFIED_SRC
${AZURE_SDK_SRC}
)
set(AZURE_SDK_INCLUDES
"${AZURE_SDK_LIBRARY_DIR}/core/azure-core/inc/"
"${AZURE_SDK_LIBRARY_DIR}/identity/azure-identity/inc/"
"${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-common/inc/"
"${AZURE_SDK_LIBRARY_DIR}/storage/azure-storage-blobs/inc/"
)
add_library(_azure_sdk STATIC ${AZURE_SDK_UNIFIED_SRC})
target_compile_definitions(_azure_sdk PRIVATE BUILD_CURL_HTTP_TRANSPORT_ADAPTER)
target_include_directories(
_azure_sdk
PUBLIC "$ENV{HOME}/.cos-local.2/include"
)
find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
find_library(XML2_LIBRARY xml2 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
# find_library(CURL_LIBRARY curl)
# find_library(XML2_LIBRARY xml2)
find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
# find_library(CoreFoundation_Library CoreFoundation)
# find_library(SystemConfiguration_Library SystemConfiguration)
target_link_libraries(
_azure_sdk
PRIVATE ${CURL_LIBRARY}
PRIVATE ${SSL_LIBRARY}
PRIVATE ${CRYPTO_LIBRARY}
PRIVATE ${XML2_LIBRARY}
# PRIVATE xml2
PRIVATE zlib
# PRIVATE ${CoreFoundation_Library}
# PRIVATE ${SystemConfiguration_Library}
)
# Originally, on Windows azure-core is built with bcrypt and crypt32 by default
if(TARGET OpenSSL::SSL)
target_link_libraries(_azure_sdk PRIVATE OpenSSL::Crypto OpenSSL::SSL)
endif()
# Originally, on Windows azure-core is built with winhttp by default
if(TARGET td_contrib::curl)
target_link_libraries(_azure_sdk PRIVATE td_contrib::curl)
endif()
target_include_directories(_azure_sdk SYSTEM BEFORE PUBLIC ${AZURE_SDK_INCLUDES})
add_library(td_contrib::azure_sdk ALIAS _azure_sdk)


@@ -28,5 +28,6 @@ if(${BUILD_WITH_TRAFT})
# add_subdirectory(traft)
endif(${BUILD_WITH_TRAFT})
add_subdirectory(azure)
add_subdirectory(tdev)
add_subdirectory(lz4)


@@ -0,0 +1,27 @@
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED True)
add_executable(
azure-test
main.cpp
)
find_library(CURL_LIBRARY curl $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
find_library(XML2_LIBRARY xml2 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
# find_library(XML2_LIBRARY xml2)
find_library(SSL_LIBRARY ssl $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
find_library(CRYPTO_LIBRARY crypto $ENV{HOME}/.cos-local.2/lib64 $ENV{HOME}/.cos-local.2/lib NO_DEFAULT_PATH)
# find_library(CoreFoundation_Library CoreFoundation)
# find_library(SystemConfiguration_Library SystemConfiguration)
target_link_libraries(
azure-test
PRIVATE _azure_sdk
PRIVATE ${CURL_LIBRARY}
PRIVATE ${XML2_LIBRARY}
PRIVATE ${SSL_LIBRARY}
PRIVATE ${CRYPTO_LIBRARY}
PRIVATE dl
PRIVATE pthread
)


@@ -0,0 +1,99 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <cstdlib>  // std::getenv
#include <iostream>
// Include the necessary SDK headers
#include <azure/core.hpp>
#include <azure/storage/blobs.hpp>
// Add appropriate using namespace directives
using namespace Azure::Storage;
using namespace Azure::Storage::Blobs;
// Secrets should be stored & retrieved from secure locations such as Azure::KeyVault. For
// convenience and brevity of samples, the secrets are retrieved from environment variables.
std::string GetEndpointUrl() {
// return std::getenv("AZURE_STORAGE_ACCOUNT_URL");
// getenv may return NULL when the variable is unset; guard before constructing a std::string.
const char* accountId = getenv("ablob_account_id");
if (accountId == NULL || accountId[0] == '\0') {
return "";
}
return std::string(accountId) + ".blob.core.windows.net";
}
std::string GetAccountName() {
// return std::getenv("AZURE_STORAGE_ACCOUNT_NAME");
const char* accountName = getenv("ablob_account_id");
return accountName == NULL ? "" : accountName;
}
std::string GetAccountKey() {
// return std::getenv("AZURE_STORAGE_ACCOUNT_KEY");
const char* accountKey = getenv("ablob_account_secret");
return accountKey == NULL ? "" : accountKey;
}
int main() {
std::string endpointUrl = GetEndpointUrl();
std::string accountName = GetAccountName();
std::string accountKey = GetAccountKey();
try {
auto sharedKeyCredential = std::make_shared<StorageSharedKeyCredential>(accountName, accountKey);
std::string accountURL = "https://fd2d01cd892f844eeaa2273.blob.core.windows.net";
BlobServiceClient blobServiceClient(accountURL, sharedKeyCredential);
std::string containerName = "myblobcontainer";
// auto containerClient = blobServiceClient.GetBlobContainerClient("myblobcontainer");
auto containerClient = blobServiceClient.GetBlobContainerClient("td-test");
// Create the container if it does not exist
std::cout << "Creating container: " << containerName << std::endl;
// containerClient.CreateIfNotExists();
std::string blobName = "blob.txt";
uint8_t blobContent[] = "Hello Azure!";
// Create the block blob client
BlockBlobClient blobClient = containerClient.GetBlockBlobClient(blobName);
// Upload the blob
std::cout << "Uploading blob: " << blobName << std::endl;
blobClient.UploadFrom(blobContent, sizeof(blobContent));
/*
auto blockBlobClient = BlockBlobClient(endpointUrl, sharedKeyCredential);
// Create some data to upload into the blob.
std::vector<uint8_t> data = {1, 2, 3, 4};
Azure::Core::IO::MemoryBodyStream stream(data);
Azure::Response<Models::UploadBlockBlobResult> response = blockBlobClient.Upload(stream);
Models::UploadBlockBlobResult model = response.Value;
std::cout << "Last modified date of uploaded blob: " << model.LastModified.ToString()
<< std::endl;
*/
} catch (const Azure::Core::RequestFailedException& e) {
std::cout << "Status Code: " << static_cast<int>(e.StatusCode) << ", Reason Phrase: " << e.ReasonPhrase
<< std::endl;
std::cout << e.what() << std::endl;
return 1;
}
return 0;
}


@@ -90,7 +90,7 @@ If `maven` is used to manage the projects, what needs to be done is only adding
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>3.3.0</version>
<version>3.3.3</version>
</dependency>
```


@@ -585,7 +585,7 @@ def process(block):
return result
```
Crate and test the UDF:
Create and test the UDF:
```sql
create function nsum as '/root/udf/nsum.py' outputtype double language 'Python';


@@ -54,91 +54,102 @@ Command-line arguments take precedence over environment variables over configura
```shell
Usage of taosAdapter:
--collectd.db string collectd db name. Env "TAOS_ADAPTER_COLLECTD_DB" (default "collectd")
--collectd.enable enable collectd. Env "TAOS_ADAPTER_COLLECTD_ENABLE" (default true)
--collectd.password string collectd password. Env "TAOS_ADAPTER_COLLECTD_PASSWORD" (default "taosdata")
--collectd.port int collectd server port. Env "TAOS_ADAPTER_COLLECTD_PORT" (default 6045)
--collectd.ttl int collectd data ttl. Env "TAOS_ADAPTER_COLLECTD_TTL"
--collectd.user string collectd user. Env "TAOS_ADAPTER_COLLECTD_USER" (default "root")
--collectd.worker int collectd write worker. Env "TAOS_ADAPTER_COLLECTD_WORKER" (default 10)
-c, --config string config path default /etc/taos/taosadapter.toml
--cors.allowAllOrigins cors allow all origins. Env "TAOS_ADAPTER_CORS_ALLOW_ALL_ORIGINS" (default true)
--cors.allowCredentials cors allow credentials. Env "TAOS_ADAPTER_CORS_ALLOW_Credentials"
--cors.allowHeaders stringArray cors allow HEADERS. Env "TAOS_ADAPTER_ALLOW_HEADERS"
--cors.allowOrigins stringArray cors allow origins. Env "TAOS_ADAPTER_ALLOW_ORIGINS"
--cors.allowWebSockets cors allow WebSockets. Env "TAOS_ADAPTER_CORS_ALLOW_WebSockets"
--cors.exposeHeaders stringArray cors expose headers. Env "TAOS_ADAPTER_Expose_Headers"
--debug enable debug mode. Env "TAOS_ADAPTER_DEBUG" (default true)
--help Print this help message and exit
--httpCodeServerError Use a non-200 http status code when server returns an error. Env "TAOS_ADAPTER_HTTP_CODE_SERVER_ERROR"
--influxdb.enable enable influxdb. Env "TAOS_ADAPTER_INFLUXDB_ENABLE" (default true)
--log.enableRecordHttpSql whether to record http sql. Env "TAOS_ADAPTER_LOG_ENABLE_RECORD_HTTP_SQL"
--log.path string log path. Env "TAOS_ADAPTER_LOG_PATH" (default "/var/log/taos")
--log.rotationCount uint log rotation count. Env "TAOS_ADAPTER_LOG_ROTATION_COUNT" (default 30)
--log.rotationSize string log rotation size(KB MB GB), must be a positive integer. Env "TAOS_ADAPTER_LOG_ROTATION_SIZE" (default "1GB")
--log.rotationTime duration log rotation time. Env "TAOS_ADAPTER_LOG_ROTATION_TIME" (default 24h0m0s)
--log.sqlRotationCount uint record sql log rotation count. Env "TAOS_ADAPTER_LOG_SQL_ROTATION_COUNT" (default 2)
--log.sqlRotationSize string record sql log rotation size(KB MB GB), must be a positive integer. Env "TAOS_ADAPTER_LOG_SQL_ROTATION_SIZE" (default "1GB")
--log.sqlRotationTime duration record sql log rotation time. Env "TAOS_ADAPTER_LOG_SQL_ROTATION_TIME" (default 24h0m0s)
--logLevel string log level (panic fatal error warn warning info debug trace). Env "TAOS_ADAPTER_LOG_LEVEL" (default "info")
--monitor.collectDuration duration Set monitor duration. Env "TAOS_ADAPTER_MONITOR_COLLECT_DURATION" (default 3s)
--monitor.disable Whether to disable monitoring. Env "TAOS_ADAPTER_MONITOR_DISABLE"
--monitor.disableCollectClientIP Whether to disable collecting clientIP. Env "TAOS_ADAPTER_MONITOR_DISABLE_COLLECT_CLIENT_IP"
--monitor.identity string The identity of the current instance, or 'hostname:port' if it is empty. Env "TAOS_ADAPTER_MONITOR_IDENTITY"
--monitor.incgroup Whether running in cgroup. Env "TAOS_ADAPTER_MONITOR_INCGROUP"
--monitor.password string TDengine password. Env "TAOS_ADAPTER_MONITOR_PASSWORD" (default "taosdata")
--monitor.pauseAllMemoryThreshold float Memory percentage threshold for pause all. Env "TAOS_ADAPTER_MONITOR_PAUSE_ALL_MEMORY_THRESHOLD" (default 80)
--monitor.pauseQueryMemoryThreshold float Memory percentage threshold for pause query. Env "TAOS_ADAPTER_MONITOR_PAUSE_QUERY_MEMORY_THRESHOLD" (default 70)
--monitor.user string TDengine user. Env "TAOS_ADAPTER_MONITOR_USER" (default "root")
--monitor.writeInterval duration Set write to TDengine interval. Env "TAOS_ADAPTER_MONITOR_WRITE_INTERVAL" (default 30s)
--monitor.writeToTD Whether write metrics to TDengine. Env "TAOS_ADAPTER_MONITOR_WRITE_TO_TD"
--node_exporter.caCertFile string node_exporter ca cert file path. Env "TAOS_ADAPTER_NODE_EXPORTER_CA_CERT_FILE"
--node_exporter.certFile string node_exporter cert file path. Env "TAOS_ADAPTER_NODE_EXPORTER_CERT_FILE"
--node_exporter.db string node_exporter db name. Env "TAOS_ADAPTER_NODE_EXPORTER_DB" (default "node_exporter")
--node_exporter.enable enable node_exporter. Env "TAOS_ADAPTER_NODE_EXPORTER_ENABLE"
--node_exporter.gatherDuration duration node_exporter gather duration. Env "TAOS_ADAPTER_NODE_EXPORTER_GATHER_DURATION" (default 5s)
--node_exporter.httpBearerTokenString string node_exporter http bearer token. Env "TAOS_ADAPTER_NODE_EXPORTER_HTTP_BEARER_TOKEN_STRING"
--node_exporter.httpPassword string node_exporter http password. Env "TAOS_ADAPTER_NODE_EXPORTER_HTTP_PASSWORD"
--node_exporter.httpUsername string node_exporter http username. Env "TAOS_ADAPTER_NODE_EXPORTER_HTTP_USERNAME"
--node_exporter.insecureSkipVerify node_exporter skip ssl check. Env "TAOS_ADAPTER_NODE_EXPORTER_INSECURE_SKIP_VERIFY" (default true)
--node_exporter.keyFile string node_exporter cert key file path. Env "TAOS_ADAPTER_NODE_EXPORTER_KEY_FILE"
--node_exporter.password string node_exporter password. Env "TAOS_ADAPTER_NODE_EXPORTER_PASSWORD" (default "taosdata")
--node_exporter.responseTimeout duration node_exporter response timeout. Env "TAOS_ADAPTER_NODE_EXPORTER_RESPONSE_TIMEOUT" (default 5s)
--node_exporter.ttl int node_exporter data ttl. Env "TAOS_ADAPTER_NODE_EXPORTER_TTL"
--node_exporter.urls strings node_exporter urls. Env "TAOS_ADAPTER_NODE_EXPORTER_URLS" (default [http://localhost:9100])
--node_exporter.user string node_exporter user. Env "TAOS_ADAPTER_NODE_EXPORTER_USER" (default "root")
--opentsdb.enable enable opentsdb. Env "TAOS_ADAPTER_OPENTSDB_ENABLE" (default true)
--opentsdb_telnet.batchSize int opentsdb_telnet batch size. Env "TAOS_ADAPTER_OPENTSDB_TELNET_BATCH_SIZE" (default 1)
--opentsdb_telnet.dbs strings opentsdb_telnet db names. Env "TAOS_ADAPTER_OPENTSDB_TELNET_DBS" (default [opentsdb_telnet,collectd_tsdb,icinga2_tsdb,tcollector_tsdb])
--opentsdb_telnet.enable enable opentsdb telnet,warning: without auth info(default false). Env "TAOS_ADAPTER_OPENTSDB_TELNET_ENABLE"
--opentsdb_telnet.flushInterval duration opentsdb_telnet flush interval (0s means not valid) . Env "TAOS_ADAPTER_OPENTSDB_TELNET_FLUSH_INTERVAL"
--opentsdb_telnet.maxTCPConnections int max tcp connections. Env "TAOS_ADAPTER_OPENTSDB_TELNET_MAX_TCP_CONNECTIONS" (default 250)
--opentsdb_telnet.password string opentsdb_telnet password. Env "TAOS_ADAPTER_OPENTSDB_TELNET_PASSWORD" (default "taosdata")
--opentsdb_telnet.ports ints opentsdb telnet tcp port. Env "TAOS_ADAPTER_OPENTSDB_TELNET_PORTS" (default [6046,6047,6048,6049])
--opentsdb_telnet.tcpKeepAlive enable tcp keep alive. Env "TAOS_ADAPTER_OPENTSDB_TELNET_TCP_KEEP_ALIVE"
--opentsdb_telnet.ttl int opentsdb_telnet data ttl. Env "TAOS_ADAPTER_OPENTSDB_TELNET_TTL"
--opentsdb_telnet.user string opentsdb_telnet user. Env "TAOS_ADAPTER_OPENTSDB_TELNET_USER" (default "root")
--pool.idleTimeout duration Set idle connection timeout. Env "TAOS_ADAPTER_POOL_IDLE_TIMEOUT"
--pool.maxConnect int max connections to server. Env "TAOS_ADAPTER_POOL_MAX_CONNECT"
--pool.maxIdle int max idle connections to server. Env "TAOS_ADAPTER_POOL_MAX_IDLE"
-P, --port int http port. Env "TAOS_ADAPTER_PORT" (default 6041)
--prometheus.enable enable prometheus. Env "TAOS_ADAPTER_PROMETHEUS_ENABLE" (default true)
--restfulRowLimit int restful returns the maximum number of rows (-1 means no limit). Env "TAOS_ADAPTER_RESTFUL_ROW_LIMIT" (default -1)
--smlAutoCreateDB Whether to automatically create db when writing with schemaless. Env "TAOS_ADAPTER_SML_AUTO_CREATE_DB"
--statsd.allowPendingMessages int statsd allow pending messages. Env "TAOS_ADAPTER_STATSD_ALLOW_PENDING_MESSAGES" (default 50000)
--statsd.db string statsd db name. Env "TAOS_ADAPTER_STATSD_DB" (default "statsd")
--statsd.deleteCounters statsd delete counter cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_COUNTERS" (default true)
--statsd.deleteGauges statsd delete gauge cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_GAUGES" (default true)
--statsd.deleteSets statsd delete set cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_SETS" (default true)
--statsd.deleteTimings statsd delete timing cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_TIMINGS" (default true)
--statsd.enable enable statsd. Env "TAOS_ADAPTER_STATSD_ENABLE" (default true)
--statsd.gatherInterval duration statsd gather interval. Env "TAOS_ADAPTER_STATSD_GATHER_INTERVAL" (default 5s)
--statsd.maxTCPConnections int statsd max tcp connections. Env "TAOS_ADAPTER_STATSD_MAX_TCP_CONNECTIONS" (default 250)
--statsd.password string statsd password. Env "TAOS_ADAPTER_STATSD_PASSWORD" (default "taosdata")
--statsd.port int statsd server port. Env "TAOS_ADAPTER_STATSD_PORT" (default 6044)
--statsd.protocol string statsd protocol [tcp or udp]. Env "TAOS_ADAPTER_STATSD_PROTOCOL" (default "udp")
--statsd.tcpKeepAlive enable tcp keep alive. Env "TAOS_ADAPTER_STATSD_TCP_KEEP_ALIVE"
--statsd.ttl int statsd data ttl. Env "TAOS_ADAPTER_STATSD_TTL"
--statsd.user string statsd user. Env "TAOS_ADAPTER_STATSD_USER" (default "root")
--statsd.worker int statsd write worker. Env "TAOS_ADAPTER_STATSD_WORKER" (default 10)
--taosConfigDir string load taos client config path. Env "TAOS_ADAPTER_TAOS_CONFIG_FILE"
--tmq.releaseIntervalMultiplierForAutocommit int When set to autocommit, the interval for message release is a multiple of the autocommit interval, with a default value of 2 and a minimum value of 1 and a maximum value of 10. Env "TAOS_ADAPTER_TMQ_RELEASE_INTERVAL_MULTIPLIER_FOR_AUTOCOMMIT" (default 2)
--version Print the version and exit
--collectd.db string collectd db name. Env "TAOS_ADAPTER_COLLECTD_DB" (default "collectd")
--collectd.enable enable collectd. Env "TAOS_ADAPTER_COLLECTD_ENABLE" (default true)
--collectd.password string collectd password. Env "TAOS_ADAPTER_COLLECTD_PASSWORD" (default "taosdata")
--collectd.port int collectd server port. Env "TAOS_ADAPTER_COLLECTD_PORT" (default 6045)
--collectd.ttl int collectd data ttl. Env "TAOS_ADAPTER_COLLECTD_TTL"
--collectd.user string collectd user. Env "TAOS_ADAPTER_COLLECTD_USER" (default "root")
--collectd.worker int collectd write worker. Env "TAOS_ADAPTER_COLLECTD_WORKER" (default 10)
-c, --config string config path default /etc/taos/taosadapter.toml
--cors.allowAllOrigins cors allow all origins. Env "TAOS_ADAPTER_CORS_ALLOW_ALL_ORIGINS" (default true)
--cors.allowCredentials cors allow credentials. Env "TAOS_ADAPTER_CORS_ALLOW_Credentials"
--cors.allowHeaders stringArray cors allow HEADERS. Env "TAOS_ADAPTER_ALLOW_HEADERS"
--cors.allowOrigins stringArray cors allow origins. Env "TAOS_ADAPTER_ALLOW_ORIGINS"
--cors.allowWebSockets cors allow WebSockets. Env "TAOS_ADAPTER_CORS_ALLOW_WebSockets"
--cors.exposeHeaders stringArray cors expose headers. Env "TAOS_ADAPTER_Expose_Headers"
--debug enable debug mode. Env "TAOS_ADAPTER_DEBUG" (default true)
--help Print this help message and exit
--httpCodeServerError Use a non-200 http status code when server returns an error. Env "TAOS_ADAPTER_HTTP_CODE_SERVER_ERROR"
--influxdb.enable enable influxdb. Env "TAOS_ADAPTER_INFLUXDB_ENABLE" (default true)
--instanceId int instance ID. Env "TAOS_ADAPTER_INSTANCE_ID" (default 32)
--log.compress whether to compress old log. Env "TAOS_ADAPTER_LOG_COMPRESS"
--log.enableRecordHttpSql whether to record http sql. Env "TAOS_ADAPTER_LOG_ENABLE_RECORD_HTTP_SQL"
--log.level string log level (trace debug info warning error). Env "TAOS_ADAPTER_LOG_LEVEL" (default "info")
--log.path string log path. Env "TAOS_ADAPTER_LOG_PATH" (default "/var/log/taos")
--log.reservedDiskSize string reserved disk size for log dir (KB MB GB), must be a positive integer. Env "TAOS_ADAPTER_LOG_RESERVED_DISK_SIZE" (default "1GB")
--log.rotationCount uint log rotation count. Env "TAOS_ADAPTER_LOG_ROTATION_COUNT" (default 30)
--log.rotationSize string log rotation size(KB MB GB), must be a positive integer. Env "TAOS_ADAPTER_LOG_ROTATION_SIZE" (default "1GB")
--log.rotationTime duration deprecated: log rotation time always 24 hours. Env "TAOS_ADAPTER_LOG_ROTATION_TIME" (default 24h0m0s)
--log.sqlRotationCount uint record sql log rotation count. Env "TAOS_ADAPTER_LOG_SQL_ROTATION_COUNT" (default 2)
--log.sqlRotationSize string record sql log rotation size(KB MB GB), must be a positive integer. Env "TAOS_ADAPTER_LOG_SQL_ROTATION_SIZE" (default "1GB")
--log.sqlRotationTime duration record sql log rotation time. Env "TAOS_ADAPTER_LOG_SQL_ROTATION_TIME" (default 24h0m0s)
--logLevel string log level (trace debug info warning error). Env "TAOS_ADAPTER_LOG_LEVEL" (default "info")
--monitor.collectDuration duration Set monitor duration. Env "TAOS_ADAPTER_MONITOR_COLLECT_DURATION" (default 3s)
--monitor.disable Whether to disable monitoring. Env "TAOS_ADAPTER_MONITOR_DISABLE" (default true)
--monitor.identity string The identity of the current instance, or 'hostname:port' if it is empty. Env "TAOS_ADAPTER_MONITOR_IDENTITY"
--monitor.incgroup Whether running in cgroup. Env "TAOS_ADAPTER_MONITOR_INCGROUP"
--monitor.pauseAllMemoryThreshold float Memory percentage threshold for pause all. Env "TAOS_ADAPTER_MONITOR_PAUSE_ALL_MEMORY_THRESHOLD" (default 80)
--monitor.pauseQueryMemoryThreshold float Memory percentage threshold for pause query. Env "TAOS_ADAPTER_MONITOR_PAUSE_QUERY_MEMORY_THRESHOLD" (default 70)
--node_exporter.caCertFile string node_exporter ca cert file path. Env "TAOS_ADAPTER_NODE_EXPORTER_CA_CERT_FILE"
--node_exporter.certFile string node_exporter cert file path. Env "TAOS_ADAPTER_NODE_EXPORTER_CERT_FILE"
--node_exporter.db string node_exporter db name. Env "TAOS_ADAPTER_NODE_EXPORTER_DB" (default "node_exporter")
--node_exporter.enable enable node_exporter. Env "TAOS_ADAPTER_NODE_EXPORTER_ENABLE"
--node_exporter.gatherDuration duration node_exporter gather duration. Env "TAOS_ADAPTER_NODE_EXPORTER_GATHER_DURATION" (default 5s)
--node_exporter.httpBearerTokenString string node_exporter http bearer token. Env "TAOS_ADAPTER_NODE_EXPORTER_HTTP_BEARER_TOKEN_STRING"
--node_exporter.httpPassword string node_exporter http password. Env "TAOS_ADAPTER_NODE_EXPORTER_HTTP_PASSWORD"
--node_exporter.httpUsername string node_exporter http username. Env "TAOS_ADAPTER_NODE_EXPORTER_HTTP_USERNAME"
--node_exporter.insecureSkipVerify node_exporter skip ssl check. Env "TAOS_ADAPTER_NODE_EXPORTER_INSECURE_SKIP_VERIFY" (default true)
--node_exporter.keyFile string node_exporter cert key file path. Env "TAOS_ADAPTER_NODE_EXPORTER_KEY_FILE"
--node_exporter.password string node_exporter password. Env "TAOS_ADAPTER_NODE_EXPORTER_PASSWORD" (default "taosdata")
--node_exporter.responseTimeout duration node_exporter response timeout. Env "TAOS_ADAPTER_NODE_EXPORTER_RESPONSE_TIMEOUT" (default 5s)
--node_exporter.ttl int node_exporter data ttl. Env "TAOS_ADAPTER_NODE_EXPORTER_TTL"
--node_exporter.urls strings node_exporter urls. Env "TAOS_ADAPTER_NODE_EXPORTER_URLS" (default [http://localhost:9100])
--node_exporter.user string node_exporter user. Env "TAOS_ADAPTER_NODE_EXPORTER_USER" (default "root")
--opentsdb.enable enable opentsdb. Env "TAOS_ADAPTER_OPENTSDB_ENABLE" (default true)
--opentsdb_telnet.batchSize int opentsdb_telnet batch size. Env "TAOS_ADAPTER_OPENTSDB_TELNET_BATCH_SIZE" (default 1)
--opentsdb_telnet.dbs strings opentsdb_telnet db names. Env "TAOS_ADAPTER_OPENTSDB_TELNET_DBS" (default [opentsdb_telnet,collectd_tsdb,icinga2_tsdb,tcollector_tsdb])
--opentsdb_telnet.enable enable opentsdb telnet, warning: without auth info (default false). Env "TAOS_ADAPTER_OPENTSDB_TELNET_ENABLE"
--opentsdb_telnet.flushInterval duration opentsdb_telnet flush interval (0s means not valid). Env "TAOS_ADAPTER_OPENTSDB_TELNET_FLUSH_INTERVAL"
--opentsdb_telnet.maxTCPConnections int max tcp connections. Env "TAOS_ADAPTER_OPENTSDB_TELNET_MAX_TCP_CONNECTIONS" (default 250)
--opentsdb_telnet.password string opentsdb_telnet password. Env "TAOS_ADAPTER_OPENTSDB_TELNET_PASSWORD" (default "taosdata")
--opentsdb_telnet.ports ints opentsdb telnet tcp port. Env "TAOS_ADAPTER_OPENTSDB_TELNET_PORTS" (default [6046,6047,6048,6049])
--opentsdb_telnet.tcpKeepAlive enable tcp keep alive. Env "TAOS_ADAPTER_OPENTSDB_TELNET_TCP_KEEP_ALIVE"
--opentsdb_telnet.ttl int opentsdb_telnet data ttl. Env "TAOS_ADAPTER_OPENTSDB_TELNET_TTL"
--opentsdb_telnet.user string opentsdb_telnet user. Env "TAOS_ADAPTER_OPENTSDB_TELNET_USER" (default "root")
--pool.idleTimeout duration Set idle connection timeout. Env "TAOS_ADAPTER_POOL_IDLE_TIMEOUT"
--pool.maxConnect int max connections to server. Env "TAOS_ADAPTER_POOL_MAX_CONNECT"
--pool.maxIdle int max idle connections to server. Env "TAOS_ADAPTER_POOL_MAX_IDLE"
--pool.maxWait int max count of waiting for connection. Env "TAOS_ADAPTER_POOL_MAX_WAIT"
--pool.waitTimeout int wait for connection timeout seconds. Env "TAOS_ADAPTER_POOL_WAIT_TIMEOUT" (default 60)
-P, --port int http port. Env "TAOS_ADAPTER_PORT" (default 6041)
--prometheus.enable enable prometheus. Env "TAOS_ADAPTER_PROMETHEUS_ENABLE" (default true)
--restfulRowLimit int restful returns the maximum number of rows (-1 means no limit). Env "TAOS_ADAPTER_RESTFUL_ROW_LIMIT" (default -1)
--smlAutoCreateDB Whether to automatically create db when writing with schemaless. Env "TAOS_ADAPTER_SML_AUTO_CREATE_DB"
--statsd.allowPendingMessages int statsd allow pending messages. Env "TAOS_ADAPTER_STATSD_ALLOW_PENDING_MESSAGES" (default 50000)
--statsd.db string statsd db name. Env "TAOS_ADAPTER_STATSD_DB" (default "statsd")
--statsd.deleteCounters statsd delete counter cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_COUNTERS" (default true)
--statsd.deleteGauges statsd delete gauge cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_GAUGES" (default true)
--statsd.deleteSets statsd delete set cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_SETS" (default true)
--statsd.deleteTimings statsd delete timing cache after gather. Env "TAOS_ADAPTER_STATSD_DELETE_TIMINGS" (default true)
--statsd.enable enable statsd. Env "TAOS_ADAPTER_STATSD_ENABLE"
--statsd.gatherInterval duration statsd gather interval. Env "TAOS_ADAPTER_STATSD_GATHER_INTERVAL" (default 5s)
--statsd.maxTCPConnections int statsd max tcp connections. Env "TAOS_ADAPTER_STATSD_MAX_TCP_CONNECTIONS" (default 250)
--statsd.password string statsd password. Env "TAOS_ADAPTER_STATSD_PASSWORD" (default "taosdata")
--statsd.port int statsd server port. Env "TAOS_ADAPTER_STATSD_PORT" (default 6044)
--statsd.protocol string statsd protocol [tcp or udp]. Env "TAOS_ADAPTER_STATSD_PROTOCOL" (default "udp4")
--statsd.tcpKeepAlive enable tcp keep alive. Env "TAOS_ADAPTER_STATSD_TCP_KEEP_ALIVE"
--statsd.ttl int statsd data ttl. Env "TAOS_ADAPTER_STATSD_TTL"
--statsd.user string statsd user. Env "TAOS_ADAPTER_STATSD_USER" (default "root")
--statsd.worker int statsd write worker. Env "TAOS_ADAPTER_STATSD_WORKER" (default 10)
--taosConfigDir string load taos client config path. Env "TAOS_ADAPTER_TAOS_CONFIG_FILE"
--uploadKeeper.enable Whether to enable sending metrics to keeper. Env "TAOS_ADAPTER_UPLOAD_KEEPER_ENABLE" (default true)
--uploadKeeper.interval duration send to Keeper interval. Env "TAOS_ADAPTER_UPLOAD_KEEPER_INTERVAL" (default 15s)
--uploadKeeper.retryInterval duration retry interval. Env "TAOS_ADAPTER_UPLOAD_KEEPER_RETRY_INTERVAL" (default 5s)
--uploadKeeper.retryTimes uint retry times. Env "TAOS_ADAPTER_UPLOAD_KEEPER_RETRY_TIMES" (default 3)
--uploadKeeper.timeout duration send to Keeper timeout. Env "TAOS_ADAPTER_UPLOAD_KEEPER_TIMEOUT" (default 5s)
--uploadKeeper.url string Keeper url. Env "TAOS_ADAPTER_UPLOAD_KEEPER_URL" (default "http://127.0.0.1:6043/adapter_report")
-V, --version Print the version and exit
```
Note:
@ -159,6 +170,17 @@ For details on the CORS protocol, please refer to: [https://www.w3.org/wiki/CORS](https://www.w3.org/wiki/CORS)
See [example/config/taosadapter.toml](https://github.com/taosdata/taosadapter/blob/3.0/example/config/taosadapter.toml) for sample configuration files.
### Connection Pool Parameters
When using the RESTful interface, TDengine connections are managed through a connection pool. The pool can be configured with the following parameters:
- **`pool.maxConnect`**: The maximum number of connections allowed in the pool. The default value is 2 times the number of CPU cores. It is recommended to keep the default setting.
- **`pool.maxIdle`**: The maximum number of idle connections allowed in the pool. By default, this is the same as `pool.maxConnect`. It is recommended to keep the default setting.
- **`pool.idleTimeout`**: The idle timeout duration for connections. By default, connections never time out. It is recommended to keep the default setting.
- **`pool.waitTimeout`**: The timeout for acquiring a connection from the pool. The default value is 60 seconds. If a connection cannot be acquired within the timeout, an HTTP 503 status code will be returned. This parameter is available starting from version 3.3.3.0.
- **`pool.maxWait`**: The maximum number of requests waiting to acquire a connection from the pool. The default value is 0, meaning there is no limit. If the number of queued requests exceeds this value, new requests will return an HTTP 503 status code. This parameter is available starting from version 3.3.3.0.
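For reference, the same settings can be written in `taosadapter.toml`. The sketch below is a minimal illustration that assumes the TOML keys under a `[pool]` section mirror the command-line option names above; consult the sample configuration file referenced earlier for the authoritative layout.
```toml
[pool]
# Maximum connections; 0 falls back to the default of 2 * CPU cores
maxConnect = 0
# Maximum idle connections; defaults to maxConnect when unset
maxIdle = 0
# Idle connection timeout; 0 means connections never time out
idleTimeout = 0
# Seconds to wait for a free connection before returning HTTP 503 (3.3.3.0+)
waitTimeout = 60
# Maximum queued requests waiting for a connection; 0 means unlimited (3.3.3.0+)
maxWait = 0
```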
## Feature List
- RESTful interface
@ -168,17 +190,17 @@ See [example/config/taosadapter.toml](https://github.com/taosdata/taosadapter/blob/3.0/example/config/taosadapter.toml) for sample configuration files.
- Compatible with OpenTSDB JSON and telnet format writes
- [http://opentsdb.net/docs/build/html/api_http/put.html](http://opentsdb.net/docs/build/html/api_http/put.html)
- [http://opentsdb.net/docs/build/html/api_telnet/put.html](http://opentsdb.net/docs/build/html/api_telnet/put.html)
- Seamless connection to collectd
- Seamless connection to collectd.
collectd is a system statistics collection daemon, please visit [https://collectd.org/](https://collectd.org/) for more information.
- Seamless connection with StatsD
- Seamless connection with StatsD.
StatsD is a simple yet powerful daemon for aggregating statistical information. Please visit [https://github.com/statsd/statsd](https://github.com/statsd/statsd) for more information.
- Seamless connection with icinga2
- Seamless connection with icinga2.
icinga2 is a software that collects inspection result metrics and performance data. Please visit [https://icinga.com/docs/icinga-2/latest/doc/14-features/#opentsdb-writer](https://icinga.com/docs/icinga-2/latest/doc/14-features/#opentsdb-writer) for more information.
- Seamless connection to TCollector
- Seamless connection to TCollector.
TCollector is a client process that collects data from a local collector and pushes the data to OpenTSDB. Please visit [http://opentsdb.net/docs/build/html/user_guide/utilities/tcollector.html](http://opentsdb.net/docs/build/html/user_guide/utilities/tcollector.html) for more information.
- Seamless connection to node_exporter
- Seamless connection to node_exporter.
node_exporter is an exporter for machine metrics. Please visit [https://github.com/prometheus/node_exporter](https://github.com/prometheus/node_exporter) for more information.
- Support for Prometheus remote_read and remote_write
- Support for Prometheus remote_read and remote_write.
remote_read and remote_write are interfaces for Prometheus to read and write data from/to other data storage solutions. Please visit [https://prometheus.io/blog/2019/10/10/remote-read-meets-streaming/#remote-apis](https://prometheus.io/blog/2019/10/10/remote-read-meets-streaming/#remote-apis) for more information.
- Get table's VGroup ID.

View File

@ -4,7 +4,7 @@ sidebar_label: Taos-Explorer
description: User guide about taosExplorer
---
taos-explorer is a web service which provides GUI based interactive database management tool.
taos-explorer is a web service that provides a GUI-based interactive database management tool. To ensure the best experience when accessing taosExplorer, please use Chrome version 79 or higher, or Edge version 79 or higher.
## Install

View File

@ -20,8 +20,7 @@ table_options:
table_option: {
COMMENT 'string_value'
| SMA(col_name [, col_name] ...)
| TTL value
| SMA(col_name [, col_name] ...)
}
```

View File

@ -1384,7 +1384,7 @@ SELECT SERVER_VERSION();
SELECT SERVER_STATUS();
```
**Description**: The server status.
**Description**: The server status. When checking the status of a cluster, the recommended way is to use `SHOW CLUSTER ALIVE;`. Unlike `SELECT SERVER_STATUS();`, it does not return an error when some nodes in the cluster are unavailable; instead, it returns different status codes. Please check [SHOW CLUSTER ALIVE](https://docs.tdengine.com/reference/taos-sql/show/#show-cluster-alive) for details.
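For example, a health check against a cluster can be issued directly; the status codes in the comment below follow the `SHOW CLUSTER ALIVE` documentation and should be verified against your server version:
```sql
SHOW CLUSTER ALIVE;
-- status 1: the cluster is fully available
-- status 2: the cluster is partially available (some nodes offline)
-- status 0: the cluster is unavailable
```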
### CURRENT_USER

View File

@ -19,7 +19,7 @@ After TDengine server or client installation, `taos.h` is located at
The dynamic libraries for the TDengine client driver are located at:
- Linux: `/usr/local/taos/driver/libtaos.so`
- Windows: `C:\TDengine\taos.dll`
- Windows: `C:\TDengine\driver\taos.dll`
- macOS: `/usr/local/lib/libtaos.dylib`
## Supported platforms

View File

@ -42,6 +42,7 @@ REST connection supports all platforms that can run Java.
| taos-jdbcdriver version | major changes | TDengine version |
| :---------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------: | :--------------: |
| 3.3.2 | 1. Optimized websocket prepareStatement performance; 2. Improved mybatis support| - |
| 3.3.0 | 1. Optimized data transmission performance under Websocket connection; 2. SSL validation skipping is supported but disabled by default| 3.3.2.0 or later |
| 3.2.11 | Fixed the result set closing bug when using a native connection.| - |
| 3.2.10 | 1. Automatic compression/decompression for data transmission, disabled by default; 2.Automatic reconnection for websocket with configurable parameter, disabled by default; 3. A new method for schemaless writing is added in the connection class; 4. Optimized performance for data fetching on native connection; 5. Fixing for some known issues; 6. The list of supported functions can be returned by the API for retrieving metadata| - |
@ -179,7 +180,7 @@ Add following dependency in the `pom.xml` file of your Maven project:
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>3.3.0</version>
<version>3.3.2</version>
</dependency>
```

View File

@ -231,8 +231,9 @@ Input following content:
"config":{
"connector.class": "com.taosdata.kafka.connect.source.TDengineSourceConnector",
"tasks.max": 1,
"subscription.group.id": "source-demo",
"connection.url": "jdbc:TAOS://127.0.0.1:6030",
"connection.username": "root",
"connection.user": "root",
"connection.password": "taosdata",
"connection.database": "test",
"connection.attempts": 3,

View File

@ -20,6 +20,10 @@ For TDengine 2.x installation packages by version, please visit [here](https://t
import Release from "/components/ReleaseV3";
## 3.3.3.0
<Release type="tdengine" version="3.3.3.0" />
## 3.3.2.0
<Release type="tdengine" version="3.3.2.0" />

View File

@ -19,7 +19,7 @@
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>3.3.0</version>
<version>3.4.0</version>
</dependency>
<dependency>
<groupId>org.locationtech.jts</groupId>

View File

@ -1,11 +1,11 @@
## TDengine Spring JDBC Template Demo
`Spring JDBC Template` 简化了原生 JDBC Connection 获取释放等操作,使得操作数据库更加方便。
`Spring JDBC Template` simplifies the operations of acquiring and releasing native JDBC Connections, making database operations more convenient.
### 配置
### Configuration
修改 `src/main/resources/applicationContext.xml` 文件中 TDengine 的配置信息:
Modify the TDengine configuration in the `src/main/resources/applicationContext.xml` file:
```xml
<bean id="dataSource" class="org.springframework.jdbc.datasource.DriverManagerDataSource">
@ -20,13 +20,15 @@
</bean>
```
### 打包运行
### Package and run
Navigate to the `TDengine/tests/examples/JDBC/SpringJdbcTemplate` directory and execute the following commands to generate an executable jar file.
进入 `TDengine/tests/examples/JDBC/SpringJdbcTemplate` 目录下,执行以下命令可以生成可执行 jar 包。
```shell
mvn clean package
```
打包成功之后,进入 `target/` 目录下,执行以下命令就可运行测试:
After successfully packaging, navigate to the `target/` directory and execute the following commands to run the tests:
```shell
java -jar target/SpringJdbcTemplate-1.0-SNAPSHOT-jar-with-dependencies.jar
```
```

View File

@ -18,7 +18,7 @@
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>3.3.0</version>
<version>3.4.0</version>
</dependency>
<!-- druid -->
<dependency>

View File

@ -1,21 +1,21 @@
### 设置###
### Settings###
log4j.rootLogger=debug,stdout,DebugLog,ErrorLog
### 输出信息到控制抬 ###
### Output information to the console ###
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} method:%l%n%m%n
### 输出DEBUG 级别以上的日志到=logs/debug.log
### Output logs of DEBUG level and above to logs/debug.log
log4j.appender.DebugLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DebugLog.File=logs/debug.log
log4j.appender.DebugLog.Append=true
log4j.appender.DebugLog.Threshold=DEBUG
log4j.appender.DebugLog.layout=org.apache.log4j.PatternLayout
log4j.appender.DebugLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
### 输出ERROR 级别以上的日志到=logs/error.log
### Output logs of ERROR level and above to logs/error.log
log4j.appender.ErrorLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.ErrorLog.File=logs/error.log
log4j.appender.ErrorLog.Append=true
log4j.appender.ErrorLog.Threshold=ERROR
log4j.appender.ErrorLog.layout=org.apache.log4j.PatternLayout
log4j.appender.ErrorLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
log4j.appender.ErrorLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n

View File

@ -1,27 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<something-else-entirely>
<proxool>
<!-- Alias for the data source -->
<alias>ds</alias>
<!--数据源的别名-->
<!-- URL connection string -->
<driver-url>jdbc:TAOS-RS://127.0.0.1:6041/log</driver-url>
<!--url连接串-->
<!-- Driver class -->
<driver-class>com.taosdata.jdbc.rs.RestfulDriver</driver-class>
<!--驱动类-->
<driver-properties>
<property name="user" value="root"/>
<property name="password" value="taosdata"/>
</driver-properties>
<!--最大连接数(默认5个),超过了这个连接数,再有请求时,就排在队列中等候,最大的等待请求数由maximum-new-connections决定 -->
<!-- Maximum connection count (default is 5). If this number is exceeded, new requests will be queued. The maximum number of queued requests is determined by maximum-new-connections -->
<maximum-connection-count>100</maximum-connection-count>
<!-- 定义连接池中的最大连接数 -->
<!-- Defines the maximum number of connections in the connection pool -->
<maximum-active-time>100</maximum-active-time>
<!--最少保持的空闲连接数(默认2个)-->
<!-- Minimum number of idle connections to maintain (default is 2) -->
<prototype-count>1</prototype-count>
<!--最小连接数(默认2个)-->
<!-- Minimum connection count (default is 2) -->
<minimum-connection-count>5</minimum-connection-count>
<!--proxool自动侦察各个连接状态的时间间隔(毫秒),侦察到空闲的连接就马上回收,超时的销毁 默认30秒-->
<!-- Interval (in milliseconds) for Proxool to automatically check the status of each connection. Idle connections are immediately reclaimed, and timed-out connections are destroyed. Default is 30 seconds -->
<house-keeping-sleep-time>30000</house-keeping-sleep-time>
<!--用于保持连接的测试语句 -->
<!-- Test statement used to maintain the connection -->
<house-keeping-test-sql>select server_version()</house-keeping-test-sql>
</proxool>
</something-else-entirely>
</something-else-entirely>

View File

@ -17,7 +17,7 @@
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>3.2.1</version>
<version>3.4.0</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
@ -67,4 +67,4 @@
</plugins>
</build>
</project>
</project>

View File

@ -35,17 +35,18 @@ public class Worker implements Runnable {
public void run() {
while (!Thread.interrupted()) {
try {
// 控制请求频率
// Control request rate
if (semaphore.tryAcquire()) {
ConsumerRecords<Bean> records = consumer.poll(Duration.ofMillis(sleepTime));
pool.submit(() -> {
RateLimiter limiter = RateLimiter.create(rate);
try {
for (ConsumerRecord<Bean> record : records) {
// 流量控制
// Traffic control
limiter.acquire();
// 业务处理数据
System.out.println("[" + LocalDateTime.now() + "] Thread id:" + Thread.currentThread().getId() + " -> " + record.value());
// Business data processing
System.out.println("[" + LocalDateTime.now() + "] Thread id:"
+ Thread.currentThread().getId() + " -> " + record.value());
}
} finally {
semaphore.release();

View File

@ -1,14 +1,14 @@
# 使用说明
# Instructions
## 创建使用db
## Create and use the database
```shell
$ taos
> create database mp_test
```
## 执行测试用例
## Execute test cases
```shell
$ mvn clean test
```
```

View File

@ -1,6 +1,6 @@
## TDengine SpringBoot + Mybatis Demo
## 需要提前创建 test 数据库
## Need to create a test database in advance
```
$ taos -s 'create database if not exists test'
@ -8,7 +8,7 @@ $ taos -s 'create database if not exists test'
$ curl http://localhost:8080/weather/init
```
### 配置 application.properties
### Configure application.properties
```properties
# datasource config
spring.datasource.driver-class-name=com.taosdata.jdbc.TSDBDriver
@ -38,9 +38,9 @@ mybatis.mapper-locations=classpath:mapper/*.xml
logging.level.com.taosdata.jdbc.springbootdemo.dao=debug
```
### 主要功能
### Main functions
* 创建数据库和表
* Create databases and tables
```xml
<!-- weatherMapper.xml -->
<update id="createDB" >
@ -52,14 +52,14 @@ logging.level.com.taosdata.jdbc.springbootdemo.dao=debug
</update>
```
* 插入单条记录
* Insert a single record
```xml
<!-- weatherMapper.xml -->
<insert id="insert" parameterType="Weather" >
insert into test.weather (ts, temperature, humidity) values (now, #{temperature,jdbcType=INTEGER}, #{humidity,jdbcType=FLOAT})
</insert>
```
* 插入多条记录
* Insert multiple records
```xml
<!-- weatherMapper.xml -->
<insert id="batchInsert" parameterType="java.util.List" >
@ -69,7 +69,7 @@ logging.level.com.taosdata.jdbc.springbootdemo.dao=debug
</foreach>
</insert>
```
* 分页查询
* Pagination query
```xml
<!-- weatherMapper.xml -->
<?xml version="1.0" encoding="UTF-8"?>

View File

@ -67,7 +67,7 @@
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>3.3.0</version>
<version>3.4.0</version>
<!-- <scope>system</scope>-->
<!-- <systemPath>${project.basedir}/src/main/resources/lib/taos-jdbcdriver-2.0.15-dist.jar</systemPath>-->
</dependency>

View File

@ -1,11 +1,14 @@
```
cd tests/examples/JDBC/taosdemo
mvn clean package -Dmaven.test.skip=true
# 先建表,再插入的
# Create tables first, then insert data
java -jar target/taosdemo-2.0.1-jar-with-dependencies.jar -host <hostname> -database <db name> -doCreateTable true -superTableSQL "create table weather(ts timestamp, f1 int) tags(t1 nchar(4))" -numOfTables 1000 -numOfRowsPerTable 100000000 -numOfThreadsForInsert 10 -numOfTablesPerSQL 10 -numOfValuesPerSQL 100
# 不建表,直接插入的
# Insert data directly without creating tables
java -jar target/taosdemo-2.0.1-jar-with-dependencies.jar -host <hostname> -database <db name> -doCreateTable false -superTableSQL "create table weather(ts timestamp, f1 int) tags(t1 nchar(4))" -numOfTables 1000 -numOfRowsPerTable 100000000 -numOfThreadsForInsert 10 -numOfTablesPerSQL 10 -numOfValuesPerSQL 100
```
如果发生错误 Exception in thread "main" java.lang.UnsatisfiedLinkError: no taos in java.library.path
请检查是否安装 TDengine 客户端安装包或编译 TDengine 安装。如果确定已经安装过还出现这个错误,可以在命令行 java 后加 -Djava.library.path=/usr/lib 来指定寻找共享库的路径。
If you encounter the error Exception in thread "main" `java.lang.UnsatisfiedLinkError: no taos in java.library.path`, please check whether the TDengine client package is installed or whether TDengine has been compiled and installed from source. If you are sure it is installed and the error still occurs, you can add `-Djava.library.path=/usr/lib` after the `java` command to specify the path to the shared library, as shown below.
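For instance, assuming the client driver's shared library is under `/usr/lib`, the full command would look like:
```
java -Djava.library.path=/usr/lib -jar target/taosdemo-2.0.1-jar-with-dependencies.jar -host <hostname> -database <db name> -doCreateTable true -superTableSQL "create table weather(ts timestamp, f1 int) tags(t1 nchar(4))"
```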

View File

@ -24,14 +24,14 @@ public class TaosDemoApplication {
private static final Logger logger = LogManager.getLogger(TaosDemoApplication.class);
public static void main(String[] args) throws IOException {
// 读配置参数
// Read configuration parameters
JdbcTaosdemoConfig config = new JdbcTaosdemoConfig(args);
boolean isHelp = Arrays.asList(args).contains("--help");
if (isHelp || config.host == null || config.host.isEmpty()) {
JdbcTaosdemoConfig.printHelp();
System.exit(0);
}
// 初始化
// Initialization
final DataSource dataSource = DataSourceFactory.getInstance(config.host, config.port, config.user,
config.password);
if (config.executeSql != null && !config.executeSql.isEmpty()
@ -50,7 +50,7 @@ public class TaosDemoApplication {
final SuperTableService superTableService = new SuperTableService(dataSource);
final SubTableService subTableService = new SubTableService(dataSource);
// 创建数据库
// create database
long start = System.currentTimeMillis();
Map<String, String> databaseParam = new HashMap<>();
databaseParam.put("database", config.database);
@ -81,13 +81,13 @@ public class TaosDemoApplication {
config.prefixOfFields, config.numOfTags, config.prefixOfTags);
}
/**********************************************************************************/
// 建表
// create table
start = System.currentTimeMillis();
if (config.doCreateTable) {
superTableService.drop(superTableMeta.getDatabase(), superTableMeta.getName());
superTableService.create(superTableMeta);
if (!config.autoCreateTable) {
// 批量建子表
// create sub tables in batch
subTableService.createSubTable(superTableMeta, config.numOfTables, config.prefixOfTable,
config.numOfThreadsForCreate);
}
@ -95,7 +95,7 @@ public class TaosDemoApplication {
end = System.currentTimeMillis();
logger.info(">>> create table time cost : " + (end - start) + " ms.");
/**********************************************************************************/
// 插入
// insert data
long tableSize = config.numOfTables;
int threadSize = config.numOfThreadsForInsert;
long startTime = getProperStartTime(config.startTime, config.days);
@ -111,10 +111,9 @@ public class TaosDemoApplication {
end = System.currentTimeMillis();
logger.info("insert " + affectedRows + " rows, time cost: " + (end - start) + " ms");
/**********************************************************************************/
// 查询
/**********************************************************************************/
// 删除表
// drop table
if (config.dropTable) {
superTableService.drop(config.database, config.superTable);
}

View File

@ -9,21 +9,22 @@ import java.util.List;
@Repository
public interface SubTableMapper {
// 创建子表
// Create: SubTable
void createUsingSuperTable(SubTableMeta subTableMeta);
// 插入一张子表多个values
// Insert: Multiple records into one SubTable
int insertOneTableMultiValues(SubTableValue subTableValue);
// 插入一张子表多个values, 自动建表
// Insert: Multiple records into one SubTable, auto create SubTables
int insertOneTableMultiValuesUsingSuperTable(SubTableValue subTableValue);
// 插入多张表多个values
// Insert: Multiple records into multiple SubTable
int insertMultiTableMultiValues(List<SubTableValue> tables);
// 插入多张表多个values自动建表
// Insert: Multiple records into multiple SubTable, auto create SubTables
int insertMultiTableMultiValuesUsingSuperTable(List<SubTableValue> tables);
//<!-- TODO:修改子表标签值 alter table ${tablename} set tag tagName=newTagValue-->
// <!-- TODO: Modify SubTable tag value: alter table ${tablename} set tag
// tagName=newTagValue-->
}
}

View File

@ -6,24 +6,26 @@ import org.springframework.stereotype.Repository;
@Repository
public interface SuperTableMapper {
// 创建超级表 create table if not exists xxx.xxx (f1 type1, f2 type2, ... ) tags( t1 type1, t2 type2 ...)
// Create super table: create table if not exists xxx.xxx (f1 type1, f2 type2,
// ... ) tags( t1 type1, t2 type2 ...)
void createSuperTable(SuperTableMeta tableMetadata);
// 删除超级表 drop table if exists xxx;
// Drop super table: drop table if exists xxx;
void dropSuperTable(String database, String name);
//<!-- TODO:查询所有超级表信息 show stables -->
// <!-- TODO: Query all super table information show stables -->
//<!-- TODO:查询表结构 describe stable -->
// <!-- TODO: Query table structure describe stable -->
//<!-- TODO:增加列 alter table ${tablename} add column fieldName dataType -->
// <!-- TODO: Add column alter table ${tablename} add column fieldName dataType
// -->
//<!-- TODO:删除列 alter table ${tablename} drop column fieldName -->
// <!-- TODO: Drop column alter table ${tablename} drop column fieldName -->
//<!-- TODO:添加标签 alter table ${tablename} add tag new_tagName tag_type -->
// <!-- TODO: Add tag alter table ${tablename} add tag new_tagName tag_type -->
//<!-- TODO:删除标签 alter table ${tablename} drop tag_name -->
//<!-- TODO:修改标签名 alter table ${tablename} change tag old_tagName new_tagName -->
// <!-- TODO: Drop tag alter table ${tablename} drop tag_name -->
// <!-- TODO: Change tag name alter table ${tablename} change tag old_tagName
// new_tagName -->
}

View File

@ -9,19 +9,18 @@ import java.util.List;
@Repository
public interface TableMapper {
// 创建普通表
// Create: Normal table
void create(TableMeta tableMeta);
// 插入一张表多个value
// Insert: Multiple records into one table
int insertOneTableMultiValues(TableValue values);
// 插入: 一张表多个value指定的列
// Insert: Multiple records into one table, specified columns
int insertOneTableMultiValuesWithColumns(TableValue values);
// 插入多个表多个value
// Insert: Multiple records into multiple tables
int insertMultiTableMultiValues(List<TableValue> tables);
// 插入多个表多个value, 指定的列
// Insert: Multiple records into multiple tables, specified columns
int insertMultiTableMultiValuesWithColumns(List<TableValue> tables);
}
}

View File

@ -14,12 +14,12 @@ public class DatabaseService {
this.databaseMapper = new DatabaseMapperImpl(dataSource);
}
// 建库指定 name
// Create database with specified name
public void createDatabase(String database) {
databaseMapper.createDatabase(database);
}
// 建库指定参数 keep,days,replica等
// Create database with specified parameters such as keep, days, replica, etc.
public void createDatabase(Map<String, String> map) {
if (map.isEmpty())
return;

View File

@ -27,7 +27,8 @@ public class SubTableService extends AbstractService {
this.mapper = new SubTableMapperImpl(datasource);
}
public void createSubTable(SuperTableMeta superTableMeta, long numOfTables, String prefixOfTable, int numOfThreadsForCreate) {
public void createSubTable(SuperTableMeta superTableMeta, long numOfTables, String prefixOfTable,
int numOfThreadsForCreate) {
ExecutorService executor = Executors.newFixedThreadPool(numOfThreadsForCreate);
for (long i = 0; i < numOfTables; i++) {
long tableIndex = i;
@ -35,54 +36,58 @@ public class SubTableService extends AbstractService {
}
executor.shutdown();
try {
executor.awaitTermination(Long.MAX_VALUE,TimeUnit.NANOSECONDS);
executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public void createSubTable(SuperTableMeta superTableMeta, String tableName) {
// 构造数据
// Construct data
SubTableMeta meta = SubTableMetaGenerator.generate(superTableMeta, tableName);
createSubTable(meta);
}
// 创建一张子表可以指定databasesupertabletablenametag值
// Create a sub-table, specifying database, super table, table name, and tag
// values
public void createSubTable(SubTableMeta subTableMeta) {
mapper.createUsingSuperTable(subTableMeta);
}
/*************************************************************************************************************************/
// 插入多线程多表
// Insert: Multi-threaded, multiple tables
public int insert(List<SubTableValue> subTableValues, int threadSize, int frequency) {
ExecutorService executor = Executors.newFixedThreadPool(threadSize);
Future<Integer> future = executor.submit(() -> insert(subTableValues));
executor.shutdown();
//TODOfrequency
// TODOfrequency
return getAffectRows(future);
}
// 插入单表insert into xxx values(),()...
// Insert: Single table, insert into xxx values(),()...
public int insert(SubTableValue subTableValue) {
return mapper.insertOneTableMultiValues(subTableValue);
}
// 插入: 多表insert into xxx values(),()... xxx values(),()...
// Insert: Multiple tables, insert into xxx values(),()... xxx values(),()...
public int insert(List<SubTableValue> subTableValues) {
return mapper.insertMultiTableMultiValues(subTableValues);
}
// 插入单表自动建表, insert into xxx using xxx tags(...) values(),()...
// Insert: Single table, auto-create table, insert into xxx using xxx tags(...)
// values(),()...
public int insertAutoCreateTable(SubTableValue subTableValue) {
return mapper.insertOneTableMultiValuesUsingSuperTable(subTableValue);
}
// 插入多表自动建表, insert into xxx using XXX tags(...) values(),()... xxx using XXX tags(...) values(),()...
// Insert: Multiple tables, auto-create tables, insert into xxx using XXX
// tags(...) values(),()... xxx using XXX tags(...) values(),()...
public int insertAutoCreateTable(List<SubTableValue> subTableValues) {
return mapper.insertMultiTableMultiValuesUsingSuperTable(subTableValues);
}
public int insertMultiThreads(SuperTableMeta superTableMeta, int threadSize, long tableSize, long startTime, long gap, JdbcTaosdemoConfig config) {
public int insertMultiThreads(SuperTableMeta superTableMeta, int threadSize, long tableSize, long startTime,
long gap, JdbcTaosdemoConfig config) {
List<FutureTask> taskList = new ArrayList<>();
List<Thread> threads = IntStream.range(0, threadSize)
.mapToObj(i -> {
@ -94,8 +99,7 @@ public class SubTableService extends AbstractService {
startTime, config.timeGap,
config.numOfRowsPerTable, config.numOfTablesPerSQL, config.numOfValuesPerSQL,
config.order, config.rate, config.range,
config.prefixOfTable, config.autoCreateTable)
);
config.prefixOfTable, config.autoCreateTable));
taskList.add(task);
return new Thread(task, "InsertThread-" + i);
}).collect(Collectors.toList());
@ -126,7 +130,7 @@ public class SubTableService extends AbstractService {
private class InsertTask implements Callable<Integer> {
private final long startTableInd; // included
private final long endTableInd; // excluded
private final long endTableInd; // excluded
private final long startTime;
private final long timeGap;
private final long numOfRowsPerTable;
@ -140,10 +144,10 @@ public class SubTableService extends AbstractService {
private final boolean autoCreateTable;
public InsertTask(SuperTableMeta superTableMeta, long startTableInd, long endTableInd,
long startTime, long timeGap,
long numOfRowsPerTable, long numOfTablesPerSQL, long numOfValuesPerSQL,
int order, int rate, long range,
String prefixOfTable, boolean autoCreateTable) {
long startTime, long timeGap,
long numOfRowsPerTable, long numOfTablesPerSQL, long numOfValuesPerSQL,
int order, int rate, long range,
String prefixOfTable, boolean autoCreateTable) {
this.superTableMeta = superTableMeta;
this.startTableInd = startTableInd;
this.endTableInd = endTableInd;
@ -159,7 +163,6 @@ public class SubTableService extends AbstractService {
this.autoCreateTable = autoCreateTable;
}
@Override
public Integer call() {
@ -171,23 +174,27 @@ public class SubTableService extends AbstractService {
int affectRows = 0;
// row
for (long rowCnt = 0; rowCnt < numOfRowsPerTable; ) {
for (long rowCnt = 0; rowCnt < numOfRowsPerTable;) {
long rowSize = numOfValuesPerSQL;
if (rowCnt + rowSize > numOfRowsPerTable) {
rowSize = numOfRowsPerTable - rowCnt;
}
//table
for (long tableCnt = startTableInd; tableCnt < endTableInd; ) {
// table
for (long tableCnt = startTableInd; tableCnt < endTableInd;) {
long tableSize = numOfTablesPerSQL;
if (tableCnt + tableSize > endTableInd) {
tableSize = endTableInd - tableCnt;
}
long startTime = this.startTime + rowCnt * timeGap;
// System.out.println(Thread.currentThread().getName() + " >>> " + "rowCnt: " + rowCnt + ", rowSize: " + rowSize + ", " + "tableCnt: " + tableCnt + ",tableSize: " + tableSize + ", " + "startTime: " + startTime + ",timeGap: " + timeGap + "");
// System.out.println(Thread.currentThread().getName() + " >>> " + "rowCnt: " +
// rowCnt + ", rowSize: " + rowSize + ", " + "tableCnt: " + tableCnt +
// ",tableSize: " + tableSize + ", " + "startTime: " + startTime + ",timeGap: "
// + timeGap + "");
/***********************************************/
// 生成数据
List<SubTableValue> data = SubTableValueGenerator.generate(superTableMeta, prefixOfTable, tableCnt, tableSize, rowSize, startTime, timeGap);
// 乱序
// Construct data
List<SubTableValue> data = SubTableValueGenerator.generate(superTableMeta, prefixOfTable, tableCnt,
tableSize, rowSize, startTime, timeGap);
// disorder
if (order != 0)
SubTableValueGenerator.disrupt(data, rate, range);
// insert
@ -205,5 +212,4 @@ public class SubTableService extends AbstractService {
}
}
}

View File

@ -14,7 +14,7 @@ public class SuperTableService {
this.superTableMapper = new SuperTableMapperImpl(dataSource);
}
// 创建超级表指定每个field的名称和类型每个tag的名称和类型
// Create super table, specifying the name and type of each field and each tag
public void create(SuperTableMeta superTableMeta) {
superTableMapper.createSuperTable(superTableMeta);
}

View File

@ -11,15 +11,14 @@ public class TableService extends AbstractService {
private TableMapper tableMapper;
//创建一张表
// Create a table
public void create(TableMeta tableMeta) {
tableMapper.create(tableMeta);
}
//创建多张表
// Create multiple tables
public void create(List<TableMeta> tables) {
tables.stream().forEach(this::create);
}
}

View File

@ -11,7 +11,8 @@ public class FieldValueGenerator {
public static Random random = new Random(System.currentTimeMillis());
// 生成start到end的时间序列时间戳为顺序不含有乱序field的value为随机生成
// Generate a time series from start to end, timestamps are in order without
// disorder, field values are randomly generated
public static List<RowValue> generate(long start, long end, long timeGap, List<FieldMeta> fieldMetaList) {
List<RowValue> values = new ArrayList<>();
@ -29,9 +30,12 @@ public class FieldValueGenerator {
return values;
}
// 生成start到end的时间序列时间戳为顺序含有乱序rate为乱序的比例range为乱序前跳范围field的value为随机生成
// Generate a time series from start to end, timestamps are in order but include
// disorder, rate is the proportion of disorder, range is the jump range before
// disorder, field values are randomly generated
public static List<RowValue> disrupt(List<RowValue> values, int rate, long range) {
long timeGap = (long) (values.get(1).getFields().get(0).getValue()) - (long) (values.get(0).getFields().get(0).getValue());
long timeGap = (long) (values.get(1).getFields().get(0).getValue())
- (long) (values.get(0).getFields().get(0).getValue());
int bugSize = values.size() * rate / 100;
Set<Integer> bugIndSet = new HashSet<>();
while (bugIndSet.size() < bugSize) {

View File

@ -9,7 +9,7 @@ import java.util.List;
public class SubTableMetaGenerator {
// 创建tableSize张子表使用tablePrefix作为子表名的前缀使用superTableMeta的元数据
// Create tableSize sub-tables, using tablePrefix as the prefix for sub-table names, and using the metadata from superTableMeta
// create table xxx using XXX tags(XXX)
public static List<SubTableMeta> generate(SuperTableMeta superTableMeta, int tableSize, String tablePrefix) {
List<SubTableMeta> subTableMetaList = new ArrayList<>();

View File

@ -10,10 +10,11 @@ import java.util.List;
public class SuperTableMetaGenerator {
// 创建超级表使用指定SQL语句
// Create super table using the specified SQL statement
public static SuperTableMeta generate(String superTableSQL) {
SuperTableMeta tableMeta = new SuperTableMeta();
// for example : create table superTable (ts timestamp, temperature float, humidity int) tags(location nchar(64), groupId int)
// for example : create table superTable (ts timestamp, temperature float,
// humidity int) tags(location nchar(64), groupId int)
superTableSQL = superTableSQL.trim().toLowerCase();
if (!superTableSQL.startsWith("create"))
throw new RuntimeException("invalid create super table SQL");
@ -54,8 +55,9 @@ public class SuperTableMetaGenerator {
return tableMeta;
}
// 创建超级表,指定field和tag的个数
public static SuperTableMeta generate(String database, String name, int fieldSize, String fieldPrefix, int tagSize, String tagPrefix) {
// Create super table with specified number of fields and tags
public static SuperTableMeta generate(String database, String name, int fieldSize, String fieldPrefix, int tagSize,
String tagPrefix) {
if (fieldSize < 2 || tagSize < 1) {
throw new RuntimeException("create super table but fieldSize less than 2 or tagSize less than 1");
}
@ -66,7 +68,8 @@ public class SuperTableMetaGenerator {
List<FieldMeta> fields = new ArrayList<>();
fields.add(new FieldMeta("ts", "timestamp"));
for (int i = 1; i <= fieldSize; i++) {
fields.add(new FieldMeta(fieldPrefix + "" + i, TaosConstants.DATA_TYPES[i % TaosConstants.DATA_TYPES.length]));
fields.add(
new FieldMeta(fieldPrefix + "" + i, TaosConstants.DATA_TYPES[i % TaosConstants.DATA_TYPES.length]));
}
tableMetadata.setFields(fields);
// tags

View File

@ -9,7 +9,7 @@ import java.util.List;
public class TagValueGenerator {
// 创建标签值使用tagMetas
// Create tag values using tagMetas
public static List<TagValue> generate(List<TagMeta> tagMetas) {
List<TagValue> tagValues = new ArrayList<>();
for (int i = 0; i < tagMetas.size(); i++) {

View File

@ -41,17 +41,17 @@ public class TimeStampUtil {
if (start == 0)
start = now - size * timeGap;
// 如果size小于1异常
// If size is less than 1, throw an exception
if (size < 1)
throw new IllegalArgumentException("size less than 1.");
// 如果timeGap为1已经超长需要前移start
// If timeGap is 1 and it exceeds the limit, move start forward
if (start + size > now) {
start = now - size;
return new TimeTuple(start, now, 1);
}
long end = start + (long) (timeGap * size);
if (end > now) {
//压缩timeGap
// Compress timeGap
end = now;
double gap = (end - start) / (size * 1.0f);
if (gap < 1.0f) {

View File

@ -1,21 +1,21 @@
### 设置###
### Settings ###
log4j.rootLogger=info,stdout
### 输出信息到控制抬 ###
### Output information to the console ###
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%-5p] %d{yyyy-MM-dd HH:mm:ss,SSS} method:%l%n%m%n
### 输出DEBUG 级别以上的日志到=logs/debug.log
### Output logs of DEBUG level and above to logs/debug.log ###
log4j.appender.DebugLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DebugLog.File=logs/debug.log
log4j.appender.DebugLog.Append=true
log4j.appender.DebugLog.Threshold=DEBUG
log4j.appender.DebugLog.layout=org.apache.log4j.PatternLayout
log4j.appender.DebugLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
### 输出ERROR 级别以上的日志到=logs/error.log
### Output logs of ERROR level and above to logs/error.log ###
log4j.appender.ErrorLog=org.apache.log4j.DailyRollingFileAppender
log4j.appender.ErrorLog.File=logs/error.log
log4j.appender.ErrorLog.Append=true
log4j.appender.ErrorLog.Threshold=ERROR
log4j.appender.ErrorLog.layout=org.apache.log4j.PatternLayout
log4j.appender.ErrorLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
log4j.appender.ErrorLog.layout.ConversionPattern=%-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n

3
docs/examples/c-ws/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
*
!*.c
!.gitignore

View File

@ -0,0 +1,21 @@
// compile with
// gcc connect_example.c -o connect_example -ltaosws
#include <stdio.h>
#include <stdlib.h>
#include "taosws.h"
int main() {
ws_enable_log("debug");
char *dsn = "ws://localhost:6041";
WS_TAOS *taos = ws_connect(dsn);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return -1;
}
fprintf(stdout, "Connected to %s successfully.\n", dsn);
/* put your code here for read and write */
// close & clean
ws_close(taos);
}

View File

@ -0,0 +1,69 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// TAOS standard API example. The same syntax as MySQL, but only a subset
// to compile: gcc -o create_db_demo create_db_demo.c -ltaosws
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "taosws.h"
static int DemoCreateDB() {
ws_enable_log("debug");
// ANCHOR: create_db_and_table
int code = 0;
char *dsn = "ws://localhost:6041";
// connect
WS_TAOS *taos = ws_connect(dsn);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return -1;
}
// create database
WS_RES *result = ws_query(taos, "CREATE DATABASE IF NOT EXISTS power");
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to create database power, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(result));
ws_close(taos);
return -1;
}
ws_free_result(result);
fprintf(stdout, "Create database power successfully.\n");
// create table
const char *sql =
"CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId "
"INT, location BINARY(24))";
result = ws_query(taos, sql);
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to create stable power.meters, ErrCode: 0x%x, ErrMessage: %s\n.", code, ws_errstr(result));
ws_close(taos);
return -1;
}
ws_free_result(result);
fprintf(stdout, "Create stable power.meters successfully.\n");
// close & clean
ws_close(taos);
return 0;
// ANCHOR_END: create_db_and_table
}
int main(int argc, char *argv[]) { return DemoCreateDB(); }

View File

@ -0,0 +1,67 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// TAOS standard API example. The same syntax as MySQL, but only a subset
// to compile: gcc -o insert_data_demo insert_data_demo.c -ltaosws
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "taosws.h"
static int DemoInsertData() {
// ANCHOR: insert_data
int code = 0;
char *dsn = "ws://localhost:6041";
// connect
WS_TAOS *taos = ws_connect(dsn);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return -1;
}
// insert data, please make sure the database and table are already created
const char *sql =
"INSERT INTO "
"power.d1001 USING power.meters TAGS(2,'California.SanFrancisco') "
"VALUES "
"(NOW + 1a, 10.30000, 219, 0.31000) "
"(NOW + 2a, 12.60000, 218, 0.33000) "
"(NOW + 3a, 12.30000, 221, 0.31000) "
"power.d1002 USING power.meters TAGS(3, 'California.SanFrancisco') "
"VALUES "
"(NOW + 1a, 10.30000, 218, 0.25000) ";
WS_RES *result = ws_query(taos, sql);
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to insert data to power.meters, sql: %s, ErrCode: 0x%x, ErrMessage: %s\n.", sql, code,
ws_errstr(result));
ws_close(taos);
return -1;
}
// you can check affectedRows here; the result must not be freed before this call
int rows = ws_affected_rows(result);
fprintf(stdout, "Successfully inserted %d rows into power.meters.\n", rows);
ws_free_result(result);
// close & clean
ws_close(taos);
return 0;
// ANCHOR_END: insert_data
}
int main(int argc, char *argv[]) { return DemoInsertData(); }

View File

@ -0,0 +1,70 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// TAOS standard API example. The same syntax as MySQL, but only a subset
// to compile: gcc -o query_data_demo query_data_demo.c -ltaosws
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "taosws.h"
static int DemoQueryData() {
// ANCHOR: query_data
int code = 0;
char *dsn = "ws://localhost:6041";
// connect
WS_TAOS *taos = ws_connect(dsn);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return -1;
}
// query data, please make sure the database and table are already created
const char *sql = "SELECT ts, current, location FROM power.meters limit 100";
WS_RES *result = ws_query(taos, sql);
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to query data from power.meters, sql: %s, ErrCode: 0x%x, ErrMessage: %s\n.", sql, code,
ws_errstr(result));
ws_close(taos);
return -1;
}
WS_ROW row = NULL;
int rows = 0;
int num_fields = ws_field_count(result);
const WS_FIELD *fields = ws_fetch_fields(result);
fprintf(stdout, "query successfully, got %d fields, the sql is: %s.\n", num_fields, sql);
// fetch the records row by row
while ((row = ws_fetch_row(result))) {
// Add your data processing logic here
rows++;
}
fprintf(stdout, "total rows: %d\n", rows);
ws_free_result(result);
// close & clean
ws_close(taos);
return 0;
// ANCHOR_END: query_data
}
int main(int argc, char *argv[]) { return DemoQueryData(); }
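As a side note, the field metadata fetched above can also be used to print column names before iterating rows. The helper below is a sketch rather than part of the shipped example: it assumes `WS_FIELD` exposes a `name` member analogous to the native `TAOS_FIELD` struct, which should be verified against the `taosws.h` header shipped with your client.
```c
#include <stdio.h>
#include "taosws.h"

// Sketch: print the column names of a result set before fetching rows.
// Assumption: WS_FIELD has a `name` member, mirroring the native TAOS_FIELD.
static void print_column_names(WS_RES *result) {
  int num_fields = ws_field_count(result);
  const WS_FIELD *fields = ws_fetch_fields(result);
  for (int i = 0; i < num_fields; i++) {
    fprintf(stdout, "column %d: %s\n", i, fields[i].name);
  }
}
```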

View File

@ -0,0 +1,121 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// TAOS standard API example. The same syntax as MySQL, but only a subset
// to compile: gcc -o sml_insert_demo sml_insert_demo.c -ltaosws
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "taosws.h"
static int DemoSmlInsert() {
// ANCHOR: schemaless
int code = 0;
char *dsn = "ws://localhost:6041";
// connect
WS_TAOS *taos = ws_connect(dsn);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return -1;
}
// create database
WS_RES *result = ws_query(taos, "CREATE DATABASE IF NOT EXISTS power");
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to create database power, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(result));
ws_close(taos);
return -1;
}
ws_free_result(result);
// use database
result = ws_query(taos, "USE power");
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to execute use power, ErrCode: 0x%x, ErrMessage: %s\n.", code, ws_errstr(result));
ws_close(taos);
return -1;
}
ws_free_result(result);
// schemaless demo data
char *line_demo =
"meters,groupid=2,location=California.SanFrancisco current=10.3000002f64,voltage=219i32,phase=0.31f64 "
"1626006833639";
char *telnet_demo = "metric_telnet 1707095283260 4 host=host0 interface=eth0";
char *json_demo =
"{\"metric\": \"metric_json\",\"timestamp\": 1626846400,\"value\": 10.3, \"tags\": {\"groupid\": 2, "
"\"location\": \"California.SanFrancisco\", \"id\": \"d1001\"}}";
// influxdb line protocol
int totalLines = 0;
result = ws_schemaless_insert_raw(taos, line_demo, strlen(line_demo), &totalLines, WS_TSDB_SML_LINE_PROTOCOL,
WS_TSDB_SML_TIMESTAMP_MILLI_SECONDS);
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to insert schemaless line data, data: %s, ErrCode: 0x%x, ErrMessage: %s\n.", line_demo,
code, ws_errstr(result));
ws_close(taos);
return -1;
}
fprintf(stdout, "Insert %d rows of schemaless line data successfully.\n", totalLines);
ws_free_result(result);
// opentsdb telnet protocol
totalLines = 0;
result = ws_schemaless_insert_raw(taos, telnet_demo, strlen(telnet_demo), &totalLines, WS_TSDB_SML_TELNET_PROTOCOL,
WS_TSDB_SML_TIMESTAMP_MILLI_SECONDS);
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to insert schemaless telnet data, data: %s, ErrCode: 0x%x, ErrMessage: %s\n.", telnet_demo,
code, ws_errstr(result));
ws_close(taos);
return -1;
}
fprintf(stdout, "Insert %d rows of schemaless telnet data successfully.\n", totalLines);
ws_free_result(result);
// opentsdb json protocol
totalLines = 0;
result = ws_schemaless_insert_raw(taos, json_demo, strlen(json_demo), &totalLines, WS_TSDB_SML_JSON_PROTOCOL,
WS_TSDB_SML_TIMESTAMP_MILLI_SECONDS);
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to insert schemaless json data, data: %s, ErrCode: 0x%x, ErrMessage: %s.\n", json_demo,
code, ws_errstr(result));
ws_close(taos);
return -1;
}
fprintf(stdout, "Insert %d rows of schemaless json data successfully.\n", totalLines);
ws_free_result(result);
// close & clean
ws_close(taos);
return 0;
// ANCHOR_END: schemaless
}
int main(int argc, char *argv[]) { return DemoSmlInsert(); }

View File

@ -0,0 +1,183 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// TAOS standard API example. The same syntax as MySQL, but only a subset
// to compile: gcc -o stmt_insert_demo stmt_insert_demo.c -ltaosws
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include "taosws.h"
/**
* @brief execute sql only.
*
* @param taos
* @param sql
*/
void executeSQL(WS_TAOS *taos, const char *sql) {
WS_RES *res = ws_query(taos, sql);
int code = ws_errno(res);
if (code != 0) {
fprintf(stderr, "%s\n", ws_errstr(res));
ws_free_result(res);
ws_close(taos);
exit(EXIT_FAILURE);
}
ws_free_result(res);
}
/**
* @brief check return status and exit the program when an error occurs.
*
* @param stmt
* @param code
* @param msg
*/
void checkErrorCode(WS_STMT *stmt, int code, const char *msg) {
if (code != 0) {
fprintf(stderr, "%s. code: %d, error: %s\n", msg, code, ws_stmt_errstr(stmt));
ws_stmt_close(stmt);
exit(EXIT_FAILURE);
}
}
typedef struct {
int64_t ts;
float current;
int voltage;
float phase;
} Row;
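// the demo writes num_of_sub_table * num_of_row rows and accumulates the server-reported count in total_affected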
int num_of_sub_table = 10;
int num_of_row = 10;
int total_affected = 0;
/**
* @brief insert data using stmt API
*
* @param taos
*/
void insertData(WS_TAOS *taos) {
// init
WS_STMT *stmt = ws_stmt_init(taos);
if (stmt == NULL) {
fprintf(stderr, "Failed to init ws_stmt, error: %s\n", ws_stmt_errstr(NULL));
exit(EXIT_FAILURE);
}
// prepare
const char *sql = "INSERT INTO ? USING meters TAGS(?,?) VALUES (?,?,?,?)";
int code = ws_stmt_prepare(stmt, sql, 0);
checkErrorCode(stmt, code, "Failed to execute ws_stmt_prepare");
for (int i = 1; i <= num_of_sub_table; i++) {
char table_name[20];
sprintf(table_name, "d_bind_%d", i);
char location[20];
sprintf(location, "location_%d", i);
// set table name and tags
WS_MULTI_BIND tags[2];
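// each WS_MULTI_BIND describes one tag/column: its type, value buffer, per-value length and NULL flags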
// groupId
tags[0].buffer_type = TSDB_DATA_TYPE_INT;
tags[0].buffer_length = sizeof(int);
tags[0].length = (int32_t *)&tags[0].buffer_length;
tags[0].buffer = &i;
tags[0].is_null = NULL;
tags[0].num = 1;
// location
tags[1].buffer_type = TSDB_DATA_TYPE_BINARY;
tags[1].buffer_length = strlen(location);
tags[1].length = (int32_t *)&tags[1].buffer_length;
tags[1].buffer = location;
tags[1].is_null = NULL;
tags[1].num = 1;
code = ws_stmt_set_tbname_tags(stmt, table_name, tags, 2);
checkErrorCode(stmt, code, "Failed to set table name and tags\n");
// insert rows
WS_MULTI_BIND params[4];
// ts
params[0].buffer_type = TSDB_DATA_TYPE_TIMESTAMP;
params[0].buffer_length = sizeof(int64_t);
params[0].length = (int32_t *)&params[0].buffer_length;
params[0].is_null = NULL;
params[0].num = 1;
// current
params[1].buffer_type = TSDB_DATA_TYPE_FLOAT;
params[1].buffer_length = sizeof(float);
params[1].length = (int32_t *)&params[1].buffer_length;
params[1].is_null = NULL;
params[1].num = 1;
// voltage
params[2].buffer_type = TSDB_DATA_TYPE_INT;
params[2].buffer_length = sizeof(int);
params[2].length = (int32_t *)&params[2].buffer_length;
params[2].is_null = NULL;
params[2].num = 1;
// phase
params[3].buffer_type = TSDB_DATA_TYPE_FLOAT;
params[3].buffer_length = sizeof(float);
params[3].length = (int32_t *)&params[3].buffer_length;
params[3].is_null = NULL;
params[3].num = 1;
for (int j = 0; j < num_of_row; j++) {
struct timeval tv;
gettimeofday(&tv, NULL);
long long milliseconds = tv.tv_sec * 1000LL + tv.tv_usec / 1000; // current timestamp in milliseconds
int64_t ts = milliseconds + j;
float current = (float)rand() / RAND_MAX * 30;
int voltage = rand() % 300;
float phase = (float)rand() / RAND_MAX;
params[0].buffer = &ts;
params[1].buffer = &current;
params[2].buffer = &voltage;
params[3].buffer = &phase;
// bind param
code = ws_stmt_bind_param_batch(stmt, params, 4);
checkErrorCode(stmt, code, "Failed to bind param");
}
// add batch
code = ws_stmt_add_batch(stmt);
checkErrorCode(stmt, code, "Failed to add batch");
// execute batch
int affected_rows = 0;
code = ws_stmt_execute(stmt, &affected_rows);
checkErrorCode(stmt, code, "Failed to exec stmt");
// get affected rows
int affected = ws_stmt_affected_rows_once(stmt);
total_affected += affected;
}
fprintf(stdout, "Successfully inserted %d rows to power.meters.\n", total_affected);
ws_stmt_close(stmt);
}
int main() {
int code = 0;
char *dsn = "ws://localhost:6041";
WS_TAOS *taos = ws_connect(dsn);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
exit(EXIT_FAILURE);
}
// create database and table
executeSQL(taos, "CREATE DATABASE IF NOT EXISTS power");
executeSQL(taos, "USE power");
executeSQL(taos,
"CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS "
"(groupId INT, location BINARY(24))");
insertData(taos);
ws_close(taos);
}

View File

@ -0,0 +1,488 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// to compile: gcc -o tmq_demo tmq_demo.c -ltaosws -lpthread
#include <assert.h>
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <unistd.h>
#include "taosws.h"
volatile int thread_stop = 0;
static int running = 1;
static int count = 0;
const char* topic_name = "topic_meters";
typedef struct {
const char* enable_auto_commit;
const char* auto_commit_interval_ms;
const char* group_id;
const char* client_id;
const char* td_connect_host;
const char* td_connect_port;
const char* td_connect_user;
const char* td_connect_pass;
const char* auto_offset_reset;
} ConsumerConfig;
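// demo defaults; the tmq options are applied via ws_tmq_conf_set() in build_consumer(),
// while the td_connect_* fields are only used in log messages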
ConsumerConfig config = {.enable_auto_commit = "true",
.auto_commit_interval_ms = "1000",
.group_id = "group1",
.client_id = "client1",
.td_connect_host = "localhost",
.td_connect_port = "6030",
.td_connect_user = "root",
.td_connect_pass = "taosdata",
.auto_offset_reset = "latest"};
void* prepare_data(void* arg) {
int code = 0;
char* dsn = "ws://localhost:6041";
WS_TAOS* pConn = ws_connect(dsn);
if (pConn == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return NULL;
}
WS_RES* pRes;
int i = 1;
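// insert one row per second until the consumer side sets thread_stop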
while (!thread_stop) {
char buf[200] = {0};
i++;
snprintf(
buf, sizeof(buf),
"INSERT INTO power.d1001 USING power.meters TAGS(2,'California.SanFrancisco') VALUES (NOW + %da, 10.30000, "
"219, 0.31000)",
i);
pRes = ws_query(pConn, buf);
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to insert data to power.meters, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
}
ws_free_result(pRes);
sleep(1);
}
fprintf(stdout, "Prepare data thread exit\n");
return NULL;
}
// ANCHOR: msg_process
int32_t msg_process(WS_RES* msg) {
int32_t rows = 0;
const char* topicName = ws_tmq_get_topic_name(msg);
const char* dbName = ws_tmq_get_db_name(msg);
int32_t vgroupId = ws_tmq_get_vgroup_id(msg);
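// topicName, dbName and vgroupId identify the message source; this demo only counts the rows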
while (true) {
// get one row data from message
WS_ROW row = ws_fetch_row(msg);
if (row == NULL) break;
// Add your data processing logic here
rows++;
}
return rows;
}
// ANCHOR_END: msg_process
WS_TAOS* init_env() {
int code = 0;
char* dsn = "ws://localhost:6041";
WS_TAOS* pConn = ws_connect(dsn);
if (pConn == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return NULL;
}
WS_RES* pRes;
// drop topic if exists
pRes = ws_query(pConn, "DROP TOPIC IF EXISTS topic_meters");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to drop topic_meters, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
goto END;
}
ws_free_result(pRes);
pRes = ws_query(pConn, "DROP DATABASE IF EXISTS power");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to drop database power, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
goto END;
}
ws_free_result(pRes);
// create database
pRes = ws_query(pConn, "CREATE DATABASE power PRECISION 'ms' WAL_RETENTION_PERIOD 3600");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to create power, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
goto END;
}
ws_free_result(pRes);
// create super table
pRes =
ws_query(pConn,
"CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS "
"(groupId INT, location BINARY(24))");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to create super table meters, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
goto END;
}
ws_free_result(pRes);
return pConn;
END:
ws_free_result(pRes);
ws_close(pConn);
return NULL;
}
void deinit_env(WS_TAOS* pConn) {
if (pConn) ws_close(pConn);
}
int32_t create_topic(WS_TAOS* pConn) {
WS_RES* pRes;
int code = 0;
if (!pConn) {
fprintf(stderr, "Invalid input parameter.\n");
return -1;
}
pRes = ws_query(pConn, "USE power");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to use power, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
return -1;
}
ws_free_result(pRes);
pRes = ws_query(
pConn,
"CREATE TOPIC IF NOT EXISTS topic_meters AS SELECT ts, current, voltage, phase, groupid, location FROM meters");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to create topic topic_meters, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
return -1;
}
ws_free_result(pRes);
return 0;
}
int32_t drop_topic(WS_TAOS* pConn) {
WS_RES* pRes;
int code = 0;
if (!pConn) {
fprintf(stderr, "Invalid input parameter.\n");
return -1;
}
pRes = ws_query(pConn, "USE power");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to use power, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
return -1;
}
ws_free_result(pRes);
pRes = ws_query(pConn, "DROP TOPIC IF EXISTS topic_meters");
code = ws_errno(pRes);
if (code != 0) {
fprintf(stderr, "Failed to drop topic topic_meters, ErrCode: 0x%x, ErrMessage: %s.\n", code, ws_errstr(pRes));
return -1;
}
ws_free_result(pRes);
return 0;
}
void tmq_commit_cb_print(ws_tmq_t* tmq, int32_t code, void* param) {
count += 1;
fprintf(stdout, "tmq_commit_cb_print() code: %d, tmq: %p, param: %p, count: %d.\n", code, tmq, param, count);
}
// ANCHOR: create_consumer_1
ws_tmq_t* build_consumer(const ConsumerConfig* config) {
ws_tmq_conf_res_t code;
ws_tmq_t* tmq = NULL;
// create a configuration object
ws_tmq_conf_t* conf = ws_tmq_conf_new();
// set the configuration parameters
code = ws_tmq_conf_set(conf, "enable.auto.commit", config->enable_auto_commit);
if (WS_TMQ_CONF_OK != code) {
ws_tmq_conf_destroy(conf);
return NULL;
}
code = ws_tmq_conf_set(conf, "auto.commit.interval.ms", config->auto_commit_interval_ms);
if (WS_TMQ_CONF_OK != code) {
ws_tmq_conf_destroy(conf);
return NULL;
}
code = ws_tmq_conf_set(conf, "group.id", config->group_id);
if (WS_TMQ_CONF_OK != code) {
ws_tmq_conf_destroy(conf);
return NULL;
}
code = ws_tmq_conf_set(conf, "client.id", config->client_id);
if (WS_TMQ_CONF_OK != code) {
ws_tmq_conf_destroy(conf);
return NULL;
}
code = ws_tmq_conf_set(conf, "auto.offset.reset", config->auto_offset_reset);
if (WS_TMQ_CONF_OK != code) {
ws_tmq_conf_destroy(conf);
return NULL;
}
// create a consumer object
tmq = ws_tmq_consumer_new(conf, "taos://localhost:6041", NULL, 0);
// destroy the configuration object
ws_tmq_conf_destroy(conf);
return tmq;
}
// ANCHOR_END: create_consumer_1
// ANCHOR: build_topic_list
// build a topic list used to subscribe
ws_tmq_list_t* build_topic_list() {
// create a empty topic list
ws_tmq_list_t* topicList = ws_tmq_list_new();
// append topic name to the list
int32_t code = ws_tmq_list_append(topicList, topic_name);
if (code) {
// if failed, destroy the list and return NULL
ws_tmq_list_destroy(topicList);
fprintf(stderr,
"Failed to create topic_list, topic: %s, groupId: %s, clientId: %s, ErrCode: 0x%x, ErrMessage: %s.\n",
topic_name, config.group_id, config.client_id, code, ws_tmq_errstr(NULL));
return NULL;
}
// if success, return the list
return topicList;
}
// ANCHOR_END: build_topic_list
// ANCHOR: basic_consume_loop
void basic_consume_loop(ws_tmq_t* tmq) {
int32_t totalRows = 0; // total rows consumed
int32_t msgCnt = 0; // total messages consumed
int32_t timeout = 5000; // poll timeout
while (running) {
// poll message from TDengine
WS_RES* tmqmsg = ws_tmq_consumer_poll(tmq, timeout);
if (tmqmsg) {
msgCnt++;
// Add your data processing logic here
totalRows += msg_process(tmqmsg);
// free the message
ws_free_result(tmqmsg);
}
if (msgCnt > 50) {
// consume 50 messages and break
break;
}
}
// print the result: total messages and total rows consumed
fprintf(stdout, "%d msg consumed, include %d rows\n", msgCnt, totalRows);
}
// ANCHOR_END: basic_consume_loop
// ANCHOR: consume_repeatly
void consume_repeatly(ws_tmq_t* tmq) {
int32_t numOfAssignment = 0;
ws_tmq_topic_assignment* pAssign = NULL;
// get the topic assignment
int32_t code = ws_tmq_get_topic_assignment(tmq, topic_name, &pAssign, &numOfAssignment);
if (code != 0 || pAssign == NULL || numOfAssignment == 0) {
fprintf(stderr, "Failed to get assignment, topic: %s, groupId: %s, clientId: %s, ErrCode: 0x%x, ErrMessage: %s.\n",
topic_name, config.group_id, config.client_id, code, ws_tmq_errstr(tmq));
return;
}
// seek to the earliest offset
for (int32_t i = 0; i < numOfAssignment; ++i) {
ws_tmq_topic_assignment* p = &pAssign[i];
code = ws_tmq_offset_seek(tmq, topic_name, p->vgId, p->begin);
if (code != 0) {
fprintf(stderr,
"Failed to seek offset, topic: %s, groupId: %s, clientId: %s, vgId: %d, ErrCode: 0x%x, ErrMessage: %s.\n",
topic_name, config.group_id, config.client_id, p->vgId, code, ws_tmq_errstr(tmq));
break;
}
}
if (code == 0) fprintf(stdout, "Assignment seek to beginning successfully.\n");
// free the assignment array
ws_tmq_free_assignment(pAssign, numOfAssignment);
// let's consume the messages again
basic_consume_loop(tmq);
}
// ANCHOR_END: consume_repeatly
// ANCHOR: manual_commit
void manual_commit(ws_tmq_t* tmq) {
int32_t totalRows = 0; // total rows consumed
int32_t msgCnt = 0; // total messages consumed
int32_t timeout = 5000; // poll timeout
while (running) {
// poll message from TDengine
WS_RES* tmqmsg = ws_tmq_consumer_poll(tmq, timeout);
if (tmqmsg) {
msgCnt++;
// process the message
totalRows += msg_process(tmqmsg);
// commit the message
int32_t code = ws_tmq_commit_sync(tmq, tmqmsg);
if (code) {
fprintf(stderr,
"Failed to commit offset, topic: %s, groupId: %s, clientId: %s, ErrCode: 0x%x, ErrMessage: %s.\n",
topic_name, config.group_id, config.client_id, code, ws_tmq_errstr(tmq));
// free the message
ws_free_result(tmqmsg);
break;
} else {
fprintf(stdout, "Commit offset manually successfully.\n");
}
// free the message
ws_free_result(tmqmsg);
}
if (msgCnt > 50) {
// consume 50 messages and break
break;
}
}
// print the result: total messages and total rows consumed
fprintf(stdout, "%d msg consumed, include %d rows.\n", msgCnt, totalRows);
}
// ANCHOR_END: manual_commit
int main(int argc, char* argv[]) {
int32_t code;
pthread_t thread_id;
WS_TAOS* pConn = init_env();
if (pConn == NULL) {
fprintf(stderr, "Failed to init env.\n");
return -1;
}
if (create_topic(pConn) < 0) {
fprintf(stderr, "Failed to create topic.\n");
return -1;
}
if (pthread_create(&thread_id, NULL, &prepare_data, NULL)) {
fprintf(stderr, "Failed to create thread.\n");
return -1;
}
// ANCHOR: create_consumer_2
ws_tmq_t* tmq = build_consumer(&config);
if (NULL == tmq) {
fprintf(stderr, "Failed to create native consumer, host: %s, groupId: %s, , clientId: %s.\n",
config.td_connect_host, config.group_id, config.client_id);
return -1;
} else {
fprintf(stdout, "Create consumer successfully, host: %s, groupId: %s, clientId: %s.\n", config.td_connect_host,
config.group_id, config.client_id);
}
// ANCHOR_END: create_consumer_2
// ANCHOR: subscribe_3
ws_tmq_list_t* topic_list = build_topic_list();
if (NULL == topic_list) {
fprintf(stderr, "Failed to create topic_list, topic: %s, groupId: %s, clientId: %s.\n", topic_name, config.group_id,
config.client_id);
return -1;
}
if ((code = ws_tmq_subscribe(tmq, topic_list))) {
fprintf(stderr,
"Failed to subscribe topic_list, topic: %s, groupId: %s, clientId: %s, ErrCode: 0x%x, ErrMessage: %s.\n",
topic_name, config.group_id, config.client_id, code, ws_tmq_errstr(tmq));
} else {
fprintf(stdout, "Subscribe topics successfully.\n");
}
ws_tmq_list_destroy(topic_list);
basic_consume_loop(tmq);
// ANCHOR_END: subscribe_3
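// replay the topic from the earliest offset, then consume once more with manual offset commits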
consume_repeatly(tmq);
manual_commit(tmq);
// ANCHOR: unsubscribe_and_close
// unsubscribe the topic
code = ws_tmq_unsubscribe(tmq);
if (code) {
fprintf(stderr,
"Failed to unsubscribe consumer, topic: %s, groupId: %s, clientId: %s, ErrCode: 0x%x, ErrMessage: %s.\n",
topic_name, config.group_id, config.client_id, code, ws_tmq_errstr(tmq));
} else {
fprintf(stdout, "Consumer unsubscribed successfully.\n");
}
// close the consumer
code = ws_tmq_consumer_close(tmq);
if (code) {
fprintf(stderr, "Failed to close consumer, topic: %s, groupId: %s, clientId: %s, ErrCode: 0x%x, ErrMessage: %s.\n",
topic_name, config.group_id, config.client_id, code, ws_tmq_errstr(tmq));
} else {
fprintf(stdout, "Consumer closed successfully.\n");
}
// ANCHOR_END: unsubscribe_and_close
thread_stop = 1;
pthread_join(thread_id, NULL);
if (drop_topic(pConn) < 0) {
fprintf(stderr, "Failed to drop topic.\n");
return -1;
}
deinit_env(pConn);
return 0;
}

View File

@ -0,0 +1,71 @@
/*
* Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
*
* This program is free software: you can use, redistribute, and/or modify
* it under the terms of the GNU Affero General Public License, version 3
* or later ("AGPL"), as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// TAOS standard API example. The same syntax as MySQL, but only a subset
// to compile: gcc -o with_reqid_demo with_reqid_demo.c -ltaosws
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "taosws.h"
static int DemoWithReqId() {
// ANCHOR: with_reqid
int code = 0;
char *dsn = "ws://localhost:6041";
// connect
WS_TAOS *taos = ws_connect(dsn);
if (taos == NULL) {
fprintf(stderr, "Failed to connect to %s, ErrCode: 0x%x, ErrMessage: %s.\n", dsn, ws_errno(NULL), ws_errstr(NULL));
return -1;
}
const char *sql = "SELECT ts, current, location FROM power.meters limit 1";
// query data with reqid
long reqid = 3L;
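// the request id is carried through client and server logs, making this query easy to trace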
WS_RES *result = ws_query_with_reqid(taos, sql, reqid);
code = ws_errno(result);
if (code != 0) {
fprintf(stderr, "Failed to execute sql withQID: %ld, ErrCode: 0x%x, ErrMessage: %s\n.", reqid, code,
ws_errstr(result));
ws_close(taos);
return -1;
}
WS_ROW row = NULL;
int rows = 0;
int num_fields = ws_field_count(result);
const WS_FIELD *fields = ws_fetch_fields(result);
fprintf(stdout, "query successfully, got %d fields, the sql is: %s.\n", num_fields, sql);
// fetch the records row by row
while ((row = ws_fetch_row(result))) {
// Add your data processing logic here
rows++;
}
fprintf(stdout, "total rows: %d\n", rows);
ws_free_result(result);
// close & clean
ws_close(taos);
return 0;
// ANCHOR_END: with_reqid
}
int main(int argc, char *argv[]) { return DemoWithReqId(); }

View File

@ -0,0 +1,101 @@
PROJECT(TDengine)
IF (TD_LINUX)
INCLUDE_DIRECTORIES(. ${TD_SOURCE_DIR}/src/inc ${TD_SOURCE_DIR}/src/client/inc ${TD_SOURCE_DIR}/inc)
AUX_SOURCE_DIRECTORY(. SRC)
add_executable(docs_connect_example "")
add_executable(docs_create_db_demo "")
add_executable(docs_insert_data_demo "")
add_executable(docs_query_data_demo "")
add_executable(docs_with_reqid_demo "")
add_executable(docs_sml_insert_demo "")
add_executable(docs_stmt_insert_demo "")
add_executable(docs_tmq_demo "")
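# sources are attached via target_sources() below, keeping every demo a single-file target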
target_sources(docs_connect_example
PRIVATE
"connect_example.c"
)
target_sources(docs_create_db_demo
PRIVATE
"create_db_demo.c"
)
target_sources(docs_insert_data_demo
PRIVATE
"insert_data_demo.c"
)
target_sources(docs_query_data_demo
PRIVATE
"query_data_demo.c"
)
target_sources(docs_with_reqid_demo
PRIVATE
"with_reqid_demo.c"
)
target_sources(docs_sml_insert_demo
PRIVATE
"sml_insert_demo.c"
)
target_sources(docs_stmt_insert_demo
PRIVATE
"stmt_insert_demo.c"
)
target_sources(docs_tmq_demo
PRIVATE
"tmq_demo.c"
)
target_link_libraries(docs_connect_example
taos
)
target_link_libraries(docs_create_db_demo
taos
)
target_link_libraries(docs_insert_data_demo
taos
)
target_link_libraries(docs_query_data_demo
taos
)
target_link_libraries(docs_with_reqid_demo
taos
)
target_link_libraries(docs_sml_insert_demo
taos
)
target_link_libraries(docs_stmt_insert_demo
taos
)
target_link_libraries(docs_tmq_demo
taos
pthread
)
SET_TARGET_PROPERTIES(docs_connect_example PROPERTIES OUTPUT_NAME docs_connect_example)
SET_TARGET_PROPERTIES(docs_create_db_demo PROPERTIES OUTPUT_NAME docs_create_db_demo)
SET_TARGET_PROPERTIES(docs_insert_data_demo PROPERTIES OUTPUT_NAME docs_insert_data_demo)
SET_TARGET_PROPERTIES(docs_query_data_demo PROPERTIES OUTPUT_NAME docs_query_data_demo)
SET_TARGET_PROPERTIES(docs_with_reqid_demo PROPERTIES OUTPUT_NAME docs_with_reqid_demo)
SET_TARGET_PROPERTIES(docs_sml_insert_demo PROPERTIES OUTPUT_NAME docs_sml_insert_demo)
SET_TARGET_PROPERTIES(docs_stmt_insert_demo PROPERTIES OUTPUT_NAME docs_stmt_insert_demo)
SET_TARGET_PROPERTIES(docs_tmq_demo PROPERTIES OUTPUT_NAME docs_tmq_demo)
ENDIF ()
IF (TD_DARWIN)
INCLUDE_DIRECTORIES(. ${TD_SOURCE_DIR}/src/inc ${TD_SOURCE_DIR}/src/client/inc ${TD_SOURCE_DIR}/inc)
AUX_SOURCE_DIRECTORY(. SRC)
ENDIF ()

34
docs/examples/c/Makefile Normal file
View File

@ -0,0 +1,34 @@
# Makefile for building TDengine examples on TD Linux platform
INCLUDE_DIRS =
TARGETS = connect_example \
create_db_demo \
insert_data_demo \
query_data_demo \
with_reqid_demo \
sml_insert_demo \
stmt_insert_demo \
tmq_demo
SOURCES = connect_example.c \
create_db_demo.c \
insert_data_demo.c \
query_data_demo.c \
with_reqid_demo.c \
sml_insert_demo.c \
stmt_insert_demo.c \
tmq_demo.c
LIBS = -ltaos -lpthread
CFLAGS = -g
all: $(TARGETS)
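# build each target from the single .c source file of the same name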
$(TARGETS):
$(CC) $(CFLAGS) -o $@ $(wildcard $(@F).c) $(LIBS)
clean:
rm -f $(TARGETS)

View File

@ -22,7 +22,7 @@
<dependency>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>3.3.0</version>
<version>3.4.0</version>
</dependency>
<!-- ANCHOR_END: dep-->

View File

@ -3,10 +3,7 @@ package com.taos.example;
import com.taosdata.jdbc.TSDBPreparedStatement;
import com.taosdata.jdbc.utils.StringUtils;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
@ -16,15 +13,32 @@ public class ParameterBindingFullDemo {
private static final String host = "127.0.0.1";
private static final Random random = new Random(System.currentTimeMillis());
private static final int BINARY_COLUMN_SIZE = 50;
private static final int BINARY_COLUMN_SIZE = 100;
private static final String[] schemaList = {
"create table stable1(ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint) tags(t1 tinyint, t2 smallint, t3 int, t4 bigint)",
"create table stable2(ts timestamp, f1 float, f2 double) tags(t1 float, t2 double)",
"create table stable3(ts timestamp, f1 bool) tags(t1 bool)",
"create table stable4(ts timestamp, f1 binary(" + BINARY_COLUMN_SIZE + ")) tags(t1 binary(" + BINARY_COLUMN_SIZE + "))",
"create table stable5(ts timestamp, f1 nchar(" + BINARY_COLUMN_SIZE + ")) tags(t1 nchar(" + BINARY_COLUMN_SIZE + "))",
"create table stable6(ts timestamp, f1 varbinary(" + BINARY_COLUMN_SIZE + ")) tags(t1 varbinary(" + BINARY_COLUMN_SIZE + "))",
"create table stable7(ts timestamp, f1 geometry(" + BINARY_COLUMN_SIZE + ")) tags(t1 geometry(" + BINARY_COLUMN_SIZE + "))",
"drop database if exists example_all_type_stmt",
"CREATE DATABASE IF NOT EXISTS example_all_type_stmt",
"USE example_all_type_stmt",
"CREATE STABLE IF NOT EXISTS stb_json (" +
"ts TIMESTAMP, " +
"int_col INT) " +
"tags (json_tag json)",
"CREATE STABLE IF NOT EXISTS stb (" +
"ts TIMESTAMP, " +
"int_col INT, " +
"double_col DOUBLE, " +
"bool_col BOOL, " +
"binary_col BINARY(100), " +
"nchar_col NCHAR(100), " +
"varbinary_col VARBINARY(100), " +
"geometry_col GEOMETRY(100)) " +
"tags (" +
"int_tag INT, " +
"double_tag DOUBLE, " +
"bool_tag BOOL, " +
"binary_tag BINARY(100), " +
"nchar_tag NCHAR(100), " +
"varbinary_tag VARBINARY(100), " +
"geometry_tag GEOMETRY(100))"
};
private static final int numOfSubTable = 10, numOfRow = 10;
@ -34,55 +48,37 @@ public class ParameterBindingFullDemo {
try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) {
init(conn);
stmtJsonTag(conn);
stmtAll(conn);
bindInteger(conn);
bindFloat(conn);
bindBoolean(conn);
bindBytes(conn);
bindString(conn);
bindVarbinary(conn);
bindGeometry(conn);
clean(conn);
} catch (SQLException ex) {
// handle any errors, please refer to the JDBC specifications for detailed exceptions info
System.out.println("Failed to insert to table meters using stmt, url: " + jdbcUrl + "; ErrCode:" + ex.getErrorCode() + "; ErrMessage: " + ex.getMessage());
System.out.println("Failed to insert data using stmt, ErrCode:" + ex.getErrorCode() + "; ErrMessage: " + ex.getMessage());
throw ex;
} catch (Exception ex){
System.out.println("Failed to insert to table meters using stmt, url: " + jdbcUrl + "; ErrMessage: " + ex.getMessage());
} catch (Exception ex) {
System.out.println("Failed to insert data using stmt, ErrMessage: " + ex.getMessage());
throw ex;
}
}
private static void init(Connection conn) throws SQLException {
clean(conn);
try (Statement stmt = conn.createStatement()) {
stmt.execute("create database if not exists test_parabind");
stmt.execute("use test_parabind");
for (int i = 0; i < schemaList.length; i++) {
stmt.execute(schemaList[i]);
}
}
}
private static void clean(Connection conn) throws SQLException {
try (Statement stmt = conn.createStatement()) {
stmt.execute("drop database if exists test_parabind");
}
}
private static void bindInteger(Connection conn) throws SQLException {
String sql = "insert into ? using stable1 tags(?,?,?,?) values(?,?,?,?,?)";
private static void stmtJsonTag(Connection conn) throws SQLException {
String sql = "INSERT INTO ? using stb_json tags(?) VALUES (?,?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t1_" + i);
pstmt.setTableName("ntb_json_" + i);
// set tags
pstmt.setTagByte(0, Byte.parseByte(Integer.toString(random.nextInt(Byte.MAX_VALUE))));
pstmt.setTagShort(1, Short.parseShort(Integer.toString(random.nextInt(Short.MAX_VALUE))));
pstmt.setTagInt(2, random.nextInt(Integer.MAX_VALUE));
pstmt.setTagLong(3, random.nextLong());
pstmt.setTagJson(0, "{\"device\":\"device_" + i + "\"}");
// set columns
ArrayList<Long> tsList = new ArrayList<>();
long current = System.currentTimeMillis();
@ -90,45 +86,42 @@ public class ParameterBindingFullDemo {
tsList.add(current + j);
pstmt.setTimestamp(0, tsList);
ArrayList<Byte> f1List = new ArrayList<>();
ArrayList<Integer> f1List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f1List.add(Byte.parseByte(Integer.toString(random.nextInt(Byte.MAX_VALUE))));
pstmt.setByte(1, f1List);
ArrayList<Short> f2List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f2List.add(Short.parseShort(Integer.toString(random.nextInt(Short.MAX_VALUE))));
pstmt.setShort(2, f2List);
ArrayList<Integer> f3List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f3List.add(random.nextInt(Integer.MAX_VALUE));
pstmt.setInt(3, f3List);
ArrayList<Long> f4List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f4List.add(random.nextLong());
pstmt.setLong(4, f4List);
f1List.add(random.nextInt(Integer.MAX_VALUE));
pstmt.setInt(1, f1List);
// add column
pstmt.columnDataAddBatch();
}
// execute column
pstmt.columnDataExecuteBatch();
System.out.println("Successfully inserted rows to example_all_type_stmt.ntb_json");
}
}
private static void bindFloat(Connection conn) throws SQLException {
String sql = "insert into ? using stable2 tags(?,?) values(?,?,?)";
private static void stmtAll(Connection conn) throws SQLException {
String sql = "INSERT INTO ? using stb tags(?,?,?,?,?,?,?) VALUES (?,?,?,?,?,?,?,?)";
TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class);
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t2_" + i);
pstmt.setTableName("ntb" + i);
// set tags
pstmt.setTagFloat(0, random.nextFloat());
pstmt.setTagDouble(1, random.nextDouble());
pstmt.setTagInt(0, i);
pstmt.setTagDouble(1, 1.1);
pstmt.setTagBoolean(2, true);
pstmt.setTagString(3, "binary_value");
pstmt.setTagNString(4, "nchar_value");
pstmt.setTagVarbinary(5, new byte[]{(byte) 0x98, (byte) 0xf4, 0x6e});
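// tag 6: geometry as little-endian WKB for POINT(100 100) (byte order, type 1, two float64 coordinates)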
pstmt.setTagGeometry(6, new byte[]{
0x01, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59,
0x40, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59, 0x40});
// set columns
ArrayList<Long> tsList = new ArrayList<>();
long current = System.currentTimeMillis();
@ -136,190 +129,54 @@ public class ParameterBindingFullDemo {
tsList.add(current + j);
pstmt.setTimestamp(0, tsList);
ArrayList<Float> f1List = new ArrayList<>();
ArrayList<Integer> f1List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f1List.add(random.nextFloat());
pstmt.setFloat(1, f1List);
f1List.add(random.nextInt(Integer.MAX_VALUE));
pstmt.setInt(1, f1List);
ArrayList<Double> f2List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f2List.add(random.nextDouble());
pstmt.setDouble(2, f2List);
ArrayList<Boolean> f3List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f3List.add(true);
pstmt.setBoolean(3, f3List);
ArrayList<String> f4List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f4List.add("binary_value");
pstmt.setString(4, f4List, BINARY_COLUMN_SIZE);
ArrayList<String> f5List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f5List.add("nchar_value");
pstmt.setNString(5, f5List, BINARY_COLUMN_SIZE);
ArrayList<byte[]> f6List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f6List.add(new byte[]{(byte) 0x98, (byte) 0xf4, 0x6e});
pstmt.setVarbinary(6, f6List, BINARY_COLUMN_SIZE);
ArrayList<byte[]> f7List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f7List.add(new byte[]{
0x01, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59,
0x40, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59, 0x40});
pstmt.setGeometry(7, f7List, BINARY_COLUMN_SIZE);
// add column
pstmt.columnDataAddBatch();
}
// execute
pstmt.columnDataExecuteBatch();
System.out.println("Successfully inserted rows to example_all_type_stmt.ntb");
// close manually since try-with-resources is not used here
pstmt.close();
}
private static void bindBoolean(Connection conn) throws SQLException {
String sql = "insert into ? using stable3 tags(?) values(?,?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t3_" + i);
// set tags
pstmt.setTagBoolean(0, random.nextBoolean());
// set columns
ArrayList<Long> tsList = new ArrayList<>();
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++)
tsList.add(current + j);
pstmt.setTimestamp(0, tsList);
ArrayList<Boolean> f1List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++)
f1List.add(random.nextBoolean());
pstmt.setBoolean(1, f1List);
// add column
pstmt.columnDataAddBatch();
}
// execute
pstmt.columnDataExecuteBatch();
}
}
private static void bindBytes(Connection conn) throws SQLException {
String sql = "insert into ? using stable4 tags(?) values(?,?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t4_" + i);
// set tags
pstmt.setTagString(0, new String("abc"));
// set columns
ArrayList<Long> tsList = new ArrayList<>();
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++)
tsList.add(current + j);
pstmt.setTimestamp(0, tsList);
ArrayList<String> f1List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++) {
f1List.add(new String("abc"));
}
pstmt.setString(1, f1List, BINARY_COLUMN_SIZE);
// add column
pstmt.columnDataAddBatch();
}
// execute
pstmt.columnDataExecuteBatch();
}
}
private static void bindString(Connection conn) throws SQLException {
String sql = "insert into ? using stable5 tags(?) values(?,?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t5_" + i);
// set tags
pstmt.setTagNString(0, "California.SanFrancisco");
// set columns
ArrayList<Long> tsList = new ArrayList<>();
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++)
tsList.add(current + j);
pstmt.setTimestamp(0, tsList);
ArrayList<String> f1List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++) {
f1List.add("California.LosAngeles");
}
pstmt.setNString(1, f1List, BINARY_COLUMN_SIZE);
// add column
pstmt.columnDataAddBatch();
}
// execute
pstmt.columnDataExecuteBatch();
}
}
private static void bindVarbinary(Connection conn) throws SQLException {
String sql = "insert into ? using stable6 tags(?) values(?,?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t6_" + i);
// set tags
byte[] bTag = new byte[]{0,2,3,4,5};
bTag[0] = (byte) i;
pstmt.setTagVarbinary(0, bTag);
// set columns
ArrayList<Long> tsList = new ArrayList<>();
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++)
tsList.add(current + j);
pstmt.setTimestamp(0, tsList);
ArrayList<byte[]> f1List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++) {
byte[] v = new byte[]{0,2,3,4,5,6};
v[0] = (byte)j;
f1List.add(v);
}
pstmt.setVarbinary(1, f1List, BINARY_COLUMN_SIZE);
// add column
pstmt.columnDataAddBatch();
}
// execute
pstmt.columnDataExecuteBatch();
}
}
private static void bindGeometry(Connection conn) throws SQLException {
String sql = "insert into ? using stable7 tags(?) values(?,?)";
try (TSDBPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSDBPreparedStatement.class)) {
byte[] g1 = StringUtils.hexToBytes("0101000000000000000000F03F0000000000000040");
byte[] g2 = StringUtils.hexToBytes("0102000020E610000002000000000000000000F03F000000000000004000000000000008400000000000001040");
List<byte[]> listGeo = new ArrayList<>();
listGeo.add(g1);
listGeo.add(g2);
for (int i = 1; i <= 2; i++) {
// set table name
pstmt.setTableName("t7_" + i);
// set tags
pstmt.setTagGeometry(0, listGeo.get(i - 1));
// set columns
ArrayList<Long> tsList = new ArrayList<>();
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++)
tsList.add(current + j);
pstmt.setTimestamp(0, tsList);
ArrayList<byte[]> f1List = new ArrayList<>();
for (int j = 0; j < numOfRow; j++) {
f1List.add(listGeo.get(i - 1));
}
pstmt.setGeometry(1, f1List, BINARY_COLUMN_SIZE);
// add column
pstmt.columnDataAddBatch();
}
// execute
pstmt.columnDataExecuteBatch();
}
}
}
// ANCHOR_END: para_bind

View File

@ -17,8 +17,8 @@ public class SchemalessWsTest {
private static final String jsonDemo = "{\"metric\": \"metric_json\",\"timestamp\": 1626846400,\"value\": 10.3, \"tags\": {\"groupid\": 2, \"location\": \"California.SanFrancisco\", \"id\": \"d1001\"}}";
public static void main(String[] args) throws SQLException {
final String url = "jdbc:TAOS-RS://" + host + ":6041?user=root&password=taosdata&batchfetch=true";
try(Connection connection = DriverManager.getConnection(url)){
final String url = "jdbc:TAOS-WS://" + host + ":6041?user=root&password=taosdata";
try (Connection connection = DriverManager.getConnection(url)) {
init(connection);
AbstractConnection conn = connection.unwrap(AbstractConnection.class);

View File

@ -12,9 +12,9 @@ public class WSConnectExample {
public static void main(String[] args) throws Exception {
// use
// String jdbcUrl =
// "jdbc:TAOS-RS://localhost:6041/dbName?user=root&password=taosdata&batchfetch=true";
// "jdbc:TAOS-WS://localhost:6041/dbName?user=root&password=taosdata";
// if you want to connect a specified database named "dbName".
String jdbcUrl = "jdbc:TAOS-RS://localhost:6041?user=root&password=taosdata&batchfetch=true";
String jdbcUrl = "jdbc:TAOS-WS://localhost:6041?user=root&password=taosdata";
Properties connProps = new Properties();
connProps.setProperty(TSDBDriver.PROPERTY_KEY_ENABLE_AUTO_RECONNECT, "true");
connProps.setProperty(TSDBDriver.PROPERTY_KEY_CHARSET, "UTF-8");

View File

@ -15,7 +15,7 @@ public class WSParameterBindingBasicDemo {
public static void main(String[] args) throws SQLException {
String jdbcUrl = "jdbc:TAOS-RS://" + host + ":6041/?batchfetch=true";
String jdbcUrl = "jdbc:TAOS-WS://" + host + ":6041";
try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) {
init(conn);
@ -40,7 +40,7 @@ public class WSParameterBindingBasicDemo {
pstmt.setFloat(4, random.nextFloat());
pstmt.addBatch();
}
int [] exeResult = pstmt.executeBatch();
int[] exeResult = pstmt.executeBatch();
// you can check exeResult here
System.out.println("Successfully inserted " + exeResult.length + " rows to power.meters.");
}
@ -60,7 +60,8 @@ public class WSParameterBindingBasicDemo {
try (Statement stmt = conn.createStatement()) {
stmt.execute("CREATE DATABASE IF NOT EXISTS power");
stmt.execute("USE power");
stmt.execute("CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId INT, location BINARY(24))");
stmt.execute(
"CREATE STABLE IF NOT EXISTS power.meters (ts TIMESTAMP, current FLOAT, voltage INT, phase FLOAT) TAGS (groupId INT, location BINARY(24))");
}
}
}

View File

@ -11,169 +11,127 @@ public class WSParameterBindingFullDemo {
private static final Random random = new Random(System.currentTimeMillis());
private static final int BINARY_COLUMN_SIZE = 30;
private static final String[] schemaList = {
"create table stable1(ts timestamp, f1 tinyint, f2 smallint, f3 int, f4 bigint) tags(t1 tinyint, t2 smallint, t3 int, t4 bigint)",
"create table stable2(ts timestamp, f1 float, f2 double) tags(t1 float, t2 double)",
"create table stable3(ts timestamp, f1 bool) tags(t1 bool)",
"create table stable4(ts timestamp, f1 binary(" + BINARY_COLUMN_SIZE + ")) tags(t1 binary(" + BINARY_COLUMN_SIZE + "))",
"create table stable5(ts timestamp, f1 nchar(" + BINARY_COLUMN_SIZE + ")) tags(t1 nchar(" + BINARY_COLUMN_SIZE + "))"
"drop database if exists example_all_type_stmt",
"CREATE DATABASE IF NOT EXISTS example_all_type_stmt",
"USE example_all_type_stmt",
"CREATE STABLE IF NOT EXISTS stb_json (" +
"ts TIMESTAMP, " +
"int_col INT) " +
"tags (json_tag json)",
"CREATE STABLE IF NOT EXISTS stb (" +
"ts TIMESTAMP, " +
"int_col INT, " +
"double_col DOUBLE, " +
"bool_col BOOL, " +
"binary_col BINARY(100), " +
"nchar_col NCHAR(100), " +
"varbinary_col VARBINARY(100), " +
"geometry_col GEOMETRY(100)) " +
"tags (" +
"int_tag INT, " +
"double_tag DOUBLE, " +
"bool_tag BOOL, " +
"binary_tag BINARY(100), " +
"nchar_tag NCHAR(100), " +
"varbinary_tag VARBINARY(100), " +
"geometry_tag GEOMETRY(100))"
};
private static final int numOfSubTable = 10, numOfRow = 10;
public static void main(String[] args) throws SQLException {
String jdbcUrl = "jdbc:TAOS-RS://" + host + ":6041/?batchfetch=true";
String jdbcUrl = "jdbc:TAOS-WS://" + host + ":6041/";
try (Connection conn = DriverManager.getConnection(jdbcUrl, "root", "taosdata")) {
init(conn);
bindInteger(conn);
stmtJsonTag(conn);
bindFloat(conn);
bindBoolean(conn);
bindBytes(conn);
bindString(conn);
stmtAll(conn);
} catch (SQLException ex) {
// handle any errors, please refer to the JDBC specifications for detailed exceptions info
System.out.println("Failed to insert to table meters using stmt, url: " + jdbcUrl + "; ErrCode:" + ex.getErrorCode() + "; ErrMessage: " + ex.getMessage());
// handle any errors, please refer to the JDBC specifications for detailed
// exceptions info
System.out.println("Failed to insert data using stmt, ErrCode:" + ex.getErrorCode() + "; ErrMessage: "
+ ex.getMessage());
throw ex;
} catch (Exception ex){
System.out.println("Failed to insert to table meters using stmt, url: " + jdbcUrl + "; ErrMessage: " + ex.getMessage());
} catch (Exception ex) {
System.out.println("Failed to insert data using stmt, ErrMessage: " + ex.getMessage());
throw ex;
}
}
private static void init(Connection conn) throws SQLException {
try (Statement stmt = conn.createStatement()) {
stmt.execute("drop database if exists test_ws_parabind");
stmt.execute("create database if not exists test_ws_parabind");
stmt.execute("use test_ws_parabind");
for (int i = 0; i < schemaList.length; i++) {
stmt.execute(schemaList[i]);
}
}
}
private static void bindInteger(Connection conn) throws SQLException {
String sql = "insert into ? using stable1 tags(?,?,?,?) values(?,?,?,?,?)";
private static void stmtJsonTag(Connection conn) throws SQLException {
String sql = "INSERT INTO ? using stb_json tags(?) VALUES (?,?)";
try (TSWSPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSWSPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t1_" + i);
pstmt.setTableName("ntb_json_" + i);
// set tags
pstmt.setTagByte(1, Byte.parseByte(Integer.toString(random.nextInt(Byte.MAX_VALUE))));
pstmt.setTagShort(2, Short.parseShort(Integer.toString(random.nextInt(Short.MAX_VALUE))));
pstmt.setTagInt(3, random.nextInt(Integer.MAX_VALUE));
pstmt.setTagLong(4, random.nextLong());
pstmt.setTagJson(1, "{\"device\":\"device_" + i + "\"}");
// set columns
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++) {
pstmt.setTimestamp(1, new Timestamp(current + j));
pstmt.setByte(2, Byte.parseByte(Integer.toString(random.nextInt(Byte.MAX_VALUE))));
pstmt.setShort(3, Short.parseShort(Integer.toString(random.nextInt(Short.MAX_VALUE))));
pstmt.setInt(4, random.nextInt(Integer.MAX_VALUE));
pstmt.setLong(5, random.nextLong());
pstmt.setInt(2, j);
pstmt.addBatch();
}
pstmt.executeBatch();
}
System.out.println("Successfully inserted rows to example_all_type_stmt.ntb_json");
}
}
private static void bindFloat(Connection conn) throws SQLException {
String sql = "insert into ? using stable2 tags(?,?) values(?,?,?)";
try(TSWSPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSWSPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t2_" + i);
// set tags
pstmt.setTagFloat(1, random.nextFloat());
pstmt.setTagDouble(2, random.nextDouble());
// set columns
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++) {
pstmt.setTimestamp(1, new Timestamp(current + j));
pstmt.setFloat(2, random.nextFloat());
pstmt.setDouble(3, random.nextDouble());
pstmt.addBatch();
}
pstmt.executeBatch();
}
}
}
private static void bindBoolean(Connection conn) throws SQLException {
String sql = "insert into ? using stable3 tags(?) values(?,?)";
try (TSWSPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSWSPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t3_" + i);
// set tags
pstmt.setTagBoolean(1, random.nextBoolean());
// set columns
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++) {
pstmt.setTimestamp(1, new Timestamp(current + j));
pstmt.setBoolean(2, random.nextBoolean());
pstmt.addBatch();
}
pstmt.executeBatch();
}
}
}
private static void bindBytes(Connection conn) throws SQLException {
String sql = "insert into ? using stable4 tags(?) values(?,?)";
private static void stmtAll(Connection conn) throws SQLException {
String sql = "INSERT INTO ? using stb tags(?,?,?,?,?,?,?) VALUES (?,?,?,?,?,?,?,?)";
try (TSWSPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSWSPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t4_" + i);
// set tags
pstmt.setTagString(1, new String("abc"));
// set table name
pstmt.setTableName("ntb");
// set tags
pstmt.setTagInt(1, 1);
pstmt.setTagDouble(2, 1.1);
pstmt.setTagBoolean(3, true);
pstmt.setTagString(4, "binary_value");
pstmt.setTagNString(5, "nchar_value");
pstmt.setTagVarbinary(6, new byte[] { (byte) 0x98, (byte) 0xf4, 0x6e });
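// tag 7: geometry as little-endian WKB for POINT(100 100)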
pstmt.setTagGeometry(7, new byte[] {
0x01, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59,
0x40, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59, 0x40 });
// set columns
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++) {
pstmt.setTimestamp(1, new Timestamp(current + j));
pstmt.setString(2, "abc");
pstmt.addBatch();
}
pstmt.executeBatch();
}
}
}
long current = System.currentTimeMillis();
private static void bindString(Connection conn) throws SQLException {
String sql = "insert into ? using stable5 tags(?) values(?,?)";
try (TSWSPreparedStatement pstmt = conn.prepareStatement(sql).unwrap(TSWSPreparedStatement.class)) {
for (int i = 1; i <= numOfSubTable; i++) {
// set table name
pstmt.setTableName("t5_" + i);
// set tags
pstmt.setTagNString(1, "California.SanFrancisco");
// set columns
long current = System.currentTimeMillis();
for (int j = 0; j < numOfRow; j++) {
pstmt.setTimestamp(0, new Timestamp(current + j));
pstmt.setNString(1, "California.SanFrancisco");
pstmt.addBatch();
}
pstmt.executeBatch();
}
pstmt.setTimestamp(1, new Timestamp(current));
pstmt.setInt(2, 1);
pstmt.setDouble(3, 1.1);
pstmt.setBoolean(4, true);
pstmt.setString(5, "binary_value");
pstmt.setNString(6, "nchar_value");
pstmt.setVarbinary(7, new byte[] { (byte) 0x98, (byte) 0xf4, 0x6e });
pstmt.setGeometry(8, new byte[] {
0x01, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59,
0x40, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x59, 0x40 });
pstmt.addBatch();
pstmt.executeBatch();
System.out.println("Successfully inserted rows to example_all_type_stmt.ntb");
}
}
}

View File

@ -50,36 +50,68 @@ public class TestAll {
}
@Test
public void testRestInsert() throws SQLException {
dropDB("power");
RestInsertExample.main(args);
RestQueryExample.main(args);
public void testWsConnect() throws Exception {
WSConnectExample.main(args);
}
@Test
public void testStmtInsert() throws SQLException {
public void testBase() throws Exception {
JdbcCreatDBDemo.main(args);
JdbcInsertDataDemo.main(args);
JdbcQueryDemo.main(args);
dropDB("power");
StmtInsertExample.main(args);
}
@Test
public void testSubscribe() {
public void testWsSchemaless() throws Exception {
dropDB("power");
SchemalessWsTest.main(args);
}
@Test
public void testJniSchemaless() throws Exception {
dropDB("power");
SchemalessJniTest.main(args);
}
@Test
public void testJniStmtBasic() throws Exception {
dropDB("power");
ParameterBindingBasicDemo.main(args);
}
@Test
public void testJniStmtFull() throws Exception {
dropDB("power");
ParameterBindingFullDemo.main(args);
}
@Test
public void testWsStmtBasic() throws Exception {
dropDB("power");
WSParameterBindingBasicDemo.main(args);
}
@Test
public void testWsStmtFull() throws Exception {
dropDB("power");
WSParameterBindingFullDemo.main(args);
}
@Test
public void testConsumer() throws Exception {
dropDB("power");
SubscribeDemo.main(args);
}
@Test
public void testSubscribeOverWebsocket() {
WebsocketSubscribeDemo.main(args);
}
@Test
public void testSchemaless() throws SQLException {
LineProtocolExample.main(args);
TelnetLineProtocolExample.main(args);
// for json protocol, tags may be double type. but for telnet protocol tag must be nchar type.
// To avoid type mismatch, we delete database test.
dropDB("test");
JSONProtocolExample.main(args);
}
// @Test
// public void testSubscribeJni() throws SQLException, InterruptedException {
// dropDB("power");
// ConsumerLoopFull.main(args);
// }
// @Test
// public void testSubscribeWs() throws SQLException, InterruptedException {
// dropDB("power");
// WsConsumerLoopFull.main(args);
// }
}

View File

@ -4,7 +4,6 @@
"main": "index.js",
"license": "MIT",
"dependencies": {
"@tdengine/client": "^3.0.1",
"@tdengine/rest": "^3.0.0"
"@tdengine/websocket": "^3.1.0"
}
}

View File

@ -0,0 +1,100 @@
const taos = require("@tdengine/websocket");
let dsn = 'ws://localhost:6041';
async function json_tag_example() {
let wsSql = null;
try {
let conf = new taos.WSConfig(dsn);
conf.setUser('root');
conf.setPwd('taosdata');
wsSql = await taos.sqlConnect(conf);
console.log("Connected to " + dsn + " successfully.");
// create database
await wsSql.exec('CREATE DATABASE IF NOT EXISTS example_json_tag');
console.log("Create database example_json_tag successfully.");
// create table
await wsSql.exec('create table if not exists example_json_tag.stb (ts timestamp, v int) tags(jt json)');
console.log("Create stable example_json_tag.stb successfully");
let insertQuery = 'INSERT INTO ' +
'example_json_tag.tb1 USING example_json_tag.stb TAGS(\'{"name":"value"}\') ' +
"values(now, 1) ";
let taosResult = await wsSql.exec(insertQuery);
console.log("Successfully inserted " + taosResult.getAffectRows() + " rows to example_json_tag.stb.");
let sql = 'SELECT ts, v, jt FROM example_json_tag.stb limit 100';
let wsRows = await wsSql.query(sql);
while (await wsRows.next()) {
let row = wsRows.getData();
console.log('ts: ' + row[0] + ', v: ' + row[1] + ', jt: ' + row[2]);
}
} catch (err) {
console.error(`Failed to create database example_json_tag or stable stb, ErrCode: ${err.code}, ErrMessage: ${err.message}`);
throw err;
} finally {
if (wsSql) {
await wsSql.close();
}
}
}
async function all_type_example() {
let wsSql = null;
try {
let conf = new taos.WSConfig(dsn);
conf.setUser('root');
conf.setPwd('taosdata');
wsSql = await taos.sqlConnect(conf);
console.log("Connected to " + dsn + " successfully.");
// create database
await wsSql.exec('CREATE DATABASE IF NOT EXISTS all_type_example');
console.log("Create database all_type_example successfully.");
// create table
await wsSql.exec('create table if not exists all_type_example.stb (ts timestamp, ' +
'int_col INT, double_col DOUBLE, bool_col BOOL, binary_col BINARY(100),' +
'nchar_col NCHAR(100), varbinary_col VARBINARY(100), geometry_col GEOMETRY(100)) ' +
'tags(int_tag INT, double_tag DOUBLE, bool_tag BOOL, binary_tag BINARY(100),' +
'nchar_tag NCHAR(100), varbinary_tag VARBINARY(100), geometry_tag GEOMETRY(100));');
console.log("Create stable all_type_example.stb successfully");
let insertQuery = "INSERT INTO all_type_example.tb1 using all_type_example.stb "
+ "tags(1, 1.1, true, 'binary_value', 'nchar_value', '\\x98f46e', 'POINT(100 100)') "
+ "values(now, 1, 1.1, true, 'binary_value', 'nchar_value', '\\x98f46e', 'POINT(100 100)')";
let taosResult = await wsSql.exec(insertQuery);
console.log("Successfully inserted " + taosResult.getAffectRows() + " rows to all_type_example.stb.");
let sql = 'SELECT * FROM all_type_example.stb limit 100';
let wsRows = await wsSql.query(sql);
let meta = wsRows.getMeta();
console.log("wsRow:meta:=>", meta);
while (await wsRows.next()) {
let row = wsRows.getData();
console.log(row);
}
} catch (err) {
console.error(`Failed to create database all_type_example or stable stb, ErrCode: ${err.code}, ErrMessage: ${err.message}`);
throw err;
} finally {
if (wsSql) {
await wsSql.close();
}
}
}
async function test() {
await json_tag_example()
await all_type_example()
taos.destroy();
}
test()

View File

@ -0,0 +1,148 @@
const taos = require("@tdengine/websocket");
let dsn = 'ws://localhost:6041';
async function json_tag_example() {
let wsSql = null;
try {
let conf = new taos.WSConfig(dsn);
conf.setUser('root');
conf.setPwd('taosdata');
wsSql = await taos.sqlConnect(conf);
console.log("Connected to " + dsn + " successfully.");
// create database
await wsSql.exec('CREATE DATABASE IF NOT EXISTS example_json_tag');
console.log("Create database example_json_tag successfully.");
await wsSql.exec('use example_json_tag');
// create table
await wsSql.exec('create table if not exists stb (ts timestamp, v int) tags(jt json)');
console.log("Create stable example_json_tag.stb successfully");
let stmt = await wsSql.stmtInit();
await stmt.prepare("INSERT INTO ? using stb tags(?) VALUES (?,?)");
await stmt.setTableName(`tb1`);
let tagParams = stmt.newStmtParam();
tagParams.setJson(['{"name":"value"}']);
await stmt.setTags(tagParams);
let bindParams = stmt.newStmtParam();
const currentMillis = new Date().getTime();
bindParams.setTimestamp([currentMillis]);
bindParams.setInt([1]);
await stmt.bind(bindParams);
await stmt.batch();
await stmt.exec();
await stmt.close();
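// read back the rows that were just written through parameter binding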
let sql = 'SELECT ts, v, jt FROM example_json_tag.stb limit 100';
let wsRows = await wsSql.query(sql);
while (await wsRows.next()) {
let row = wsRows.getData();
console.log('ts: ' + row[0] + ', v: ' + row[1] + ', jt: ' + row[2]);
}
} catch (err) {
console.error(`Failed to create database example_json_tag or stable stb, ErrCode: ${err.code}, ErrMessage: ${err.message}`);
throw err;
} finally {
if (wsSql) {
await wsSql.close();
}
}
}
async function all_type_example() {
let wsSql = null;
let stmt = null;
try {
let conf = new taos.WSConfig(dsn);
conf.setUser('root');
conf.setPwd('taosdata');
wsSql = await taos.sqlConnect(conf);
console.log("Connected to " + dsn + " successfully.");
// create database
await wsSql.exec('CREATE DATABASE IF NOT EXISTS all_type_example');
console.log("Create database all_type_example successfully.");
await wsSql.exec('use all_type_example');
// create table
await wsSql.exec('create table if not exists stb (ts timestamp, ' +
'int_col INT, double_col DOUBLE, bool_col BOOL, binary_col BINARY(100),' +
'nchar_col NCHAR(100), varbinary_col VARBINARY(100), geometry_col GEOMETRY(100)) ' +
'tags(int_tag INT, double_tag DOUBLE, bool_tag BOOL, binary_tag BINARY(100),' +
'nchar_tag NCHAR(100), varbinary_tag VARBINARY(100), geometry_tag GEOMETRY(100));');
console.log("Create stable all_type_example.stb successfully");
let geometryData = new Uint8Array([0x01,0x01,0x00,0x00,0x00,0x00,0x00,0x00,
0x00,0x00,0x00,0x59,0x40,0x00,0x00,0x00,0x00,0x00,0x00,0x59,0x40,]).buffer;
const encoder = new TextEncoder();
let vbData = encoder.encode(`Hello, world!`).buffer;
stmt = await wsSql.stmtInit();
await stmt.prepare("INSERT INTO ? using stb tags(?,?,?,?,?,?,?) VALUES (?,?,?,?,?,?,?,?)");
await stmt.setTableName(`tb1`);
let tagParams = stmt.newStmtParam();
tagParams.setInt([1]);
tagParams.setDouble([1.1]);
tagParams.setBoolean([true]);
tagParams.setVarchar(["hello"]);
tagParams.setNchar(["stmt"]);
// match the declared tag order: varbinary_tag before geometry_tag
tagParams.setVarBinary([vbData]);
tagParams.setGeometry([geometryData]);
await stmt.setTags(tagParams);
let bindParams = stmt.newStmtParam();
const currentMillis = new Date().getTime();
bindParams.setTimestamp([currentMillis]);
bindParams.setInt([1]);
bindParams.setDouble([1.1]);
bindParams.setBoolean([true]);
bindParams.setVarchar(["hello"]);
bindParams.setNchar(["stmt"]);
// match the declared column order: varbinary_col before geometry_col
bindParams.setVarBinary([vbData]);
bindParams.setGeometry([geometryData]);
await stmt.bind(bindParams);
await stmt.batch();
await stmt.exec();
let sql = 'SELECT * FROM all_type_example.stb limit 100';
let wsRows = await wsSql.query(sql);
let meta = wsRows.getMeta();
console.log("wsRow:meta:=>", meta);
while (await wsRows.next()) {
let row = wsRows.getData();
console.log(row);
}
} catch (err) {
console.error(`Failed to create database all_type_example or stable stb, ErrCode: ${err.code}, ErrMessage: ${err.message}`);
throw err;
} finally {
if (stmt) {
await stmt.close();
}
if (wsSql) {
await wsSql.close();
}
}
}
async function test() {
await json_tag_example()
await all_type_example()
taos.destroy();
}
test()

Some files were not shown because too many files have changed in this diff.