Merge branch 'develop' into feature/TD-4038

commit 49948f2683
@@ -1,30 +1,49 @@
 version: 1.0.{build}

-os: Visual Studio 2015
+image:
+  - Visual Studio 2015
+  - macos

 environment:
   matrix:
     - ARCH: amd64
     - ARCH: x86

+matrix:
+  exclude:
+    - image: macos
+      ARCH: x86
+
+for:
+  -
+    matrix:
+      only:
+        - image: Visual Studio 2015
+
+    clone_folder: c:\dev\TDengine
+    clone_depth: 1
-clone_folder: c:\dev\TDengine
-clone_depth: 1
-init:
+
+    init:
       - call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %ARCH%

     before_build:
       - cd c:\dev\TDengine
       - md build

     build_script:
       - cd build
       - cmake -G "NMake Makefiles" ..
       - nmake install

+  -
+    matrix:
+      only:
+        - image: macos
+
+    clone_depth: 1
+
+    build_script:
+      - mkdir debug
+      - cd debug
+      - cmake .. > /dev/null
+      - make > /dev/null

 notifications:
 - provider: Email
   to:
   - sangshuduo@gmail.com

   on_build_success: true
   on_build_failure: true
   on_build_status_changed: true
@@ -0,0 +1,180 @@ (new file)
---
kind: pipeline
name: test_amd64

platform:
  os: linux
  arch: amd64

steps:
- name: smoke_test
  image: python:3.8
  commands:
  - apt-get update
  - apt-get install -y cmake build-essential gcc
  - pip3 install psutil
  - pip3 install guppy3
  - pip3 install src/connector/python/linux/python3/
  - mkdir debug
  - cd debug
  - cmake ..
  - make
  - cd ../tests
  - ./test-all.sh smoke
  when:
    branch:
    - develop
    - master

- name: crash_gen
  image: python:3.8
  commands:
  - pip3 install requests
  - pip3 install src/connector/python/linux/python3/
  - pip3 install psutil
  - pip3 install guppy3
  - cd tests/pytest
  - ./crash_gen.sh -a -p -t 4 -s 2000
  when:
    branch:
    - develop
    - master

---
kind: pipeline
name: test_arm64

platform:
  os: linux
  arch: arm64

steps:
- name: build
  image: gcc
  commands:
  - apt-get update
  - apt-get install -y cmake build-essential
  - mkdir debug
  - cd debug
  - cmake .. -DCPUTYPE=aarch64 > /dev/null
  - make
  when:
    branch:
    - develop
    - master

---
kind: pipeline
name: test_arm

platform:
  os: linux
  arch: arm

steps:
- name: build
  image: arm32v7/ubuntu:bionic
  commands:
  - apt-get update
  - apt-get install -y cmake build-essential
  - mkdir debug
  - cd debug
  - cmake .. -DCPUTYPE=aarch32 > /dev/null
  - make
  when:
    branch:
    - develop
    - master

---
kind: pipeline
name: build_trusty

platform:
  os: linux
  arch: amd64

steps:
- name: build
  image: ubuntu:trusty
  commands:
  - apt-get update
  - apt-get install -y gcc cmake3 build-essential git binutils-2.26

  - mkdir debug
  - cd debug
  - cmake ..
  - make
  when:
    branch:
    - develop
    - master

---
kind: pipeline
name: build_xenial

platform:
  os: linux
  arch: amd64

steps:
- name: build
  image: ubuntu:xenial
  commands:
  - apt-get update
  - apt-get install -y gcc cmake build-essential
  - mkdir debug
  - cd debug
  - cmake ..
  - make
  when:
    branch:
    - develop
    - master

---
kind: pipeline
name: build_bionic
platform:
  os: linux
  arch: amd64

steps:
- name: build
  image: ubuntu:bionic
  commands:
  - apt-get update
  - apt-get install -y gcc cmake build-essential
  - mkdir debug
  - cd debug
  - cmake ..
  - make
  when:
    branch:
    - develop
    - master

---
kind: pipeline
name: goodbye

platform:
  os: linux
  arch: amd64

steps:
- name: 64-bit
  image: alpine
  commands:
  - echo 64-bit is good.
  when:
    branch:
    - develop
    - master

depends_on:
- test_arm64
- test_amd64
.travis.yml (298 lines)
@@ -1,298 +0,0 @@ (file removed)
#
# Configuration
#
#
# Build Matrix
#
branches:
  only:
    - master
    - develop
    - coverity_scan
    - /^.*ci-.*$/

matrix:
  - os: linux
    dist: bionic
    language: c

    git:
      - depth: 1

    compiler: gcc
    env: DESC="linux/gcc build and test"

    addons:
      apt:
        packages:
          - build-essential
          - cmake
          - net-tools
          - python3.8
          - libc6-dbg
          - valgrind
          - psmisc
          - unixodbc
          - unixodbc-dev
          - mono-complete

    before_script:
      - export TZ=Asia/Harbin
      - date
      - curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && python3.8 get-pip.py
      - python3.8 -m pip install --upgrade pip setuptools
      - cd ${TRAVIS_BUILD_DIR}
      - mkdir debug
      - cd debug

    script:
      - cmake .. > /dev/null
      - make > /dev/null

    after_success:
      - travis_wait 20
      - |-
        case $TRAVIS_OS_NAME in
          linux)
          cd ${TRAVIS_BUILD_DIR}/debug
          make install > /dev/null || travis_terminate $?

          py3ver=`python3 --version|awk '{print $2}'|cut -d "." -f 1,2` && apt install python$py3ver-dev
          pip3 install psutil
          pip3 install guppy3
          pip3 install --user ${TRAVIS_BUILD_DIR}/src/connector/python/linux/python3/

          cd ${TRAVIS_BUILD_DIR}/tests/examples/C#/taosdemo
          mcs -out:taosdemo *.cs || travis_terminate $?
          pkill -TERM -x taosd
          fuser -k -n tcp 6030
          sleep 1
          ${TRAVIS_BUILD_DIR}/debug/build/bin/taosd -c ${TRAVIS_BUILD_DIR}/debug/test/cfg > /dev/null &
          sleep 5
          mono taosdemo -Q DEFAULT -y || travis_terminate $?
          pkill -KILL -x taosd
          fuser -k -n tcp 6030
          sleep 1

          cd ${TRAVIS_BUILD_DIR}/tests
          ./test-all.sh smoke || travis_terminate $?
          sleep 1

          cd ${TRAVIS_BUILD_DIR}/tests/pytest
          pkill -TERM -x taosd
          fuser -k -n tcp 6030
          sleep 1
          ./crash_gen.sh -a -p -t 4 -s 2000|| travis_terminate $?
          sleep 1

          cd ${TRAVIS_BUILD_DIR}/tests/pytest
          ./valgrind-test.sh 2>&1 > mem-error-out.log
          sleep 1

          # Color setting
          RED='\033[0;31m'
          GREEN='\033[1;32m'
          GREEN_DARK='\033[0;32m'
          GREEN_UNDERLINE='\033[4;32m'
          NC='\033[0m'

          grep 'start to execute\|ERROR SUMMARY' mem-error-out.log|grep -v 'grep'|uniq|tee uniq-mem-error-out.log

          for memError in `grep 'ERROR SUMMARY' uniq-mem-error-out.log | awk '{print $4}'`
          do
            if [ -n "$memError" ]; then
              if [ "$memError" -gt 12 ]; then
                echo -e "${RED} ## Memory errors number valgrind reports is $memError.\
                  More than our threshold! ## ${NC}"
                travis_terminate $memError
              fi
            fi
          done

          grep 'start to execute\|definitely lost:' mem-error-out.log|grep -v 'grep'|uniq|tee uniq-definitely-lost-out.log
          for defiMemError in `grep 'definitely lost:' uniq-definitely-lost-out.log | awk '{print $7}'`
          do
            if [ -n "$defiMemError" ]; then
              if [ "$defiMemError" -gt 13 ]; then
                echo -e "${RED} ## Memory errors number valgrind reports \
                  Definitely lost is $defiMemError. More than our threshold! ## ${NC}"
                travis_terminate $defiMemError
              fi
            fi
          done

          ;;
        esac

  - os: linux
    dist: bionic
    language: c
    compiler: gcc
    env: COVERITY_SCAN=true
    git:
      - depth: 1

    script:
      - echo "this job is for coverity scan"

    addons:
      coverity_scan:
        # GitHub project metadata
        # ** specific to your project **
        project:
          name: TDengine
          version: 2.x
          description: TDengine

        # Where email notification of build analysis results will be sent
        notification_email: sdsang@taosdata.com, slguan@taosdata.com

        # Commands to prepare for build_command
        # ** likely specific to your build **
        build_command_prepend: cmake . > /dev/null

        # The command that will be added as an argument to "cov-build" to compile your project for analysis,
        # ** likely specific to your build **
        build_command: make

        # Pattern to match selecting branches that will run analysis. We recommend leaving this set to 'coverity_scan'.
        # Take care in resource usage, and consider the build frequency allowances per
        #   https://scan.coverity.com/faq#frequency
        branch_pattern: coverity_scan

  - os: linux
    dist: trusty
    language: c
    git:
      - depth: 1

    addons:
      apt:
        packages:
          - build-essential
          - cmake
          - binutils-2.26
          - unixodbc
          - unixodbc-dev
    env:
      - DESC="trusty/gcc-4.8/bintuils-2.26 build"

    before_script:
      - export TZ=Asia/Harbin
      - date
      - cd ${TRAVIS_BUILD_DIR}
      - mkdir debug
      - cd debug

    script:
      - cmake .. > /dev/null
      - export PATH=/usr/lib/binutils-2.26/bin:$PATH && make

  - os: linux
    dist: bionic
    language: c
    compiler: clang
    env: DESC="linux/clang build"
    git:
      - depth: 1

    addons:
      apt:
        packages:
          - build-essential
          - cmake
          - unixodbc
          - unixodbc-dev

    before_script:
      - export TZ=Asia/Harbin
      - date
      - cd ${TRAVIS_BUILD_DIR}
      - mkdir debug
      - cd debug

    script:
      - cmake .. > /dev/null
      - make > /dev/null

  - os: linux
    arch: arm64
    dist: bionic
    language: c
    compiler: clang
    env: DESC="arm64 linux/clang build"
    git:
      - depth: 1

    addons:
      apt:
        packages:
          - build-essential
          - cmake

    before_script:
      - export TZ=Asia/Harbin
      - date
      - cd ${TRAVIS_BUILD_DIR}
      - mkdir debug
      - cd debug

    script:
      - if [ "${TRAVIS_CPU_ARCH}" == "arm64" ]; then
          cmake .. -DCPUTYPE=aarch64 > /dev/null;
        else
          cmake .. > /dev/null;
        fi
      - make > /dev/null

  - os: linux
    arch: arm64
    dist: xenial
    language: c
    git:
      - depth: 1

    addons:
      apt:
        packages:
          - build-essential
          - cmake
          - unixodbc
          - unixodbc-dev
    env:
      - DESC="arm64 xenial build"

    before_script:
      - export TZ=Asia/Harbin
      - date
      - cd ${TRAVIS_BUILD_DIR}
      - mkdir debug
      - cd debug

    script:
      - if [ "${TRAVIS_CPU_ARCH}" == "arm64" ]; then
          cmake .. -DCPUTYPE=aarch64 > /dev/null;
        else
          cmake .. > /dev/null;
        fi
      - make > /dev/null

  - os: osx
    osx_image: xcode11.4
    language: c
    compiler: clang
    env: DESC="mac/clang build"
    git:
      - depth: 1
    addons:
      homebrew:
        - cmake
        - unixodbc

    script:
      - cd ${TRAVIS_BUILD_DIR}
      - mkdir debug
      - cd debug
      - cmake .. > /dev/null
      - make > /dev/null
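For reference, the removed after_success step fails the build when valgrind reports more than 12 errors or more than 13 definitely-lost blocks. A rough Python equivalent of that shell loop, offered only as an illustrative sketch (the log file name and thresholds are copied from the script above; nothing here is part of the repository):

```python
import re
import sys

# Thresholds copied from the removed .travis.yml loop above.
MAX_ERROR_SUMMARY = 12
MAX_DEFINITELY_LOST_BLOCKS = 13


def check_valgrind_log(path="mem-error-out.log"):
    """Exit non-zero when valgrind output exceeds the thresholds."""
    with open(path) as fh:
        text = fh.read()

    # "ERROR SUMMARY: N errors ..." -- the shell loop reads the count with awk '{print $4}'.
    error_counts = [int(n) for n in re.findall(r"ERROR SUMMARY:\s+(\d+)", text)]

    # "definitely lost: X bytes in Y blocks" -- the shell loop reads the block count ($7).
    lost_blocks = [int(n.replace(",", ""))
                   for n in re.findall(r"definitely lost:.*?in\s+([\d,]+)\s+blocks", text)]

    if any(n > MAX_ERROR_SUMMARY for n in error_counts):
        sys.exit("valgrind reported more errors than the threshold")
    if any(n > MAX_DEFINITELY_LOST_BLOCKS for n in lost_blocks):
        sys.exit("valgrind reported more definitely-lost blocks than the threshold")


if __name__ == "__main__":
    check_valgrind_log()
```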
@@ -4,7 +4,7 @@ PROJECT(TDengine)
 IF (DEFINED VERNUMBER)
   SET(TD_VER_NUMBER ${VERNUMBER})
 ELSE ()
-  SET(TD_VER_NUMBER "2.0.20.0")
+  SET(TD_VER_NUMBER "2.1.0.0")
 ENDIF ()

 IF (DEFINED VERCOMPATIBLE)
@@ -16,7 +16,6 @@ TDengine's JDBC driver implementation stays as consistent as possible with relational database drivers

 * TDengine currently does not support deleting individual data records.
 * Transactions are currently not supported.
-* The union operation between tables is currently not supported.
 * Nested queries are currently not supported.
 * Each Connection instance can have at most one open ResultSet; if a new query is executed while a ResultSet is still open, taos-jdbcdriver automatically closes the previous ResultSet.
@@ -447,7 +446,7 @@ Query OK, 1 row(s) in set (0.000141s)

-## TAOS-JDBCDriver versions and the supported TDengine and JDK versions
+## <a class="anchor" id="version"></a>TAOS-JDBCDriver versions and the supported TDengine and JDK versions

 | taos-jdbcdriver version | TDengine version | JDK version |
 | -------------------- | ----------------- | -------- |
@@ -144,7 +144,7 @@ When a new dnode joins a TDengine cluster, several cluster-related parameters are involved
 - numOfMnodes: number of management nodes (mnodes) in the system. Default: 3.
 - balance: whether load balancing is enabled. 0: no, 1: yes. Default: 1.
 - mnodeEqualVnodeNum: the number of vnodes one mnode is counted as. Default: 4.
-- offlineThreshold: dnode offline threshold; a dnode offline for longer than this is removed from the cluster. Unit: seconds. Default: 86400*100 (100 days).
+- offlineThreshold: dnode offline threshold; a dnode offline for longer than this is removed from the cluster. Unit: seconds. Default: 86400*10 (10 days).
 - statusInterval: interval at which a dnode reports its status to the mnode. Unit: seconds. Default: 1.
 - maxTablesPerVnode: maximum number of tables that can be created in each vnode. Default: 1000000.
 - maxVgroupsPerDb: maximum number of vgroups that can be used by each database.
@@ -462,31 +462,31 @@ All TDengine executables are stored in the _/usr/local/taos/bin_ directory by default

 | Keyword List |             |              |            |           |
 | ---------- | ----------- | ------------ | ---------- | --------- |
-| ABLOCKS    | CONNECTIONS | HAVING       | MODULES    | SLIMIT    |
-| ABORT      | COPY        | ID           | NCHAR      | SMALLINT  |
-| ACCOUNT    | COUNT       | IF           | NE         | SPREAD    |
-| ACCOUNTS   | CREATE      | IGNORE       | NONE       | STABLE    |
-| ADD        | CTIME       | IMMEDIATE    | NOT        | STABLES   |
-| AFTER      | DATABASE    | IMPORT       | NOTNULL    | STAR      |
-| ALL        | DATABASES   | IN           | NOW        | STATEMENT |
-| ALTER      | DAYS        | INITIALLY    | OF         | STDDEV    |
-| AND        | DEFERRED    | INSERT       | OFFSET     | STREAM    |
-| AS         | DELIMITERS  | INSTEAD      | OR         | STREAMS   |
-| ASC        | DESC        | INTEGER      | ORDER      | STRING    |
-| ATTACH     | DESCRIBE    | INTERVAL     | PASS       | SUM       |
-| AVG        | DETACH      | INTO         | PERCENTILE | TABLE     |
-| BEFORE     | DIFF        | IP           | PLUS       | TABLES    |
-| BEGIN      | DISTINCT    | IS           | PRAGMA     | TAG       |
-| BETWEEN    | DIVIDE      | ISNULL       | PREV       | TAGS      |
-| BIGINT     | DNODE       | JOIN         | PRIVILEGE  | TBLOCKS   |
-| BINARY     | DNODES      | KEEP         | QUERIES    | TBNAME    |
-| BITAND     | DOT         | KEY          | QUERY      | TIMES     |
-| BITNOT     | DOUBLE      | KILL         | RAISE      | TIMESTAMP |
-| BITOR      | DROP        | LAST         | REM        | TINYINT   |
-| BOOL       | EACH        | LE           | REPLACE    | TOP       |
-| BOTTOM     | END         | LEASTSQUARES | REPLICA    | TOPIC     |
-| BY         | EQ          | LIKE         | RESET      | TRIGGER   |
-| CACHE      | EXISTS      | LIMIT        | RESTRICT   | UMINUS    |
+| ABLOCKS    | CONNECTIONS | HAVING       | MODULES    | SMALLINT  |
+| ABORT      | COPY        | ID           | NCHAR      | SPREAD    |
+| ACCOUNT    | COUNT       | IF           | NE         | STABLE    |
+| ACCOUNTS   | CREATE      | IGNORE       | NONE       | STABLES   |
+| ADD        | CTIME       | IMMEDIATE    | NOT        | STAR      |
+| AFTER      | DATABASE    | IMPORT       | NOTNULL    | STATEMENT |
+| ALL        | DATABASES   | IN           | NOW        | STDDEV    |
+| ALTER      | DAYS        | INITIALLY    | OF         | STREAM    |
+| AND        | DEFERRED    | INSERT       | OFFSET     | STREAMS   |
+| AS         | DELIMITERS  | INSTEAD      | OR         | STRING    |
+| ASC        | DESC        | INTEGER      | ORDER      | SUM       |
+| ATTACH     | DESCRIBE    | INTERVAL     | PASS       | TABLE     |
+| AVG        | DETACH      | INTO         | PERCENTILE | TABLES    |
+| BEFORE     | DIFF        | IP           | PLUS       | TAG       |
+| BEGIN      | DISTINCT    | IS           | PRAGMA     | TAGS      |
+| BETWEEN    | DIVIDE      | ISNULL       | PREV       | TBLOCKS   |
+| BIGINT     | DNODE       | JOIN         | PRIVILEGE  | TBNAME    |
+| BINARY     | DNODES      | KEEP         | QUERIES    | TIMES     |
+| BITAND     | DOT         | KEY          | QUERY      | TIMESTAMP |
+| BITNOT     | DOUBLE      | KILL         | RAISE      | TINYINT   |
+| BITOR      | DROP        | LAST         | REM        | TOP       |
+| BOOL       | EACH        | LE           | REPLACE    | TOPIC     |
+| BOTTOM     | END         | LEASTSQUARES | REPLICA    | TRIGGER   |
+| BY         | EQ          | LIKE         | RESET      | UMINUS    |
+| CACHE      | EXISTS      | LIMIT        | RESTRICT   | UNION     |
 | CASCADE    | EXPLAIN     | LINEAR       | ROW        | UPLUS     |
 | CHANGE     | FAIL        | LOCAL        | ROWS       | USE       |
 | CLOG       | FILL        | LP           | RP         | USER      |
@@ -498,5 +498,5 @@ All TDengine executables are stored in the _/usr/local/taos/bin_ directory by default
 | CONCAT     | GLOB        | METRICS      | SHOW       | VIEW      |
 | CONFIGS    | GRANTS      | MIN          | SLASH      | WAVG      |
 | CONFLICT   | GROUP       | MINUS        | SLIDING    | WHERE     |
-| CONNECTION | GT          | MNODES       |            |           |
+| CONNECTION | GT          | MNODES       | SLIMIT     |           |
@@ -407,7 +407,7 @@ SELECT select_expr [, select_expr ...]
     [INTERVAL (interval_val [, interval_offset])]
     [SLIDING sliding_val]
     [FILL fill_val]
-    [GROUP BY col_list <!-- [HAVING having_condition] -->]
+    [GROUP BY col_list]
     [ORDER BY col_list { DESC | ASC }]
     [SLIMIT limit_val [SOFFSET offset_val]]
     [LIMIT limit_val [OFFSET offset_val]]
@@ -647,7 +647,7 @@ Query OK, 1 row(s) in set (0.001091s)
 3. Starting with version 2.0.17, condition filtering supports the BETWEEN AND syntax; for example, `WHERE col2 BETWEEN 1.5 AND 3.25` expresses the query condition "1.5 ≤ col2 ≤ 3.25".

 <!--
-### HAVING filtering after GROUP BY
+### <a class="anchor" id="having"></a>HAVING filtering after GROUP BY

 Starting with version 2.0.20, GROUP BY may be followed by a HAVING clause that further filters the grouped results. The HAVING clause may use aggregate and selection functions as filter conditions (LEASTSQUARES, TOP, BOTTOM, and LAST_ROW are not yet supported).

@@ -657,6 +657,16 @@ SELECT AVG(f1), SPREAD(f1, f2, st2.f1) FROM st2 WHERE f1 > 0 GROUP BY f1 HAVING
 ```
 -->

+### <a class="anchor" id="union"></a>UNION ALL operator
+
+```mysql
+SELECT ...
+UNION ALL SELECT ...
+[UNION ALL SELECT ...]
+```
+
+TDengine supports the UNION ALL operator. That is, if several SELECT clauses return result sets with exactly the same structure (column names, column types, number of columns, order), they can be merged into one result set with UNION ALL. Only UNION ALL is supported, which means no de-duplication is performed while the result sets are merged.
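As an illustration of the UNION ALL behaviour documented above, here is a hedged sketch that issues such a query through the Python connector; the connection parameters and table names (tb1, tb2, column speed) are placeholders, and the taos.connect()/cursor usage is assumed from the connector's standard interface rather than taken from this commit:

```python
import taos

# Placeholder connection parameters and table names; adjust for a real deployment.
conn = taos.connect(host="localhost", user="root", password="taosdata", database="db")
cursor = conn.cursor()

# Two SELECTs with identical column layout, merged without de-duplication.
cursor.execute(
    "SELECT ts, speed FROM tb1 WHERE speed > 10 "
    "UNION ALL "
    "SELECT ts, speed FROM tb2 WHERE speed > 10"
)
for row in cursor.fetchall():
    print(row)

cursor.close()
conn.close()
```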
 ### SQL examples

 - For the examples below, table tb1 is created with the following statement
@@ -1,6 +1,6 @@
 name: tdengine
 base: core18
-version: '2.0.20.0'
+version: '2.1.0.0'
 icon: snap/gui/t-dengine.svg
 summary: an open-source big data platform designed and optimized for IoT.
 description: |
@@ -72,7 +72,7 @@ parts:
         - usr/bin/taosd
         - usr/bin/taos
         - usr/bin/taosdemo
-        - usr/lib/libtaos.so.2.0.20.0
+        - usr/lib/libtaos.so.2.1.0.0
         - usr/lib/libtaos.so.1
         - usr/lib/libtaos.so
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:

 setuptools.setup(
     name="taos",
-    version="2.0.8",
+    version="2.0.9",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",
@@ -21,11 +21,17 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
         _timestamp_converter = _convert_microsecond_to_datetime

     if num_of_rows > 0:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]
     else:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]


 def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
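The hunk above changes the timestamp column conversion from a plain map() into a list comprehension that maps the BIGINT NULL sentinel to Python None before converting the remaining values. A minimal self-contained sketch of the same idea, using a stand-in sentinel and converter instead of the connector's FieldType constants:

```python
import ctypes
import datetime

# Stand-in for FieldType.C_BIGINT_NULL; the real constant lives in the connector's constants module.
C_BIGINT_NULL = -2 ** 63


def _convert_millisecond_to_datetime(milli):
    # Stand-in converter mirroring the connector's millisecond branch.
    return datetime.datetime.fromtimestamp(milli / 1000.0)


def crow_timestamp_to_python(data, num_of_rows):
    # Cast the raw column buffer to int64, turn the NULL sentinel into None,
    # and convert every remaining value -- the same shape as the new code above.
    return [
        None if ele == C_BIGINT_NULL else _convert_millisecond_to_datetime(ele)
        for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]
    ]


# Usage sketch with an in-memory buffer standing in for a TAOS result column.
buf = (ctypes.c_int64 * 3)(1609459200000, C_BIGINT_NULL, 1609459201000)
print(crow_timestamp_to_python(ctypes.cast(buf, ctypes.c_void_p), 3))
```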
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:

 setuptools.setup(
     name="taos",
-    version="2.0.7",
+    version="2.0.9",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",
@@ -21,11 +21,17 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
         _timestamp_converter = _convert_microsecond_to_datetime

     if num_of_rows > 0:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]
     else:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]


 def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:

 setuptools.setup(
     name="taos",
-    version="2.0.7",
+    version="2.0.9",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",
@@ -21,11 +21,17 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
         _timestamp_converter = _convert_microsecond_to_datetime

     if num_of_rows > 0:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]
     else:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]


 def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:

 setuptools.setup(
     name="taos",
-    version="2.0.7",
+    version="2.0.9",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",
@@ -21,11 +21,17 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
         _timestamp_converter = _convert_microsecond_to_datetime

     if num_of_rows > 0:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]
     else:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]


 def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:

 setuptools.setup(
     name="taos",
-    version="2.0.7",
+    version="2.0.9",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",
@@ -21,11 +21,17 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
         _timestamp_converter = _convert_microsecond_to_datetime

     if num_of_rows > 0:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]
     else:
-        return list(map(_timestamp_converter, ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]))
+        return [
+            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
+                data, ctypes.POINTER(
+                    ctypes.c_int64))[
+                :abs(num_of_rows)]]


 def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
(One file's diff is suppressed because it is too large.)
@@ -72,6 +72,7 @@ enum _show_db_index {
   TSDB_SHOW_DB_WALLEVEL_INDEX,
   TSDB_SHOW_DB_FSYNC_INDEX,
   TSDB_SHOW_DB_COMP_INDEX,
+  TSDB_SHOW_DB_CACHELAST_INDEX,
   TSDB_SHOW_DB_PRECISION_INDEX,
   TSDB_SHOW_DB_UPDATE_INDEX,
   TSDB_SHOW_DB_STATUS_INDEX,

@@ -99,11 +100,13 @@ enum _describe_table_index {
   TSDB_MAX_DESCRIBE_METRIC
 };

+#define COL_NOTE_LEN 128
+
 typedef struct {
   char field[TSDB_COL_NAME_LEN + 1];
   char type[16];
   int length;
-  char note[128];
+  char note[COL_NOTE_LEN];
 } SColDes;

 typedef struct {

@@ -132,6 +135,7 @@ typedef struct {
   int8_t wallevel;
   int32_t fsync;
   int8_t comp;
+  int8_t cachelast;
   char precision[8];   // time resolution
   int8_t update;
   char status[16];

@@ -523,7 +527,7 @@ int main(int argc, char *argv[]) {

   /* Parse our arguments; every option seen by parse_opt will be
      reflected in arguments. */
-  if (argc > 1)
+  if (argc > 2)
     parse_args(argc, argv, &g_args);

   argp_parse(&argp, argc, argv, 0, 0, &g_args);

@@ -974,6 +978,7 @@ int taosDumpOut(struct arguments *arguments) {
     dbInfos[count]->wallevel = *((int8_t *)row[TSDB_SHOW_DB_WALLEVEL_INDEX]);
     dbInfos[count]->fsync = *((int32_t *)row[TSDB_SHOW_DB_FSYNC_INDEX]);
     dbInfos[count]->comp = (int8_t)(*((int8_t *)row[TSDB_SHOW_DB_COMP_INDEX]));
+    dbInfos[count]->cachelast = (int8_t)(*((int8_t *)row[TSDB_SHOW_DB_CACHELAST_INDEX]));

     strncpy(dbInfos[count]->precision, (char *)row[TSDB_SHOW_DB_PRECISION_INDEX], fields[TSDB_SHOW_DB_PRECISION_INDEX].bytes);
     //dbInfos[count]->precision = *((int8_t *)row[TSDB_SHOW_DB_PRECISION_INDEX]);

@@ -1188,16 +1193,16 @@ int taosGetTableDes(char* dbName, char *table, STableDef *tableDes, TAOS* taosCon
       case TSDB_DATA_TYPE_BINARY: {
         memset(tableDes->cols[i].note, 0, sizeof(tableDes->cols[i].note));
         tableDes->cols[i].note[0] = '\'';
-        char tbuf[COMMAND_SIZE];
-        converStringToReadable((char *)row[0], length[0], tbuf, COMMAND_SIZE);
+        char tbuf[COL_NOTE_LEN];
+        converStringToReadable((char *)row[0], length[0], tbuf, COL_NOTE_LEN);
         char* pstr = stpcpy(&(tableDes->cols[i].note[1]), tbuf);
         *(pstr++) = '\'';
         break;
       }
       case TSDB_DATA_TYPE_NCHAR: {
         memset(tableDes->cols[i].note, 0, sizeof(tableDes->cols[i].note));
-        char tbuf[COMMAND_SIZE];
-        convertNCharToReadable((char *)row[0], length[0], tbuf, COMMAND_SIZE);
+        char tbuf[COL_NOTE_LEN-2];   // need reserve 2 bytes for ' '
+        convertNCharToReadable((char *)row[0], length[0], tbuf, COL_NOTE_LEN);
         sprintf(tableDes->cols[i].note, "\'%s\'", tbuf);
         break;
       }

@@ -1280,9 +1285,10 @@ void taosDumpCreateDbClause(SDbInfo *dbInfo, bool isDumpProperty, FILE *fp) {
   pstr += sprintf(pstr, "CREATE DATABASE IF NOT EXISTS %s ", dbInfo->name);
   if (isDumpProperty) {
     pstr += sprintf(pstr,
-        "TABLES %d VGROUPS %d REPLICA %d QUORUM %d DAYS %d KEEP %s CACHE %d BLOCKS %d MINROWS %d MAXROWS %d WALLEVEL %d FYNC %d COMP %d PRECISION '%s' UPDATE %d",
-        dbInfo->ntables, dbInfo->vgroups, dbInfo->replica, dbInfo->quorum, dbInfo->days, dbInfo->keeplist, dbInfo->cache,
-        dbInfo->blocks, dbInfo->minrows, dbInfo->maxrows, dbInfo->wallevel, dbInfo->fsync, dbInfo->comp, dbInfo->precision, dbInfo->update);
+        "REPLICA %d QUORUM %d DAYS %d KEEP %s CACHE %d BLOCKS %d MINROWS %d MAXROWS %d FSYNC %d CACHELAST %d COMP %d PRECISION '%s' UPDATE %d",
+        dbInfo->replica, dbInfo->quorum, dbInfo->days, dbInfo->keeplist, dbInfo->cache,
+        dbInfo->blocks, dbInfo->minrows, dbInfo->maxrows, dbInfo->fsync, dbInfo->cachelast,
+        dbInfo->comp, dbInfo->precision, dbInfo->update);
   }

   pstr += sprintf(pstr, ";");
@@ -171,7 +171,7 @@ typedef struct HttpThread {
   EpollFd pollFd;
   int32_t numOfContexts;
   int32_t threadId;
-  char    label[HTTP_LABEL_SIZE];
+  char    label[HTTP_LABEL_SIZE << 1];
   bool (*processData)(HttpContext *pContext);
 } HttpThread;
@@ -4466,13 +4466,18 @@ SArray* getOrderCheckColumns(SQueryAttr* pQuery) {
     for(int32_t i = 0; i < numOfCols; ++i) {
       SColIndex* index = taosArrayGet(pOrderColumns, i);
       for(int32_t j = 0; j < pQuery->numOfOutput; ++j) {
-        if (index->colId == pQuery->pExpr1[j].base.colInfo.colId) {
+        SSqlExpr* pExpr = &pQuery->pExpr1[j].base;
+        int32_t functionId = pExpr->functionId;
+
+        if (index->colId == pExpr->colInfo.colId &&
+            (functionId == TSDB_FUNC_PRJ || functionId == TSDB_FUNC_TAG || functionId == TSDB_FUNC_TS)) {
           index->colIndex = j;
-          index->colId = pQuery->pExpr1[j].base.resColId;
+          index->colId = pExpr->resColId;
         }
       }
     }
   }

   return pOrderColumns;
 }

@@ -4804,7 +4809,7 @@ static SSDataBlock* doArithmeticOperation(void* param, bool* newgroup) {
   }

   // Return result of the previous group in the firstly.
-  if (newgroup && pRes->info.rows > 0) {
+  if (*newgroup && pRes->info.rows > 0) {
     pArithInfo->existDataBlock = pBlock;
     clearNumOfRes(pInfo->pCtx, pOperator->numOfOutput);
     return pInfo->pRes;
@@ -295,7 +295,7 @@ void *rpcOpen(const SRpcInit *pInit) {
       return NULL;
     }
   } else {
-    pRpc->pCache = rpcOpenConnCache(pRpc->sessions, rpcCloseConn, pRpc->tmrCtrl, pRpc->idleTime * 30);
+    pRpc->pCache = rpcOpenConnCache(pRpc->sessions, rpcCloseConn, pRpc->tmrCtrl, pRpc->idleTime * 20);
     if ( pRpc->pCache == NULL ) {
       tError("%s failed to init connection cache", pRpc->label);
       rpcClose(pRpc);
@@ -539,7 +539,7 @@ static void taosNetTestServer(char *host, int32_t startPort, int32_t pkgLen) {
 }

 void taosNetTest(char *role, char *host, int32_t port, int32_t pkgLen) {
-  // tscEmbedded = 1;
+  tscEmbedded = 1;
   if (host == NULL) host = tsLocalFqdn;
   if (port == 0) port = tsServerPort;
   if (pkgLen <= 10) pkgLen = 1000;

@@ -550,6 +550,7 @@ void taosNetTest(char *role, char *host, int32_t port, int32_t pkgLen) {
   } else if (0 == strcmp("server", role)) {
     taosNetTestServer(host, port, pkgLen);
   } else if (0 == strcmp("rpc", role)) {
+    tscEmbedded = 0;
     taosNetTestRpc(host, port, pkgLen);
   } else if (0 == strcmp("sync", role)) {
     taosNetCheckSync(host, port);

@@ -559,5 +560,5 @@ void taosNetTest(char *role, char *host, int32_t port, int32_t pkgLen) {
     taosNetTestStartup(host, port);
   }

-  // tscEmbedded = 0;
+  tscEmbedded = 0;
 }
@@ -28,20 +28,22 @@ class TDTestCase:
         sql = "select server_version()"
         ret = tdSql.query(sql)
         version = tdSql.getData(0, 0)[0:3]
-        expectedVersion = "2.0"
-        if(version == expectedVersion):
-            tdLog.info("sql:%s, row:%d col:%d data:%s == expect:%s" % (sql, 0, 0, version, expectedVersion))
+        expectedVersion_dev = "2.0"
+        expectedVersion_master = "2.1"
+        if(version == expectedVersion_dev or version == expectedVersion_master):
+            tdLog.info("sql:%s, row:%d col:%d data:%s == expect" % (sql, 0, 0, version))
         else:
-            tdLog.exit("sql:%s, row:%d col:%d data:%s != expect:%s" % (sql, 0, 0, version, expectedVersion))
+            tdLog.exit("sql:%s, row:%d col:%d data:%s != expect:%s or %s " % (sql, 0, 0, version, expectedVersion_dev, expectedVersion_master))

         sql = "select client_version()"
         ret = tdSql.query(sql)
         version = tdSql.getData(0, 0)[0:3]
-        expectedVersion = "2.0"
-        if(version == expectedVersion):
-            tdLog.info("sql:%s, row:%d col:%d data:%s == expect:%s" % (sql, 0, 0, version, expectedVersion))
+        expectedVersion_dev = "2.0"
+        expectedVersion_master = "2.1"
+        if(version == expectedVersion_dev or version == expectedVersion_master):
+            tdLog.info("sql:%s, row:%d col:%d data:%s == expect" % (sql, 0, 0, version))
         else:
-            tdLog.exit("sql:%s, row:%d col:%d data:%s != expect:%s" % (sql, 0, 0, version, expectedVersion))
+            tdLog.exit("sql:%s, row:%d col:%d data:%s != expect:%s or %s " % (sql, 0, 0, version, expectedVersion_dev, expectedVersion_master))


     def stop(self):
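The test change above accepts either the develop-branch prefix (2.0) or the master-branch prefix (2.1) when checking server_version() and client_version(). A minimal sketch of that comparison in isolation (the function name and sample strings are illustrative only):

```python
def version_matches(full_version, accepted_prefixes=("2.0", "2.1")):
    # Compare only the first three characters ("major.minor"), exactly as the
    # test does with tdSql.getData(0, 0)[0:3].
    return full_version[0:3] in accepted_prefixes


# Illustrative values only.
assert version_matches("2.0.20.0")
assert version_matches("2.1.0.0")
assert not version_matches("1.6.5.4")
```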
@@ -17332,3 +17332,168 @@
    fun:PyVectorcall_Call
    fun:_PyEval_EvalFrameDefault
 }
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:lib_build_and_cache_attr
+   fun:lib_getattr
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyFunction_Vectorcall
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyEval_EvalCodeWithName
+   fun:PyEval_EvalCode
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyVectorcall_Call
+   fun:_PyEval_EvalFrameDefault
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:lib_build_and_cache_attr
+   fun:lib_getattr
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyFunction_Vectorcall
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   fun:PyEval_EvalCode
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyEval_EvalCodeWithName
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:_my_Py_InitModule
+   fun:lib_getattr
+   fun:b_init_cffi_1_0_external_module
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyObject_CallMethod
+   fun:_cffi_init
+   fun:PyInit__bcrypt
+   fun:_PyImport_LoadDynamicModuleWithSpec
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyVectorcall_Call
+   fun:_PyEval_EvalFrameDefault
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:_PyObject_GC_New
+   fun:lib_getattr
+   fun:ffi_internal_new
+   fun:b_init_cffi_1_0_external_module
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyObject_CallMethod
+   fun:_cffi_init
+   fun:PyInit__bcrypt
+   fun:_PyImport_LoadDynamicModuleWithSpec
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyVectorcall_Call
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:lib_build_cpython_func.isra.87
+   fun:lib_build_and_cache_attr
+   fun:lib_getattr
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyFunction_Vectorcall
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:lib_build_and_cache_attr
+   fun:lib_getattr
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyFunction_Vectorcall
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   obj:/usr/bin/python3.8
+   fun:_PyEval_EvalFrameDefault
+   fun:_PyEval_EvalCodeWithName
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:_my_Py_InitModule
+   fun:b_init_cffi_1_0_external_module
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyObject_CallMethod
+   fun:_cffi_init
+   fun:PyInit__bcrypt
+   fun:_PyImport_LoadDynamicModuleWithSpec
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyVectorcall_Call
+   fun:_PyEval_EvalFrameDefault
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:_my_Py_InitModule
+   fun:b_init_cffi_1_0_external_module
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyObject_CallMethod
+   fun:PyInit__openssl
+   fun:_PyImport_LoadDynamicModuleWithSpec
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyVectorcall_Call
+   fun:_PyEval_EvalFrameDefault
+}
+{
+   <insert_a_suppression_name_here>
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:malloc
+   fun:_PyObject_GC_New
+   fun:ffi_internal_new
+   fun:b_init_cffi_1_0_external_module
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyObject_CallMethod
+   fun:_cffi_init
+   fun:PyInit__bcrypt
+   fun:_PyImport_LoadDynamicModuleWithSpec
+   obj:/usr/bin/python3.8
+   obj:/usr/bin/python3.8
+   fun:PyVectorcall_Call
+}