Merge branch 'develop' into coverity_scan
|
@ -1,3 +1,6 @@
|
||||||
[submodule "src/connector/go"]
|
[submodule "src/connector/go"]
|
||||||
path = src/connector/go
|
path = src/connector/go
|
||||||
url = https://github.com/taosdata/driver-go
|
url = https://github.com/taosdata/driver-go
|
||||||
|
[submodule "src/connector/grafanaplugin"]
|
||||||
|
path = src/connector/grafanaplugin
|
||||||
|
url = https://github.com/taosdata/grafanaplugin
|
||||||
|
|
|
@ -18,6 +18,7 @@ SET(TD_COVER FALSE)
|
||||||
SET(TD_MEM_CHECK FALSE)
|
SET(TD_MEM_CHECK FALSE)
|
||||||
|
|
||||||
SET(TD_PAGMODE_LITE FALSE)
|
SET(TD_PAGMODE_LITE FALSE)
|
||||||
|
SET(TD_SOMODE_STATIC FALSE)
|
||||||
SET(TD_GODLL FALSE)
|
SET(TD_GODLL FALSE)
|
||||||
|
|
||||||
SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
|
SET(TD_COMMUNITY_DIR ${PROJECT_SOURCE_DIR})
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
[](https://ci.appveyor.com/project/sangshuduo/tdengine-2n8ge/branch/master)
|
[](https://ci.appveyor.com/project/sangshuduo/tdengine-2n8ge/branch/master)
|
||||||
[](https://coveralls.io/github/taosdata/TDengine?branch=develop)
|
[](https://coveralls.io/github/taosdata/TDengine?branch=develop)
|
||||||
[](https://bestpractices.coreinfrastructure.org/projects/4201)
|
[](https://bestpractices.coreinfrastructure.org/projects/4201)
|
||||||
|

|
||||||
|
|
||||||
[](https://www.taosdata.com)
|
[](https://www.taosdata.com)
|
||||||
|
|
||||||
|
|
|
@ -110,7 +110,7 @@ IF (TD_WINDOWS)
|
||||||
ADD_DEFINITIONS(-D_MBCS -D_CRT_SECURE_NO_DEPRECATE -D_CRT_NONSTDC_NO_DEPRECATE)
|
ADD_DEFINITIONS(-D_MBCS -D_CRT_SECURE_NO_DEPRECATE -D_CRT_NONSTDC_NO_DEPRECATE)
|
||||||
SET(CMAKE_GENERATOR "NMake Makefiles" CACHE INTERNAL "" FORCE)
|
SET(CMAKE_GENERATOR "NMake Makefiles" CACHE INTERNAL "" FORCE)
|
||||||
IF (NOT TD_GODLL)
|
IF (NOT TD_GODLL)
|
||||||
SET(COMMON_FLAGS "/nologo /WX /Oi /Oy- /Gm- /EHsc /MT /GS /Gy /fp:precise /Zc:wchar_t /Zc:forScope /Gd /errorReport:prompt /analyze-")
|
SET(COMMON_FLAGS "/nologo /WX /wd4018 /wd2220 /Oi /Oy- /Gm- /EHsc /MT /GS /Gy /fp:precise /Zc:wchar_t /Zc:forScope /Gd /errorReport:prompt /analyze-")
|
||||||
SET(DEBUG_FLAGS "/Zi /W3 /GL")
|
SET(DEBUG_FLAGS "/Zi /W3 /GL")
|
||||||
SET(RELEASE_FLAGS "/W0 /GL")
|
SET(RELEASE_FLAGS "/W0 /GL")
|
||||||
ENDIF ()
|
ENDIF ()
|
||||||
|
|
|
@ -22,6 +22,11 @@ IF (${PAGMODE} MATCHES "lite")
|
||||||
MESSAGE(STATUS "Build with pagmode lite")
|
MESSAGE(STATUS "Build with pagmode lite")
|
||||||
ENDIF ()
|
ENDIF ()
|
||||||
|
|
||||||
|
IF (${SOMODE} MATCHES "static")
|
||||||
|
SET(TD_SOMODE_STATIC TRUE)
|
||||||
|
MESSAGE(STATUS "Link so using static mode")
|
||||||
|
ENDIF ()
|
||||||
|
|
||||||
IF (${DLLTYPE} MATCHES "go")
|
IF (${DLLTYPE} MATCHES "go")
|
||||||
SET(TD_GODLL TRUE)
|
SET(TD_GODLL TRUE)
|
||||||
MESSAGE(STATUS "input dll type: " ${DLLTYPE})
|
MESSAGE(STATUS "input dll type: " ${DLLTYPE})
|
||||||
|
|
|
@ -68,8 +68,7 @@ ELSEIF (${CMAKE_SYSTEM_NAME} MATCHES "Windows")
|
||||||
MESSAGE(STATUS "The current platform is Windows 64-bit")
|
MESSAGE(STATUS "The current platform is Windows 64-bit")
|
||||||
ELSE ()
|
ELSE ()
|
||||||
SET(TD_WINDOWS_32 TRUE)
|
SET(TD_WINDOWS_32 TRUE)
|
||||||
MESSAGE(FATAL_ERROR "The current platform is Windows 32-bit, not supported yet")
|
MESSAGE(STATUS "The current platform is Windows 32-bit")
|
||||||
EXIT ()
|
|
||||||
ENDIF ()
|
ENDIF ()
|
||||||
ELSE()
|
ELSE()
|
||||||
MESSAGE(FATAL_ERROR "The current platform is not Linux/Darwin/Windows, stop compile")
|
MESSAGE(FATAL_ERROR "The current platform is not Linux/Darwin/Windows, stop compile")
|
||||||
|
|
|
@ -59,7 +59,7 @@
|
||||||
/* Generate the OS-and-debug-mode-specific library name */
|
/* Generate the OS-and-debug-mode-specific library name */
|
||||||
#define _MSVCLIBX_LIB "MsvcLibX" _MSVCLIBX_LIB_OS_SUFFIX _MSVCLIBX_LIB_DBG_SUFFIX ".lib"
|
#define _MSVCLIBX_LIB "MsvcLibX" _MSVCLIBX_LIB_OS_SUFFIX _MSVCLIBX_LIB_DBG_SUFFIX ".lib"
|
||||||
//#pragma message("Adding pragma comment(lib, \"" _MSVCLIBX_LIB "\")")
|
//#pragma message("Adding pragma comment(lib, \"" _MSVCLIBX_LIB "\")")
|
||||||
#pragma comment(lib, _MSVCLIBX_LIB)
|
//#pragma comment(lib, _MSVCLIBX_LIB)
|
||||||
|
|
||||||
/* Library-specific routine used internally by many standard routines */
|
/* Library-specific routine used internally by many standard routines */
|
||||||
#if defined(_WIN32)
|
#if defined(_WIN32)
|
||||||
|
|
|
@ -359,12 +359,12 @@ static unsigned char* ensure(printbuffer * const p, size_t needed)
|
||||||
}
|
}
|
||||||
|
|
||||||
/* calculate new buffer size */
|
/* calculate new buffer size */
|
||||||
if (needed > (LLONG_MAX / 2))
|
if (needed > (LONG_MAX / 2))
|
||||||
{
|
{
|
||||||
/* overflow of int, use LLONG_MAX if possible */
|
/* overflow, use LONG_MAX if possible */
|
||||||
if (needed <= LLONG_MAX)
|
if (needed <= LONG_MAX)
|
||||||
{
|
{
|
||||||
newsize = LLONG_MAX;
|
newsize = LONG_MAX;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
|
|
(binary image file added, size: 45 KiB)
|
@ -15,6 +15,9 @@ TDengine的模块之一是时序数据库。但除此之外,为减少研发的
|
||||||
|
|
||||||
采用TDengine,可将典型的物联网、车联网、工业互联网大数据平台的总拥有成本大幅降低。但需要指出的是,因充分利用了物联网时序数据的特点,它无法用来处理网络爬虫、微博、微信、电商、ERP、CRM等通用型数据。
|
采用TDengine,可将典型的物联网、车联网、工业互联网大数据平台的总拥有成本大幅降低。但需要指出的是,因充分利用了物联网时序数据的特点,它无法用来处理网络爬虫、微博、微信、电商、ERP、CRM等通用型数据。
|
||||||
|
|
||||||
|
<center> <img src="../assets/EcoSystem.png"> </center>
|
||||||
|
<center>图 1. TDengine技术生态图</center>
|
||||||
|
|
||||||
## TDengine 总体适用场景
|
## TDengine 总体适用场景
|
||||||
|
|
||||||
作为一个IOT大数据平台,TDengine的典型适用场景是在IOT范畴,而且用户有一定的数据量。本文后续的介绍主要针对这个范畴里面的系统。范畴之外的系统,比如CRM,ERP等,不在本文讨论范围内。
|
作为一个IOT大数据平台,TDengine的典型适用场景是在IOT范畴,而且用户有一定的数据量。本文后续的介绍主要针对这个范畴里面的系统。范畴之外的系统,比如CRM,ERP等,不在本文讨论范围内。
|
||||||
|
|
|
@ -9,9 +9,9 @@ TDengine采用关系型数据模型,需要建库、建表。因此对于一个
|
||||||
不同类型的数据采集点往往具有不同的数据特征,包括数据采集频率的高低,数据保留时间的长短,副本的数目,数据块的大小等等。为让各种场景下TDengine都能最大效率的工作,TDengine建议将不同数据特征的表创建在不同的库里,因为每个库可以配置不同的存储策略。创建一个库时,除SQL标准的选项外,应用还可以指定保留时长、副本数、内存块个数、时间精度、文件块里最大最小记录条数、是否压缩、一个数据文件覆盖的天数等多种参数。比如:
|
不同类型的数据采集点往往具有不同的数据特征,包括数据采集频率的高低,数据保留时间的长短,副本的数目,数据块的大小等等。为让各种场景下TDengine都能最大效率的工作,TDengine建议将不同数据特征的表创建在不同的库里,因为每个库可以配置不同的存储策略。创建一个库时,除SQL标准的选项外,应用还可以指定保留时长、副本数、内存块个数、时间精度、文件块里最大最小记录条数、是否压缩、一个数据文件覆盖的天数等多种参数。比如:
|
||||||
|
|
||||||
```cmd
|
```cmd
|
||||||
CREATE DATABASE power KEEP 365 DAYS 10 REPLICA 3 BLOCKS 4;
|
CREATE DATABASE power KEEP 365 DAYS 10 BLOCKS 4;
|
||||||
```
|
```
|
||||||
上述语句将创建一个名为power的库,这个库的数据将保留365天(超过365天将被自动删除),每10天一个数据文件,副本数为3, 内存块数为4。详细的语法及参数请见<a href="https://www.taosdata.com/cn/documentation20/taos-sql/">TAOS SQL</a>
|
上述语句将创建一个名为power的库,这个库的数据将保留365天(超过365天将被自动删除),每10天一个数据文件,内存块数为4。详细的语法及参数请见<a href="https://www.taosdata.com/cn/documentation20/taos-sql/">TAOS SQL</a>
|
||||||
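As a quick sanity check (not part of the original example, but a standard TDengine command), the newly created database and its parameters can be listed:

```cmd
SHOW DATABASES;
```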
|
|
||||||
创建库之后,需要使用SQL命令USE将当前库切换过来,例如:
|
创建库之后,需要使用SQL命令USE将当前库切换过来,例如:
|
||||||
|
|
||||||
|
@ -27,13 +27,15 @@ USE power;
|
||||||
- 处于两个不同库的表是不能进行JOIN操作的。
|
- 处于两个不同库的表是不能进行JOIN操作的。
|
||||||
|
|
||||||
## 创建超级表
|
## 创建超级表
|
||||||
一个物联网系统,往往存在多种类型的设备,比如对于电网,存在智能电表、变压器、母线、开关等等。为便于多表之间的聚合,使用TDengine, 需要对每个类型的设备创建一超级表。以表一中的智能电表为例,可以使用如下的SQL命令创建超级表:
|
一个物联网系统,往往存在多种类型的设备,比如对于电网,存在智能电表、变压器、母线、开关等等。为便于多表之间的聚合,使用TDengine, 需要对每个类型的数据采集点创建一超级表。以表一中的智能电表为例,可以使用如下的SQL命令创建超级表:
|
||||||
```cmd
|
```cmd
|
||||||
CREATE TABLE meters (ts timestamp, current float, voltage int, phase float) TAGS (location binary(64), groupId int);
|
CREATE TABLE meters (ts timestamp, current float, voltage int, phase float) TAGS (location binary(64), groupId int);
|
||||||
```
|
```
|
||||||
与创建普通表一样,创建表时,需要提供表名(示例中为meters),表结构Schema,即数据列的定义,为采集的物理量(示例中为ts, current, voltage, phase),数据类型可以为整型、浮点型、字符串等。除此之外,还需要提供标签的schema (示例中为location, groupId),标签的数据类型可以为整型、浮点型、字符串等。采集点的静态属性往往可以作为标签,比如采集点的地理位置、设备型号、设备组ID、管理员ID等等。标签的schema可以事后增加、删除、修改。具体定义以及细节请见 <a href="https://www.taosdata.com/cn/documentation20/taos-sql/">TAOS SQL </a>一节。
|
与创建普通表一样,创建表时,需要提供表名(示例中为meters),表结构Schema,即数据列的定义。第一列必须为时间戳(示例中为ts),其他列为采集的物理量(示例中为current, voltage, phase),数据类型可以为整型、浮点型、字符串等。除此之外,还需要提供标签的schema (示例中为location, groupId),标签的数据类型可以为整型、浮点型、字符串等。采集点的静态属性往往可以作为标签,比如采集点的地理位置、设备型号、设备组ID、管理员ID等等。标签的schema可以事后增加、删除、修改。具体定义以及细节请见 <a href="https://www.taosdata.com/cn/documentation20/taos-sql/">TAOS SQL </a>一节。
|
||||||
|
|
||||||
每一种类型的数据采集点需要建立一个超级表,因此一个物联网系统,往往会有多个超级表。一个系统可以有多个DB,一个DB里可以有一到多个超级表。
|
每一种类型的数据采集点需要建立一个超级表,因此一个物联网系统,往往会有多个超级表。对于电网,我们就需要对智能电表、变压器、母线、开关等都建立一个超级表。在物联网中,一个设备就可能有多个数据采集点(比如一台风力发电的风机,有的采集点采集电流、电压等电参数,有的采集点采集温度、湿度、风向等环境参数),这个时候,对这一类型的设备,需要建立多张超级表。一张超级表里包含的采集物理量必须是同时采集的(时间戳是一致的)。
|
||||||
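A hedged sketch of the wind-turbine case above (the super table and column names below are invented for illustration, following the same CREATE TABLE ... TAGS pattern as meters): the electrical quantities and the environmental quantities are sampled at different times, so each group gets its own super table.

```cmd
CREATE TABLE turbine_electrical (ts timestamp, current float, voltage float, phase float) TAGS (location binary(64), turbineId int);
CREATE TABLE turbine_environment (ts timestamp, temperature float, humidity float, wind_direction float) TAGS (location binary(64), turbineId int);
```

Within each super table every row carries a single timestamp, which matches the rule above that the quantities in one super table must be collected at the same time.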
|
|
||||||
|
一张超级表最多容许1024列,如果一个采集点采集的物理量个数超过1024,需要建多张超级表来处理。一个系统可以有多个DB,一个DB里可以有一到多个超级表。
|
||||||
|
|
||||||
## 创建表
|
## 创建表
|
||||||
TDengine对每个数据采集点需要独立建表。与标准的关系型数据一样,一张表有表名,Schema,但除此之外,还可以带有一到多个标签。创建时,需要使用超级表做模板,同时指定标签的具体值。以表一中的智能电表为例,可以使用如下的SQL命令建表:
|
TDengine对每个数据采集点需要独立建表。与标准的关系型数据一样,一张表有表名,Schema,但除此之外,还可以带有一到多个标签。创建时,需要使用超级表做模板,同时指定标签的具体值。以表一中的智能电表为例,可以使用如下的SQL命令建表:
|
||||||
|
@ -51,5 +53,7 @@ INSERT INTO d1001 USING METERS TAGS ("Beijng.Chaoyang", 2) VALUES (now, 10.2, 21
|
||||||
```
|
```
|
||||||
上述SQL语句将记录(now, 10.2, 219, 0.32) 插入进表d1001。如果表d1001还未创建,则使用超级表meters做模板自动创建,同时打上标签值"Beijing.Chaoyang", 2。
|
上述SQL语句将记录(now, 10.2, 219, 0.32) 插入进表d1001。如果表d1001还未创建,则使用超级表meters做模板自动创建,同时打上标签值"Beijing.Chaoyang", 2。
|
||||||
|
|
||||||
**多列模型**:TDengine支持多列模型,只要这些物理量是同时采集的,这些量就可以作为不同列放在同一张表里。有的数据采集点有多组采集量,每一组的数据采集时间是不一样的,这时需要对同一个采集点建多张表。但还有一种极限的设计,单列模型,无论是否同时采集,每个采集的物理量单独建表。TDengine建议,只要采集时间一致,就采用多列模型,因为插入效率以及存储效率更高。TDengine支持最大的列数为1024列。
|
## 多列模型 vs 单列模型
|
||||||
|
TDengine支持多列模型,只要物理量是一个数据采集点同时采集的(时间戳一致),这些量就可以作为不同列放在一张超级表里。但还有一种极限的设计,单列模型,每个采集的物理量都单独建表,因此每种类型的物理量都单独建立一超级表。比如电流、电压、相位,就建三张超级表。
|
||||||
|
|
||||||
|
TDengine建议尽可能采用多列模型,因为插入效率以及存储效率更高。但对于有些场景,一个采集点的采集量的种类经常变化,如果采用多列模型,就需要频繁修改超级表的结构定义,让应用变得复杂,这时采用单列模型就会显得更简单。
|
||||||
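A minimal sketch contrasting the two models for the smart-meter example (the single-column super table names below are illustrative only): the multi-column design is the meters super table already defined above, while the single-column design creates one super table per physical quantity.

```cmd
CREATE TABLE meters_current (ts timestamp, current float) TAGS (location binary(64), groupId int);
CREATE TABLE meters_voltage (ts timestamp, voltage int) TAGS (location binary(64), groupId int);
CREATE TABLE meters_phase (ts timestamp, phase float) TAGS (location binary(64), groupId int);
```

In the single-column layout each reading becomes three rows with three copies of the timestamp and tags, which is why the multi-column model is recommended whenever the quantities really are sampled together.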
|
|
|
@ -12,7 +12,7 @@ TDengine 采用 SQL 作为查询语言。应用程序可以通过 C/C++, Java, G
|
||||||
- 标签和数值的多种过滤条件:\>, \<, =, \<>, like 等
|
- 标签和数值的多种过滤条件:\>, \<, =, \<>, like 等
|
||||||
- 聚合结果的分组(Group by)、排序(Order by)、约束输出(Limit/Offset)
|
- 聚合结果的分组(Group by)、排序(Order by)、约束输出(Limit/Offset)
|
||||||
- 数值列及聚合结果的四则运算
|
- 数值列及聚合结果的四则运算
|
||||||
- 时间戳对齐的连接查询(Join Query)操作
|
- 时间戳对齐的连接查询(Join Query: 隐式连接)操作
|
||||||
- 多种聚合/计算函数: count, max, min, avg, sum, twa, stddev, leastsquares, top, bottom, first, last, percentile, apercentile, last_row, spread, diff等
|
- 多种聚合/计算函数: count, max, min, avg, sum, twa, stddev, leastsquares, top, bottom, first, last, percentile, apercentile, last_row, spread, diff等
|
||||||
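A hedged sketch of the timestamp-aligned join listed above, assuming two sub-tables d1001 and d1002 of the meters super table already exist; the implicit join condition is equality on the timestamp column:

```cmd
SELECT d1001.ts, d1001.current, d1002.current
FROM d1001, d1002
WHERE d1001.ts = d1002.ts;
```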
|
|
||||||
例如:在TAOS Shell中,从表d1001中查询出voltage > 215的记录,按时间降序排列,仅仅输出2条。
|
例如:在TAOS Shell中,从表d1001中查询出voltage > 215的记录,按时间降序排列,仅仅输出2条。
|
||||||
|
|
|
@ -1,16 +1,46 @@
|
||||||
#TDengine 集群安装、管理
|
# TDengine 集群安装、管理
|
||||||
|
|
||||||
多个taosd的运行实例可以组成一个集群,以保证TDengine的高可靠运行,并提供水平扩展能力。要了解TDengine 2.0的集群管理,需要对集群的基本概念有所了解,请看TDengine 2.0整体架构一章。
|
多个taosd的运行实例可以组成一个集群,以保证TDengine的高可靠运行,并提供水平扩展能力。要了解TDengine 2.0的集群管理,需要对集群的基本概念有所了解,请看TDengine 2.0整体架构一章。而且在安装集群之前,请按照[《立即开始》](https://www.taosdata.com/cn/getting-started20/)一章安装并体验过单节点功能。
|
||||||
|
|
||||||
集群的每个节点是由End Point来唯一标识的,End Point是由FQDN(Fully Qualified Domain Name)外加Port组成,比如 h1.taosdata.com:6030。一般FQDN就是服务器的hostname,可通过Linux命令“hostname"获取。端口是这个节点对外服务的端口号,缺省是6030,但可以通过taos.cfg里配置参数serverPort进行修改。
|
集群的每个节点是由End Point来唯一标识的,End Point是由FQDN(Fully Qualified Domain Name)外加Port组成,比如 h1.taosdata.com:6030。一般FQDN就是服务器的hostname,可通过Linux命令“hostname"获取。端口是这个节点对外服务的端口号,缺省是6030,但可以通过taos.cfg里配置参数serverPort进行修改。一个节点可能配置了多个hostname, TDengine会自动获取第一个,但也可以通过taos.cfg里配置参数fqdn进行指定。
|
||||||
|
|
||||||
TDengine的集群管理极其简单,除添加和删除节点需要人工干预之外,其他全部是自动完成,最大程度的降低了运维的工作量。本章对集群管理的操作做详细的描述。
|
TDengine的集群管理极其简单,除添加和删除节点需要人工干预之外,其他全部是自动完成,最大程度的降低了运维的工作量。本章对集群管理的操作做详细的描述。
|
||||||
|
|
||||||
##安装、创建第一个节点
|
## 准备工作
|
||||||
|
|
||||||
集群是由一个一个dnode组成的,是从一个dnode的创建开始的。创建第一个节点很简单,就按照["立即开始“](https://www.taosdata.com/cn/getting-started/)一章的方法进行安装、启动即可。
|
**第一步**:如果搭建集群的节点中,存有之前的测试数据、装过1.X的版本,或者装过其他版本的TDengine,请先将其删除,并清空所有数据,具体步骤请参考博客[《TDengine多种安装包的安装和卸载》](https://www.taosdata.com/blog/2019/08/09/566.html )
|
||||||
|
|
||||||
启动后,请执行taos, 启动taos shell,从shell里执行命令"show dnodes;",如下所示:
|
**第二步**:建议关闭防火墙,至少保证端口:6030 - 6041的TCP和UDP端口都是开放的。**强烈建议**先关闭防火墙,集群搭建完毕之后,再来配置端口;
|
||||||
|
|
||||||
|
**第三步**:在所有节点安装TDengine,且版本必须是一致的,**但不要启动taosd**;
|
||||||
|
|
||||||
|
**第四步**:检查、配置所有节点的FQDN:
|
||||||
|
|
||||||
|
1. 每个节点上执行命令`hostname`,查看和确认所有节点的hostname是不相同的;
|
||||||
|
2. 每个节点上执行`ping host`, 其中host是其他节点的hostname, 看能否ping通其它节点; 如果不能ping通,需要检查网络设置, 或/etc/hosts文件,或DNS的配置。如果无法ping通,是无法组成集群的。
|
||||||
|
3. 每个节点的End Point就是输出的hostname外加端口号,比如h1.taosdata.com:6030
|
||||||
|
|
||||||
|
**第五步**:修改TDengine的配置文件(所有节点的文件/etc/taos/taos.cfg都需要修改)。假设准备启动的第一个节点End Point为 h1.taosdata.com:6030, 那么以下几个参数与集群相关:
|
||||||
|
|
||||||
|
```
|
||||||
|
// firstEp 是每个节点启动后连接的第一个节点
|
||||||
|
firstEp h1.taosdata.com:6030
|
||||||
|
|
||||||
|
// 配置本节点的FQDN,如果本机只有一个hostname, 无需配置
|
||||||
|
fqdn h1.taosdata.com
|
||||||
|
|
||||||
|
// 配置本节点的端口号,缺省是6030
|
||||||
|
serverPort 6030
|
||||||
|
|
||||||
|
// 副本数为偶数的时候,需要配置,请参考《Arbitrator的使用》的部分
|
||||||
|
arbitrator ha.taosdata.com:6030
|
||||||
|
```
|
||||||
|
|
||||||
|
一定要修改的参数是firstEp, 其他参数可不做任何修改,除非你很清楚为什么要修改。
|
||||||
|
|
||||||
|
## 启动第一个节点
|
||||||
|
|
||||||
|
按照[《立即开始》](https://www.taosdata.com/cn/getting-started20/)里的指示,启动第一个节点h1.taosdata.com,然后执行taos, 启动taos shell,从shell里执行命令"show dnodes;",如下所示:
|
||||||
```
|
```
|
||||||
Welcome to the TDengine shell from Linux, Client Version:2.0.0.0
|
Welcome to the TDengine shell from Linux, Client Version:2.0.0.0
|
||||||
Copyright (c) 2017 by TAOS Data, Inc. All rights reserved.
|
Copyright (c) 2017 by TAOS Data, Inc. All rights reserved.
|
||||||
|
@ -25,71 +55,64 @@ taos>
|
||||||
```
|
```
|
||||||
上述命令里,可以看到刚启动的这个节点的End Point是:h1.taos.com:6030
|
上述命令里,可以看到刚启动的这个节点的End Point是:h1.taos.com:6030
|
||||||
|
|
||||||
## 安装、创建后续节点
|
## 启动后续节点
|
||||||
|
|
||||||
将新的节点添加到现有集群,具体有以下几步:
|
将后续的节点添加到现有集群,具体有以下几步:
|
||||||
|
|
||||||
1. 按照["立即开始“](https://www.taosdata.com/cn/getting-started/)一章的方法进行安装,**但不要启动taosd**
|
1. 按照["立即开始“](https://www.taosdata.com/cn/getting-started/)一章的方法在每个节点启动taosd。
|
||||||
|
|
||||||
2. 如果是使用涛思数据的官方安装包进行安装,在安装结束时,会询问集群的End Port, 输入第一个节点的End Point即可。如果是源码安装,请编辑配置文件taos.cfg(缺省是在/etc/taos/目录),增加一行:
|
2. 在第一个节点,使用CLI程序taos, 登录进TDengine系统, 执行命令:
|
||||||
|
|
||||||
```
|
|
||||||
firstEp h1.taos.com:6030
|
|
||||||
```
|
|
||||||
|
|
||||||
请注意将示例的“h1.taos.com:6030" 替换为你自己第一个节点的End Point
|
|
||||||
|
|
||||||
3. 按照["立即开始“](https://www.taosdata.com/cn/getting-started/)一章的方法启动taosd
|
|
||||||
|
|
||||||
4. 在Linux shell里执行命令"hostname"找出本机的FQDN, 假设为h2.taos.com。如果无法找到,可以查看taosd日志文件taosdlog.0里前面几行日志(一般在/var/log/taos目录),fqdn以及port都会打印出来。
|
|
||||||
|
|
||||||
5. 在第一个节点,使用CLI程序taos, 登录进TDengine系统, 使用命令:
|
|
||||||
|
|
||||||
```
|
```
|
||||||
CREATE DNODE "h2.taos.com:6030";
|
CREATE DNODE "h2.taos.com:6030";
|
||||||
```
|
```
|
||||||
|
|
||||||
将新节点的End Point添加进集群的EP列表。**"fqdn:port"需要用双引号引起来**,否则出错。请注意将示例的“h2.taos.com:6030" 替换为你自己第一个节点的End Point
|
将新节点的End Point (准备工作中第四步获知的) 添加进集群的EP列表。**"fqdn:port"需要用双引号引起来**,否则出错。请注意将示例的“h2.taos.com:6030" 替换为这个新节点的End Point。
|
||||||
|
|
||||||
6. 使用命令
|
3. 然后执行命令
|
||||||
|
|
||||||
```
|
```
|
||||||
SHOW DNODES;
|
SHOW DNODES;
|
||||||
```
|
```
|
||||||
|
|
||||||
查看新节点是否被成功加入。
|
查看新节点是否被成功加入。如果新加入的节点处于离线状态,请做以下两个检查:
|
||||||
|
|
||||||
|
- 查看该节点的taosd是否正常工作,如果没有正常运行,需要先检查为什么
|
||||||
|
- 查看该节点taosd日志文件taosdlog.0里前面几行日志(一般在/var/log/taos目录),看日志里输出的该节点fqdn以及端口号是否为刚添加的End Point。如果不一致,需要将正确的End Point添加进去。
|
||||||
|
|
||||||
按照上述步骤可以源源不断的将新的节点加入到集群。
|
按照上述步骤可以源源不断的将新的节点加入到集群。
|
||||||
|
|
||||||
**提示:**
|
**提示:**
|
||||||
|
|
||||||
- firstEp, secondEp这两个参数仅仅在该节点第一次加入集群时有作用,加入集群后,该节点会保存最新的mnode的End Point列表,不再依赖这两个参数。
|
- firstEp这个参数仅仅在该节点第一次加入集群时有作用,加入集群后,该节点会保存最新的mnode的End Point列表,不再依赖这个参数。
|
||||||
- 两个没有配置firstEp, secondEp参数的dnode启动后,会独立运行起来。这个时候,无法将其中一个节点加入到另外一个节点,形成集群。**无法将两个独立的集群合并成为新的集群**。
|
- 两个没有配置firstEp参数的dnode启动后,会独立运行起来。这个时候,无法将其中一个节点加入到另外一个节点,形成集群。**无法将两个独立的集群合并成为新的集群**。
|
||||||
|
|
||||||
##节点管理
|
## 节点管理
|
||||||
|
|
||||||
###添加节点
|
### 添加节点
|
||||||
执行CLI程序taos, 使用root账号登录进系统, 执行:
|
执行CLI程序taos, 使用root账号登录进系统, 执行:
|
||||||
```
|
```
|
||||||
CREATE DNODE "fqdn:port";
|
CREATE DNODE "fqdn:port";
|
||||||
```
|
```
|
||||||
将新节点的End Point添加进集群的EP列表。**"fqdn:port"需要用双引号引起来**,否则出错。一个节点对外服务的fqdn和port可以通过配置文件taos.cfg进行配置,缺省是自动获取。
|
将新节点的End Point添加进集群的EP列表。**"fqdn:port"需要用双引号引起来**,否则出错。一个节点对外服务的fqdn和port可以通过配置文件taos.cfg进行配置,缺省是自动获取。
|
||||||
|
|
||||||
###删除节点
|
### 删除节点
|
||||||
执行CLI程序taos, 使用root账号登录进TDengine系统,执行:
|
执行CLI程序taos, 使用root账号登录进TDengine系统,执行:
|
||||||
|
|
||||||
```
|
```
|
||||||
DROP DNODE "fqdn:port";
|
DROP DNODE "fqdn:port";
|
||||||
```
|
```
|
||||||
其中fqdn是被删除的节点的FQDN,port是其对外服务的端口号
|
其中fqdn是被删除的节点的FQDN,port是其对外服务的端口号
|
||||||
|
|
||||||
###查看节点
|
### 查看节点
|
||||||
执行CLI程序taos,使用root账号登录进TDengine系统,执行:
|
执行CLI程序taos,使用root账号登录进TDengine系统,执行:
|
||||||
|
|
||||||
```
|
```
|
||||||
SHOW DNODES;
|
SHOW DNODES;
|
||||||
```
|
```
|
||||||
它将列出集群中所有的dnode,每个dnode的fqdn:port, 状态(ready, offline等),vnode数目,还未使用的vnode数目等信息。在添加或删除一个节点后,可以使用该命令查看。
|
它将列出集群中所有的dnode,每个dnode的fqdn:port, 状态(ready, offline等),vnode数目,还未使用的vnode数目等信息。在添加或删除一个节点后,可以使用该命令查看。
|
||||||
|
|
||||||
###查看虚拟节点组
|
### 查看虚拟节点组
|
||||||
|
|
||||||
为充分利用多核技术,并提供scalability,数据需要分片处理。因此TDengine会将一个DB的数据切分成多份,存放在多个vnode里。这些vnode可能分布在多个dnode里,这样就实现了水平扩展。一个vnode仅仅属于一个DB,但一个DB可以有多个vnode。vnode是由mnode根据当前系统资源的情况自动进行分配的,无需任何人工干预。
|
为充分利用多核技术,并提供scalability,数据需要分片处理。因此TDengine会将一个DB的数据切分成多份,存放在多个vnode里。这些vnode可能分布在多个dnode里,这样就实现了水平扩展。一个vnode仅仅属于一个DB,但一个DB可以有多个vnode。vnode是由mnode根据当前系统资源的情况自动进行分配的,无需任何人工干预。
|
||||||
|
|
||||||
|
@ -97,7 +120,7 @@ SHOW DNODES;
|
||||||
```
|
```
|
||||||
SHOW VGROUPS;
|
SHOW VGROUPS;
|
||||||
```
|
```
|
||||||
##vnode的高可用性
|
## vnode的高可用性
|
||||||
TDengine通过多副本的机制来提供系统的高可用性,包括vnode和mnode的高可用性。
|
TDengine通过多副本的机制来提供系统的高可用性,包括vnode和mnode的高可用性。
|
||||||
|
|
||||||
vnode的副本数是与DB关联的,一个集群里可以有多个DB,根据运营的需求,每个DB可以配置不同的副本数。创建数据库时,通过参数replica 指定副本数(缺省为1)。如果副本数为1,系统的可靠性无法保证,只要数据所在的节点宕机,就将无法提供服务。集群的节点数必须大于等于副本数,否则创建表时将返回错误“more dnodes are needed"。比如下面的命令将创建副本数为3的数据库demo:
|
vnode的副本数是与DB关联的,一个集群里可以有多个DB,根据运营的需求,每个DB可以配置不同的副本数。创建数据库时,通过参数replica 指定副本数(缺省为1)。如果副本数为1,系统的可靠性无法保证,只要数据所在的节点宕机,就将无法提供服务。集群的节点数必须大于等于副本数,否则创建表时将返回错误“more dnodes are needed"。比如下面的命令将创建副本数为3的数据库demo:
|
||||||
|
@ -111,7 +134,7 @@ CREATE DATABASE demo replica 3;
|
||||||
|
|
||||||
因为vnode的引入,无法简单的给出结论:“集群中过半dnode工作,集群就应该工作”。但是对于简单的情形,很好下结论。比如副本数为3,只有三个dnode,那如果仅有一个节点不工作,整个集群还是可以正常工作的,但如果有两个节点不工作,那整个集群就无法正常工作了。
|
因为vnode的引入,无法简单的给出结论:“集群中过半dnode工作,集群就应该工作”。但是对于简单的情形,很好下结论。比如副本数为3,只有三个dnode,那如果仅有一个节点不工作,整个集群还是可以正常工作的,但如果有两个节点不工作,那整个集群就无法正常工作了。
|
||||||
|
|
||||||
##Mnode的高可用性
|
## Mnode的高可用性
|
||||||
TDengine集群是由mnode (taosd的一个模块,逻辑节点) 负责管理的,为保证mnode的高可用,可以配置多个mnode副本,副本数由系统配置参数numOfMnodes决定,有效范围为1-3。为保证元数据的强一致性,mnode副本之间是通过同步的方式进行数据复制的。
|
TDengine集群是由mnode (taosd的一个模块,逻辑节点) 负责管理的,为保证mnode的高可用,可以配置多个mnode副本,副本数由系统配置参数numOfMnodes决定,有效范围为1-3。为保证元数据的强一致性,mnode副本之间是通过同步的方式进行数据复制的。
|
||||||
|
|
||||||
一个集群有多个dnode, 但一个dnode至多运行一个mnode实例。多个dnode情况下,哪个dnode可以作为mnode呢?这是完全由系统根据整个系统资源情况,自动指定的。用户可通过CLI程序taos,在TDengine的console里,执行如下命令:
|
一个集群有多个dnode, 但一个dnode至多运行一个mnode实例。多个dnode情况下,哪个dnode可以作为mnode呢?这是完全由系统根据整个系统资源情况,自动指定的。用户可通过CLI程序taos,在TDengine的console里,执行如下命令:
|
||||||
|
@ -125,7 +148,7 @@ SHOW MNODES;
|
||||||
|
|
||||||
**注意:**一个TDengine高可用系统,无论是vnode还是mnode, 都必须配置多个副本。
|
**注意:**一个TDengine高可用系统,无论是vnode还是mnode, 都必须配置多个副本。
|
||||||
|
|
||||||
##负载均衡
|
## 负载均衡
|
||||||
|
|
||||||
有三种情况,将触发负载均衡,而且都无需人工干预。
|
有三种情况,将触发负载均衡,而且都无需人工干预。
|
||||||
|
|
||||||
|
@ -142,8 +165,9 @@ SHOW MNODES;
|
||||||
|
|
||||||
**注意:**如果一个虚拟节点组(包括mnode组)里每个节点都处于离线或unsynced状态,必须等该虚拟节点组里的所有节点都上线、都能交换状态信息后,才能选出Master,该虚拟节点组才能对外提供服务。比如整个集群有3个节点,副本数为3,如果3个节点都宕机,然后2个节点重启,是无法工作的,只有等3个节点都重启成功,才能对外服务。
|
**注意:**如果一个虚拟节点组(包括mnode组)里每个节点都处于离线或unsynced状态,必须等该虚拟节点组里的所有节点都上线、都能交换状态信息后,才能选出Master,该虚拟节点组才能对外提供服务。比如整个集群有3个节点,副本数为3,如果3个节点都宕机,然后2个节点重启,是无法工作的,只有等3个节点都重启成功,才能对外服务。
|
||||||
|
|
||||||
##Arbitrator的使用
|
## Arbitrator的使用
|
||||||
|
|
||||||
如果副本数为偶数,当一个vnode group里一半或超过一半的vnode不工作时,是无法从中选出master的。同理,一半或超过一半的mnode不工作时,是无法选出mnode的master的,因为存在“split brain”问题。为解决这个问题,TDengine引入了arbitrator的概念。Arbitrator模拟一个vnode或mnode在工作,但只简单的负责网络连接,不处理任何数据插入或访问。只要包含arbitrator在内,超过半数的vnode或mnode工作,那么该vnode group或mnode组就可以正常的提供数据插入或查询服务。比如对于副本数为2的情形,如果一个节点A离线,但另外一个节点B正常,而且能连接到arbitrator, 那么节点B就能正常工作。
|
如果副本数为偶数,当一个vnode group里一半或超过一半的vnode不工作时,是无法从中选出master的。同理,一半或超过一半的mnode不工作时,是无法选出mnode的master的,因为存在“split brain”问题。为解决这个问题,TDengine引入了arbitrator的概念。Arbitrator模拟一个vnode或mnode在工作,但只简单的负责网络连接,不处理任何数据插入或访问。只要包含arbitrator在内,超过半数的vnode或mnode工作,那么该vnode group或mnode组就可以正常的提供数据插入或查询服务。比如对于副本数为2的情形,如果一个节点A离线,但另外一个节点B正常,而且能连接到arbitrator, 那么节点B就能正常工作。
|
||||||
|
|
||||||
TDengine安装包里带有一个执行程序tarbitrator, 找任何一台Linux服务器运行它即可。该程序对系统资源几乎没有要求,只需要保证有网络连接即可。该应用的命令行参数`-p`可以指定其对外服务的端口号,缺省是6030。配置每个taosd实例时,可以在配置文件taos.cfg里将参数arbitrator设置为arbitrator的End Point。如果该参数配置了,当副本数为偶数时,系统将自动连接配置的arbitrator。
|
TDengine安装包里带有一个执行程序tarbitrator, 找任何一台Linux服务器运行它即可。该程序对系统资源几乎没有要求,只需要保证有网络连接即可。该应用的命令行参数`-p`可以指定其对外服务的端口号,缺省是6030。配置每个taosd实例时,可以在配置文件taos.cfg里将参数arbitrator设置为arbitrator的End Point。如果该参数配置了,当副本数为偶数时,系统将自动连接配置的arbitrator。
|
||||||
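As a hedged illustration (the database name below is made up): once the arbitrator parameter in taos.cfg on every dnode points at the running tarbitrator instance as described above, an even replica count becomes practical, for example:

```
CREATE DATABASE demo2 REPLICA 2;
```

If one of the two replicas goes offline, the surviving replica plus the arbitrator still form a majority, so the vnode group keeps serving.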
|
|
||||||
|
|
|
@ -25,8 +25,8 @@
|
||||||
|
|
||||||
1. 确保客户端与服务端版本号是完全一致的,开源社区版和企业版也不能混用
|
1. 确保客户端与服务端版本号是完全一致的,开源社区版和企业版也不能混用
|
||||||
2. 在服务器,执行 `systemctl status taosd` 检查*taosd*运行状态。如果没有运行,启动*taosd*
|
2. 在服务器,执行 `systemctl status taosd` 检查*taosd*运行状态。如果没有运行,启动*taosd*
|
||||||
3. 确认客户端连接时指定了正确的服务器IP地址
|
3. 确认客户端连接时指定了正确的服务器Fully Qualified Domain Name(FQDN,通常情况下是服务器的hostname)
|
||||||
4. ping服务器IP,如果没有反应,请检查你的网络
|
4. ping服务器FQDN,如果没有反应,请检查你的网络,DNS设置,hosts 文件
|
||||||
5. 检查防火墙设置,确认TCP/UDP 端口6030-6039 是打开的
|
5. 检查防火墙设置,确认TCP/UDP 端口6030-6039 是打开的
|
||||||
6. 对于Linux上的JDBC(ODBC, Python, Go等接口类似)连接, 确保*libtaos.so*在目录*/usr/local/lib/taos*里, 并且*/usr/local/lib/taos*在系统库函数搜索路径*LD_LIBRARY_PATH*里
|
6. 对于Linux上的JDBC(ODBC, Python, Go等接口类似)连接, 确保*libtaos.so*在目录*/usr/local/lib/taos*里, 并且*/usr/local/lib/taos*在系统库函数搜索路径*LD_LIBRARY_PATH*里
|
||||||
7. 对于windows上的JDBC, ODBC, Python, Go等连接,确保*driver/c/taos.dll*在你的系统搜索目录里 (建议*taos.dll*放在目录 *C:\Windows\System32*)
|
7. 对于windows上的JDBC, ODBC, Python, Go等连接,确保*driver/c/taos.dll*在你的系统搜索目录里 (建议*taos.dll*放在目录 *C:\Windows\System32*)
|
||||||
|
|
|
@ -24,6 +24,7 @@ INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31) (1538548695000, 12.6,
|
||||||
|
|
||||||
- 要提高写入效率,需要批量写入。一批写入的记录条数越多,插入效率就越高。但一条记录不能超过16K,一条SQL语句总长度不能超过64K(可通过参数maxSQLLength配置,最大可配置为8M)。
|
- 要提高写入效率,需要批量写入。一批写入的记录条数越多,插入效率就越高。但一条记录不能超过16K,一条SQL语句总长度不能超过64K(可通过参数maxSQLLength配置,最大可配置为8M)。
|
||||||
- TDengine支持多线程同时写入,要进一步提高写入速度,一个客户端需要打开20个以上的线程同时写。但线程数达到一定数量后,无法再提高,甚至还会下降,因为线程频繁切换,带来额外开销。
|
- TDengine支持多线程同时写入,要进一步提高写入速度,一个客户端需要打开20个以上的线程同时写。但线程数达到一定数量后,无法再提高,甚至还会下降,因为线程频繁切换,带来额外开销。
|
||||||
|
- 对同一张表,如果新插入记录的时间戳已经存在,新记录将被直接抛弃,也就是说,在一张表里,时间戳必须是唯一的。如果应用自动生成记录,很有可能生成的时间戳是一样的,这样,成功插入的记录条数会小于应用插入的记录条数。
|
||||||
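A hedged sketch of the duplicate-timestamp rule above, reusing the d1001 table from the data-model chapter: the second INSERT repeats the first record's timestamp, so it is silently discarded and the count stays at 1.

```cmd
INSERT INTO d1001 VALUES (1538548685000, 10.3, 219, 0.31);
INSERT INTO d1001 VALUES (1538548685000, 12.6, 218, 0.33);
SELECT COUNT(*) FROM d1001 WHERE ts = 1538548685000;
```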
|
|
||||||
## Prometheus直接写入
|
## Prometheus直接写入
|
||||||
[Prometheus](https://www.prometheus.io/)作为Cloud Native Computing Foundation毕业的项目,在性能监控以及K8S性能监控领域有着非常广泛的应用。TDengine提供一个小工具[Bailongma](https://github.com/taosdata/Bailongma),只需在Prometheus做简单配置,无需任何代码,就可将Prometheus采集的数据直接写入TDengine,并按规则在TDengine自动创建库和相关表项。博文[用Docker容器快速搭建一个Devops监控Demo](https://www.taosdata.com/blog/2020/02/03/1189.html)即是采用bailongma将Prometheus和Telegraf的数据写入TDengine中的示例,可以参考。
|
[Prometheus](https://www.prometheus.io/)作为Cloud Native Computing Foundation毕业的项目,在性能监控以及K8S性能监控领域有着非常广泛的应用。TDengine提供一个小工具[Bailongma](https://github.com/taosdata/Bailongma),只需在Prometheus做简单配置,无需任何代码,就可将Prometheus采集的数据直接写入TDengine,并按规则在TDengine自动创建库和相关表项。博文[用Docker容器快速搭建一个Devops监控Demo](https://www.taosdata.com/blog/2020/02/03/1189.html)即是采用bailongma将Prometheus和Telegraf的数据写入TDengine中的示例,可以参考。
|
||||||
|
|
|
@ -54,7 +54,7 @@ cp ${compile_dir}/build/lib/${libfile} ${pkg_dir}${install_home_pat
|
||||||
cp ${compile_dir}/../src/inc/taos.h ${pkg_dir}${install_home_path}/include
|
cp ${compile_dir}/../src/inc/taos.h ${pkg_dir}${install_home_path}/include
|
||||||
cp ${compile_dir}/../src/inc/taoserror.h ${pkg_dir}${install_home_path}/include
|
cp ${compile_dir}/../src/inc/taoserror.h ${pkg_dir}${install_home_path}/include
|
||||||
cp -r ${top_dir}/tests/examples/* ${pkg_dir}${install_home_path}/examples
|
cp -r ${top_dir}/tests/examples/* ${pkg_dir}${install_home_path}/examples
|
||||||
cp -r ${top_dir}/src/connector/grafana ${pkg_dir}${install_home_path}/connector
|
cp -r ${top_dir}/src/connector/grafanaplugin ${pkg_dir}${install_home_path}/connector
|
||||||
cp -r ${top_dir}/src/connector/python ${pkg_dir}${install_home_path}/connector
|
cp -r ${top_dir}/src/connector/python ${pkg_dir}${install_home_path}/connector
|
||||||
cp -r ${top_dir}/src/connector/go ${pkg_dir}${install_home_path}/connector
|
cp -r ${top_dir}/src/connector/go ${pkg_dir}${install_home_path}/connector
|
||||||
cp ${compile_dir}/build/lib/taos-jdbcdriver*dist.* ${pkg_dir}${install_home_path}/connector
|
cp ${compile_dir}/build/lib/taos-jdbcdriver*dist.* ${pkg_dir}${install_home_path}/connector
|
||||||
|
|
|
@ -10,6 +10,7 @@ set -e
|
||||||
# -o [Linux | Kylin | Alpine | Raspberrypi | Darwin | Windows | ...]
|
# -o [Linux | Kylin | Alpine | Raspberrypi | Darwin | Windows | ...]
|
||||||
# -V [stable | beta]
|
# -V [stable | beta]
|
||||||
# -l [full | lite]
|
# -l [full | lite]
|
||||||
|
# -s [static | dynamic]
|
||||||
# -n [2.0.0.3]
|
# -n [2.0.0.3]
|
||||||
|
|
||||||
# set parameters by default value
|
# set parameters by default value
|
||||||
|
@ -18,9 +19,10 @@ verType=stable # [stable, beta]
|
||||||
cpuType=x64 # [aarch32 | aarch64 | x64 | x86 | mips64 ...]
|
cpuType=x64 # [aarch32 | aarch64 | x64 | x86 | mips64 ...]
|
||||||
osType=Linux # [Linux | Kylin | Alpine | Raspberrypi | Darwin | Windows | ...]
|
osType=Linux # [Linux | Kylin | Alpine | Raspberrypi | Darwin | Windows | ...]
|
||||||
pagMode=full # [full | lite]
|
pagMode=full # [full | lite]
|
||||||
|
soMode=dynamic # [static | dynamic]
|
||||||
verNumber=""
|
verNumber=""
|
||||||
|
|
||||||
while getopts "hv:V:c:o:l:n:" arg
|
while getopts "hv:V:c:o:l:s:n:" arg
|
||||||
do
|
do
|
||||||
case $arg in
|
case $arg in
|
||||||
v)
|
v)
|
||||||
|
@ -39,6 +41,10 @@ do
|
||||||
#echo "pagMode=$OPTARG"
|
#echo "pagMode=$OPTARG"
|
||||||
pagMode=$(echo $OPTARG)
|
pagMode=$(echo $OPTARG)
|
||||||
;;
|
;;
|
||||||
|
s)
|
||||||
|
#echo "soMode=$OPTARG"
|
||||||
|
soMode=$(echo $OPTARG)
|
||||||
|
;;
|
||||||
n)
|
n)
|
||||||
#echo "verNumber=$OPTARG"
|
#echo "verNumber=$OPTARG"
|
||||||
verNumber=$(echo $OPTARG)
|
verNumber=$(echo $OPTARG)
|
||||||
|
@ -53,6 +59,7 @@ do
|
||||||
echo " -o [Linux | Kylin | Alpine | Raspberrypi | Darwin | Windows | ...] "
|
echo " -o [Linux | Kylin | Alpine | Raspberrypi | Darwin | Windows | ...] "
|
||||||
echo " -V [stable | beta] "
|
echo " -V [stable | beta] "
|
||||||
echo " -l [full | lite] "
|
echo " -l [full | lite] "
|
||||||
|
echo " -s [static | dynamic] "
|
||||||
echo " -n [version number] "
|
echo " -n [version number] "
|
||||||
exit 0
|
exit 0
|
||||||
;;
|
;;
|
||||||
|
@ -63,7 +70,7 @@ do
|
||||||
esac
|
esac
|
||||||
done
|
done
|
||||||
|
|
||||||
echo "verMode=${verMode} verType=${verType} cpuType=${cpuType} osType=${osType} pagMode=${pagMode} verNumber=${verNumber}"
|
echo "verMode=${verMode} verType=${verType} cpuType=${cpuType} osType=${osType} pagMode=${pagMode} soMode=${soMode} verNumber=${verNumber}"
|
||||||
|
|
||||||
curr_dir=$(pwd)
|
curr_dir=$(pwd)
|
||||||
|
|
||||||
|
@ -223,9 +230,9 @@ cd ${compile_dir}
|
||||||
# check support cpu type
|
# check support cpu type
|
||||||
if [[ "$cpuType" == "x64" ]] || [[ "$cpuType" == "aarch64" ]] || [[ "$cpuType" == "aarch32" ]] || [[ "$cpuType" == "mips64" ]] ; then
|
if [[ "$cpuType" == "x64" ]] || [[ "$cpuType" == "aarch64" ]] || [[ "$cpuType" == "aarch32" ]] || [[ "$cpuType" == "mips64" ]] ; then
|
||||||
if [ "$verMode" != "cluster" ]; then
|
if [ "$verMode" != "cluster" ]; then
|
||||||
cmake ../ -DCPUTYPE=${cpuType} -DPAGMODE=${pagMode}
|
cmake ../ -DCPUTYPE=${cpuType} -DPAGMODE=${pagMode} -DSOMODE=${soMode}
|
||||||
else
|
else
|
||||||
cmake ../../ -DCPUTYPE=${cpuType}
|
cmake ../../ -DCPUTYPE=${cpuType} -DSOMODE=${soMode}
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
echo "input cpuType=${cpuType} error!!!"
|
echo "input cpuType=${cpuType} error!!!"
|
||||||
|
|
|
@ -61,7 +61,7 @@ cp %{_compiledir}/build/bin/taosdemo %{buildroot}%{homepath}/bin
|
||||||
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
|
cp %{_compiledir}/build/lib/${libfile} %{buildroot}%{homepath}/driver
|
||||||
cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include
|
cp %{_compiledir}/../src/inc/taos.h %{buildroot}%{homepath}/include
|
||||||
cp %{_compiledir}/../src/inc/taoserror.h %{buildroot}%{homepath}/include
|
cp %{_compiledir}/../src/inc/taoserror.h %{buildroot}%{homepath}/include
|
||||||
cp -r %{_compiledir}/../src/connector/grafana %{buildroot}%{homepath}/connector
|
cp -r %{_compiledir}/../src/connector/grafanaplugin %{buildroot}%{homepath}/connector
|
||||||
cp -r %{_compiledir}/../src/connector/python %{buildroot}%{homepath}/connector
|
cp -r %{_compiledir}/../src/connector/python %{buildroot}%{homepath}/connector
|
||||||
cp -r %{_compiledir}/../src/connector/go %{buildroot}%{homepath}/connector
|
cp -r %{_compiledir}/../src/connector/go %{buildroot}%{homepath}/connector
|
||||||
cp %{_compiledir}/build/lib/taos-jdbcdriver*dist.* %{buildroot}%{homepath}/connector
|
cp %{_compiledir}/build/lib/taos-jdbcdriver*dist.* %{buildroot}%{homepath}/connector
|
||||||
|
|
|
@ -446,7 +446,7 @@ function install_service_on_systemd() {
|
||||||
${csudo} bash -c "echo >> ${tarbitratord_service_config}"
|
${csudo} bash -c "echo >> ${tarbitratord_service_config}"
|
||||||
${csudo} bash -c "echo '[Install]' >> ${tarbitratord_service_config}"
|
${csudo} bash -c "echo '[Install]' >> ${tarbitratord_service_config}"
|
||||||
${csudo} bash -c "echo 'WantedBy=multi-user.target' >> ${tarbitratord_service_config}"
|
${csudo} bash -c "echo 'WantedBy=multi-user.target' >> ${tarbitratord_service_config}"
|
||||||
${csudo} systemctl enable tarbitratord
|
# ${csudo} systemctl enable tarbitratord
|
||||||
|
|
||||||
nginx_service_config="${service_config_dir}/nginxd.service"
|
nginx_service_config="${service_config_dir}/nginxd.service"
|
||||||
${csudo} bash -c "echo '[Unit]' >> ${nginx_service_config}"
|
${csudo} bash -c "echo '[Unit]' >> ${nginx_service_config}"
|
||||||
|
|
|
@ -237,7 +237,7 @@ function install_data() {
|
||||||
}
|
}
|
||||||
|
|
||||||
function install_connector() {
|
function install_connector() {
|
||||||
${csudo} cp -rf ${source_dir}/src/connector/grafana ${install_main_dir}/connector
|
${csudo} cp -rf ${source_dir}/src/connector/grafanaplugin ${install_main_dir}/connector
|
||||||
${csudo} cp -rf ${source_dir}/src/connector/python ${install_main_dir}/connector
|
${csudo} cp -rf ${source_dir}/src/connector/python ${install_main_dir}/connector
|
||||||
${csudo} cp -rf ${source_dir}/src/connector/go ${install_main_dir}/connector
|
${csudo} cp -rf ${source_dir}/src/connector/go ${install_main_dir}/connector
|
||||||
|
|
||||||
|
|
|
@ -45,7 +45,7 @@ if [ "$osType" != "Darwin" ]; then
|
||||||
strip ${build_dir}/bin/taos
|
strip ${build_dir}/bin/taos
|
||||||
bin_files="${build_dir}/bin/taos ${script_dir}/remove_client.sh"
|
bin_files="${build_dir}/bin/taos ${script_dir}/remove_client.sh"
|
||||||
else
|
else
|
||||||
bin_files="${build_dir}/bin/taos ${build_dir}/bin/taosdump ${script_dir}/remove_client.sh ${script_dir}/set_core.sh"
|
bin_files="${build_dir}/bin/taos ${build_dir}/bin/taosdemo ${script_dir}/remove_client.sh ${script_dir}/set_core.sh"
|
||||||
fi
|
fi
|
||||||
lib_files="${build_dir}/lib/libtaos.so.${version}"
|
lib_files="${build_dir}/lib/libtaos.so.${version}"
|
||||||
else
|
else
|
||||||
|
@ -110,7 +110,7 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
|
||||||
if [ "$osType" != "Darwin" ]; then
|
if [ "$osType" != "Darwin" ]; then
|
||||||
cp ${build_dir}/lib/*.jar ${install_dir}/connector
|
cp ${build_dir}/lib/*.jar ${install_dir}/connector
|
||||||
fi
|
fi
|
||||||
cp -r ${connector_dir}/grafana ${install_dir}/connector/
|
cp -r ${connector_dir}/grafanaplugin ${install_dir}/connector/
|
||||||
cp -r ${connector_dir}/python ${install_dir}/connector/
|
cp -r ${connector_dir}/python ${install_dir}/connector/
|
||||||
cp -r ${connector_dir}/go ${install_dir}/connector
|
cp -r ${connector_dir}/go ${install_dir}/connector
|
||||||
fi
|
fi
|
||||||
|
|
|
@ -123,7 +123,7 @@ connector_dir="${code_dir}/connector"
|
||||||
mkdir -p ${install_dir}/connector
|
mkdir -p ${install_dir}/connector
|
||||||
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
|
if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
|
||||||
cp ${build_dir}/lib/*.jar ${install_dir}/connector
|
cp ${build_dir}/lib/*.jar ${install_dir}/connector
|
||||||
cp -r ${connector_dir}/grafana ${install_dir}/connector/
|
cp -r ${connector_dir}/grafanaplugin ${install_dir}/connector/
|
||||||
cp -r ${connector_dir}/python ${install_dir}/connector/
|
cp -r ${connector_dir}/python ${install_dir}/connector/
|
||||||
cp -r ${connector_dir}/go ${install_dir}/connector
|
cp -r ${connector_dir}/go ${install_dir}/connector
|
||||||
fi
|
fi
|
||||||
|
|
|
@ -165,7 +165,7 @@ int32_t balanceAllocVnodes(SVgObj *pVgroup) {
|
||||||
balanceSwapVnodeGid(pVgroup->vnodeGid, pVgroup->vnodeGid + 1);
|
balanceSwapVnodeGid(pVgroup->vnodeGid, pVgroup->vnodeGid + 1);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
int32_t randVal = rand() % 6;
|
int32_t randVal = randIndex++ % 6;
|
||||||
if (randVal == 1) { // 1, 0, 2
|
if (randVal == 1) { // 1, 0, 2
|
||||||
balanceSwapVnodeGid(pVgroup->vnodeGid + 0, pVgroup->vnodeGid + 1);
|
balanceSwapVnodeGid(pVgroup->vnodeGid + 0, pVgroup->vnodeGid + 1);
|
||||||
} else if (randVal == 2) { // 1, 2, 0
|
} else if (randVal == 2) { // 1, 2, 0
|
||||||
|
|
|
@ -327,13 +327,30 @@ JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_getResultSetImp(
|
||||||
STscObj *pObj = pSql->pTscObj;
|
STscObj *pObj = pSql->pTscObj;
|
||||||
|
|
||||||
if (tscIsUpdateQuery(pSql)) {
|
if (tscIsUpdateQuery(pSql)) {
|
||||||
// taos_free_result(pSql); // free result here
|
jniDebug("jobj:%p, conn:%p, update query, no resultset, %p", jobj, pObj, (void *)tres);
|
||||||
jniDebug("jobj:%p, conn:%p, no resultset, %p", jobj, pObj, (void *)tres);
|
|
||||||
return 0;
|
|
||||||
} else {
|
} else {
|
||||||
jniDebug("jobj:%p, conn:%p, get resultset, %p", jobj, pObj, (void *)tres);
|
jniDebug("jobj:%p, conn:%p, get resultset, %p", jobj, pObj, (void *)tres);
|
||||||
return tres;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return tres;
|
||||||
|
}
|
||||||
|
|
||||||
|
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_isUpdateQueryImp(JNIEnv *env, jobject jobj, jlong con,
|
||||||
|
jlong tres) {
|
||||||
|
TAOS *tscon = (TAOS *)con;
|
||||||
|
if (tscon == NULL) {
|
||||||
|
jniError("jobj:%p, connection is closed", jobj);
|
||||||
|
return JNI_CONNECTION_NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
if ((void *)tres == NULL) {
|
||||||
|
jniError("jobj:%p, conn:%p, resultset is null", jobj, tscon);
|
||||||
|
return JNI_RESULT_SET_NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
SSqlObj *pSql = (TAOS_RES *)tres;
|
||||||
|
|
||||||
|
return (tscIsUpdateQuery(pSql)? 1:0);
|
||||||
}
|
}
|
||||||
|
|
||||||
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_freeResultSetImp(JNIEnv *env, jobject jobj, jlong con,
|
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_freeResultSetImp(JNIEnv *env, jobject jobj, jlong con,
|
||||||
|
|
|
@ -907,7 +907,7 @@ static void genFinalResWithoutFill(SSqlRes* pRes, SLocalReducer *pLocalReducer,
|
||||||
savePrevRecordAndSetupFillInfo(pLocalReducer, pQueryInfo, pLocalReducer->pFillInfo);
|
savePrevRecordAndSetupFillInfo(pLocalReducer, pQueryInfo, pLocalReducer->pFillInfo);
|
||||||
}
|
}
|
||||||
|
|
||||||
memcpy(pRes->data, pBeforeFillData->data, pRes->numOfRows * pLocalReducer->finalRowSize);
|
memcpy(pRes->data, pBeforeFillData->data, (size_t)(pRes->numOfRows * pLocalReducer->finalRowSize));
|
||||||
|
|
||||||
pRes->numOfClauseTotal += pRes->numOfRows;
|
pRes->numOfClauseTotal += pRes->numOfRows;
|
||||||
pBeforeFillData->num = 0;
|
pBeforeFillData->num = 0;
|
||||||
|
@ -943,7 +943,7 @@ static void doFillResult(SSqlObj *pSql, SLocalReducer *pLocalReducer, bool doneO
|
||||||
for (int32_t i = 0; i < pQueryInfo->fieldsInfo.numOfOutput; ++i) {
|
for (int32_t i = 0; i < pQueryInfo->fieldsInfo.numOfOutput; ++i) {
|
||||||
TAOS_FIELD *pField = tscFieldInfoGetField(&pQueryInfo->fieldsInfo, i);
|
TAOS_FIELD *pField = tscFieldInfoGetField(&pQueryInfo->fieldsInfo, i);
|
||||||
memmove(pResPages[i]->data, pResPages[i]->data + pField->bytes * pQueryInfo->limit.offset,
|
memmove(pResPages[i]->data, pResPages[i]->data + pField->bytes * pQueryInfo->limit.offset,
|
||||||
newRows * pField->bytes);
|
(size_t)(newRows * pField->bytes));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -988,7 +988,7 @@ static void doFillResult(SSqlObj *pSql, SLocalReducer *pLocalReducer, bool doneO
|
||||||
for (int32_t i = 0; i < pQueryInfo->fieldsInfo.numOfOutput; ++i) {
|
for (int32_t i = 0; i < pQueryInfo->fieldsInfo.numOfOutput; ++i) {
|
||||||
TAOS_FIELD *pField = tscFieldInfoGetField(&pQueryInfo->fieldsInfo, i);
|
TAOS_FIELD *pField = tscFieldInfoGetField(&pQueryInfo->fieldsInfo, i);
|
||||||
int16_t offset = getColumnModelOffset(pLocalReducer->resColModel, i);
|
int16_t offset = getColumnModelOffset(pLocalReducer->resColModel, i);
|
||||||
memcpy(pRes->data + offset * pRes->numOfRows, pResPages[i]->data, pField->bytes * pRes->numOfRows);
|
memcpy(pRes->data + offset * pRes->numOfRows, pResPages[i]->data, (size_t)(pField->bytes * pRes->numOfRows));
|
||||||
}
|
}
|
||||||
} else { // todo bug??
|
} else { // todo bug??
|
||||||
reversedCopyFromInterpolationToDstBuf(pQueryInfo, pRes, pResPages, pLocalReducer);
|
reversedCopyFromInterpolationToDstBuf(pQueryInfo, pRes, pResPages, pLocalReducer);
|
||||||
|
|
|
@ -1652,10 +1652,12 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
|
||||||
}
|
}
|
||||||
|
|
||||||
SColumnIndex index = COLUMN_INDEX_INITIALIZER;
|
SColumnIndex index = COLUMN_INDEX_INITIALIZER;
|
||||||
if ((getColumnIndexByName(pCmd, &pParamElem->pNode->colInfo, pQueryInfo, &index) != TSDB_CODE_SUCCESS) ||
|
if ((getColumnIndexByName(pCmd, &pParamElem->pNode->colInfo, pQueryInfo, &index) != TSDB_CODE_SUCCESS)) {
|
||||||
index.columnIndex == TSDB_TBNAME_COLUMN_INDEX) {
|
|
||||||
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
||||||
}
|
}
|
||||||
|
if (index.columnIndex == TSDB_TBNAME_COLUMN_INDEX) {
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg6);
|
||||||
|
}
|
||||||
|
|
||||||
// 2. check if sql function can be applied on this column data type
|
// 2. check if sql function can be applied on this column data type
|
||||||
pTableMetaInfo = tscGetMetaInfo(pQueryInfo, index.tableIndex);
|
pTableMetaInfo = tscGetMetaInfo(pQueryInfo, index.tableIndex);
|
||||||
|
@ -1864,7 +1866,10 @@ int32_t addExprAndResultField(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, int32_t col
|
||||||
if (getColumnIndexByName(pCmd, &pParamElem->pNode->colInfo, pQueryInfo, &index) != TSDB_CODE_SUCCESS) {
|
if (getColumnIndexByName(pCmd, &pParamElem->pNode->colInfo, pQueryInfo, &index) != TSDB_CODE_SUCCESS) {
|
||||||
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
||||||
}
|
}
|
||||||
|
if (index.columnIndex == TSDB_TBNAME_COLUMN_INDEX) {
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg6);
|
||||||
|
}
|
||||||
|
|
||||||
pTableMetaInfo = tscGetMetaInfo(pQueryInfo, index.tableIndex);
|
pTableMetaInfo = tscGetMetaInfo(pQueryInfo, index.tableIndex);
|
||||||
SSchema* pSchema = tscGetTableSchema(pTableMetaInfo->pTableMeta);
|
SSchema* pSchema = tscGetTableSchema(pTableMetaInfo->pTableMeta);
|
||||||
|
|
||||||
|
|
|
@ -43,6 +43,14 @@ void tscUpdateSubscriptionProgress(void* sub, int64_t uid, TSKEY ts);
|
||||||
void tscSaveSubscriptionProgress(void* sub);
|
void tscSaveSubscriptionProgress(void* sub);
|
||||||
|
|
||||||
static int32_t minMsgSize() { return tsRpcHeadSize + 100; }
|
static int32_t minMsgSize() { return tsRpcHeadSize + 100; }
|
||||||
|
static int32_t getWaitingTimeInterval(int32_t count) {
|
||||||
|
int32_t initial = 100; // 100 ms by default
|
||||||
|
if (count <= 1) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
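// exponential back-off between retries: count = 2 -> 200 ms, count = 3 -> 400 ms, count = 4 -> 800 ms, ...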
|
return initial * (2<<(count - 2));
|
||||||
|
}
|
||||||
|
|
||||||
static void tscSetDnodeEpSet(SSqlObj* pSql, SCMVgroupInfo* pVgroupInfo) {
|
static void tscSetDnodeEpSet(SSqlObj* pSql, SCMVgroupInfo* pVgroupInfo) {
|
||||||
assert(pSql != NULL && pVgroupInfo != NULL && pVgroupInfo->numOfEps > 0);
|
assert(pSql != NULL && pVgroupInfo != NULL && pVgroupInfo->numOfEps > 0);
|
||||||
|
@ -100,7 +108,7 @@ static void tscDumpEpSetFromVgroupInfo(SCMCorVgroupInfo *pVgroupInfo, SRpcEpSet
|
||||||
pEpSet->inUse = (inUse >= 0 && inUse < TSDB_MAX_REPLICA) ? inUse: 0;
|
pEpSet->inUse = (inUse >= 0 && inUse < TSDB_MAX_REPLICA) ? inUse: 0;
|
||||||
pEpSet->numOfEps = pVgroupInfo->numOfEps;
|
pEpSet->numOfEps = pVgroupInfo->numOfEps;
|
||||||
for (int32_t i = 0; i < pVgroupInfo->numOfEps; ++i) {
|
for (int32_t i = 0; i < pVgroupInfo->numOfEps; ++i) {
|
||||||
strncpy(pEpSet->fqdn[i], pVgroupInfo->epAddr[i].fqdn, TSDB_FQDN_LEN);
|
tstrncpy(pEpSet->fqdn[i], pVgroupInfo->epAddr[i].fqdn, sizeof(pEpSet->fqdn[i]));
|
||||||
pEpSet->port[i] = pVgroupInfo->epAddr[i].port;
|
pEpSet->port[i] = pVgroupInfo->epAddr[i].port;
|
||||||
}
|
}
|
||||||
taosCorEndRead(&pVgroupInfo->version);
|
taosCorEndRead(&pVgroupInfo->version);
|
||||||
|
@ -117,7 +125,7 @@ static void tscUpdateVgroupInfo(SSqlObj *pObj, SRpcEpSet *pEpSet) {
|
||||||
pVgroupInfo->inUse = pEpSet->inUse;
|
pVgroupInfo->inUse = pEpSet->inUse;
|
||||||
pVgroupInfo->numOfEps = pEpSet->numOfEps;
|
pVgroupInfo->numOfEps = pEpSet->numOfEps;
|
||||||
for (int32_t i = 0; i < pVgroupInfo->numOfEps; i++) {
|
for (int32_t i = 0; i < pVgroupInfo->numOfEps; i++) {
|
||||||
strncpy(pVgroupInfo->epAddr[i].fqdn, pEpSet->fqdn[i], TSDB_FQDN_LEN);
|
tstrncpy(pVgroupInfo->epAddr[i].fqdn, pEpSet->fqdn[i], sizeof(pEpSet->fqdn[i]));
|
||||||
pVgroupInfo->epAddr[i].port = pEpSet->port[i];
|
pVgroupInfo->epAddr[i].port = pEpSet->port[i];
|
||||||
}
|
}
|
||||||
tscDebug("after: EndPoint in use: %d", pVgroupInfo->inUse);
|
tscDebug("after: EndPoint in use: %d", pVgroupInfo->inUse);
|
||||||
|
@ -275,6 +283,7 @@ void tscProcessMsgFromServer(SRpcMsg *rpcMsg, SRpcEpSet *pEpSet) {
|
||||||
(rpcMsg->code == TSDB_CODE_TDB_INVALID_TABLE_ID ||
|
(rpcMsg->code == TSDB_CODE_TDB_INVALID_TABLE_ID ||
|
||||||
rpcMsg->code == TSDB_CODE_VND_INVALID_VGROUP_ID ||
|
rpcMsg->code == TSDB_CODE_VND_INVALID_VGROUP_ID ||
|
||||||
rpcMsg->code == TSDB_CODE_RPC_NETWORK_UNAVAIL ||
|
rpcMsg->code == TSDB_CODE_RPC_NETWORK_UNAVAIL ||
|
||||||
|
rpcMsg->code == TSDB_CODE_APP_NOT_READY ||
|
||||||
rpcMsg->code == TSDB_CODE_TDB_TABLE_RECONFIGURE)) {
|
rpcMsg->code == TSDB_CODE_TDB_TABLE_RECONFIGURE)) {
|
||||||
tscWarn("%p it shall renew table meta, code:%s, retry:%d", pSql, tstrerror(rpcMsg->code), ++pSql->retry);
|
tscWarn("%p it shall renew table meta, code:%s, retry:%d", pSql, tstrerror(rpcMsg->code), ++pSql->retry);
|
||||||
|
|
||||||
|
@ -287,6 +296,12 @@ void tscProcessMsgFromServer(SRpcMsg *rpcMsg, SRpcEpSet *pEpSet) {
|
||||||
if (pSql->retry > pSql->maxRetry) {
|
if (pSql->retry > pSql->maxRetry) {
|
||||||
tscError("%p max retry %d reached, give up", pSql, pSql->maxRetry);
|
tscError("%p max retry %d reached, give up", pSql, pSql->maxRetry);
|
||||||
} else {
|
} else {
|
||||||
|
// wait for a little bit moment and then retry
|
||||||
|
if (rpcMsg->code == TSDB_CODE_APP_NOT_READY || rpcMsg->code == TSDB_CODE_VND_INVALID_VGROUP_ID) {
|
||||||
|
int32_t duration = getWaitingTimeInterval(pSql->retry);
|
||||||
|
taosMsleep(duration);
|
||||||
|
}
|
||||||
|
|
||||||
rpcMsg->code = tscRenewTableMeta(pSql, pTableMetaInfo->name);
|
rpcMsg->code = tscRenewTableMeta(pSql, pTableMetaInfo->name);
|
||||||
|
|
||||||
// if there is an error occurring, proceed to the following error handling procedure.
|
// if there is an error occurring, proceed to the following error handling procedure.
|
||||||
|
@ -708,7 +723,7 @@ int tscBuildQueryMsg(SSqlObj *pSql, SSqlInfo *pInfo) {
|
||||||
|
|
||||||
if (pColFilter->filterstr) {
|
if (pColFilter->filterstr) {
|
||||||
pFilterMsg->len = htobe64(pColFilter->len);
|
pFilterMsg->len = htobe64(pColFilter->len);
|
||||||
memcpy(pMsg, (void *)pColFilter->pz, pColFilter->len + 1);
|
memcpy(pMsg, (void *)pColFilter->pz, (size_t)(pColFilter->len + 1));
|
||||||
pMsg += (pColFilter->len + 1); // append the additional filter binary info
|
pMsg += (pColFilter->len + 1); // append the additional filter binary info
|
||||||
} else {
|
} else {
|
||||||
pFilterMsg->lowerBndi = htobe64(pColFilter->lowerBndi);
|
pFilterMsg->lowerBndi = htobe64(pColFilter->lowerBndi);
|
||||||
|
|
|
@ -623,7 +623,7 @@ static void tidTagRetrieveCallback(void* param, TAOS_RES* tres, int32_t numOfRow
|
||||||
|
|
||||||
// keep the results in memory
|
// keep the results in memory
|
||||||
if (numOfRows > 0) {
|
if (numOfRows > 0) {
|
||||||
size_t validLen = pSupporter->tagSize * pRes->numOfRows;
|
size_t validLen = (size_t)(pSupporter->tagSize * pRes->numOfRows);
|
||||||
size_t length = pSupporter->totalLen + validLen;
|
size_t length = pSupporter->totalLen + validLen;
|
||||||
|
|
||||||
// todo handle memory error
|
// todo handle memory error
|
||||||
|
@ -748,7 +748,7 @@ static void tsCompRetrieveCallback(void* param, TAOS_RES* tres, int32_t numOfRow
|
||||||
}
|
}
|
||||||
|
|
||||||
if (numOfRows > 0) { // write the compressed timestamp to disk file
|
if (numOfRows > 0) { // write the compressed timestamp to disk file
|
||||||
fwrite(pRes->data, pRes->numOfRows, 1, pSupporter->f);
|
fwrite(pRes->data, (size_t)pRes->numOfRows, 1, pSupporter->f);
|
||||||
fclose(pSupporter->f);
|
fclose(pSupporter->f);
|
||||||
pSupporter->f = NULL;
|
pSupporter->f = NULL;
|
||||||
|
|
||||||
|
|
|
@ -1,37 +0,0 @@
|
||||||
node_modules
|
|
||||||
npm-debug.log
|
|
||||||
coverage/
|
|
||||||
.aws-config.json
|
|
||||||
awsconfig
|
|
||||||
/emails/dist
|
|
||||||
/public_gen
|
|
||||||
/tmp
|
|
||||||
vendor/phantomjs/phantomjs
|
|
||||||
|
|
||||||
docs/AWS_S3_BUCKET
|
|
||||||
docs/GIT_BRANCH
|
|
||||||
docs/VERSION
|
|
||||||
docs/GITCOMMIT
|
|
||||||
docs/changed-files
|
|
||||||
docs/changed-files
|
|
||||||
|
|
||||||
# locally required config files
|
|
||||||
public/css/*.min.css
|
|
||||||
|
|
||||||
# Editor junk
|
|
||||||
*.sublime-workspace
|
|
||||||
*.swp
|
|
||||||
.idea/
|
|
||||||
*.iml
|
|
||||||
|
|
||||||
/data/*
|
|
||||||
/bin/*
|
|
||||||
|
|
||||||
conf/custom.ini
|
|
||||||
fig.yml
|
|
||||||
profile.cov
|
|
||||||
grafana
|
|
||||||
.notouch
|
|
||||||
|
|
||||||
# Test artifacts
|
|
||||||
/dist/test/
|
|
|
@ -1,14 +0,0 @@
|
||||||
{
|
|
||||||
"esnext": true,
|
|
||||||
"disallowImplicitTypeConversion": ["string"],
|
|
||||||
"disallowKeywords": ["with"],
|
|
||||||
"disallowMultipleLineBreaks": true,
|
|
||||||
"disallowMixedSpacesAndTabs": true,
|
|
||||||
"disallowTrailingWhitespace": true,
|
|
||||||
"requireSpacesInFunctionExpression": {
|
|
||||||
"beforeOpeningCurlyBrace": true
|
|
||||||
},
|
|
||||||
"disallowSpacesInsideArrayBrackets": true,
|
|
||||||
"disallowSpacesInsideParentheses": true,
|
|
||||||
"validateIndentation": 2
|
|
||||||
}
|
|
|
@ -1,85 +0,0 @@
|
||||||
module.exports = function(grunt) {
|
|
||||||
|
|
||||||
require('load-grunt-tasks')(grunt);
|
|
||||||
|
|
||||||
grunt.loadNpmTasks('grunt-execute');
|
|
||||||
grunt.loadNpmTasks('grunt-contrib-clean');
|
|
||||||
|
|
||||||
grunt.initConfig({
|
|
||||||
|
|
||||||
clean: ["dist"],
|
|
||||||
|
|
||||||
copy: {
|
|
||||||
src_to_dist: {
|
|
||||||
cwd: 'src',
|
|
||||||
expand: true,
|
|
||||||
src: ['**/*', '!**/*.js', '!**/*.scss'],
|
|
||||||
dest: 'dist'
|
|
||||||
},
|
|
||||||
dashboard_to_dist: {
|
|
||||||
expand: true,
|
|
||||||
src: ['dashboard/*'],
|
|
||||||
dest: 'dist'
|
|
||||||
},
|
|
||||||
pluginDef: {
|
|
||||||
expand: true,
|
|
||||||
src: ['README.md'],
|
|
||||||
dest: 'dist'
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
watch: {
|
|
||||||
rebuild_all: {
|
|
||||||
files: ['src/**/*'],
|
|
||||||
tasks: ['default'],
|
|
||||||
options: {spawn: false}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
babel: {
|
|
||||||
options: {
|
|
||||||
sourceMap: true,
|
|
||||||
presets: ['env'],
|
|
||||||
plugins: ['transform-object-rest-spread']
|
|
||||||
},
|
|
||||||
dist: {
|
|
||||||
files: [{
|
|
||||||
cwd: 'src',
|
|
||||||
expand: true,
|
|
||||||
src: ['**/*.js'],
|
|
||||||
dest: 'dist',
|
|
||||||
ext:'.js'
|
|
||||||
}]
|
|
||||||
},
|
|
||||||
distTestNoSystemJs: {
|
|
||||||
files: [{
|
|
||||||
cwd: 'src',
|
|
||||||
expand: true,
|
|
||||||
src: ['**/*.js'],
|
|
||||||
dest: 'dist/test',
|
|
||||||
ext:'.js'
|
|
||||||
}]
|
|
||||||
},
|
|
||||||
distTestsSpecsNoSystemJs: {
|
|
||||||
files: [{
|
|
||||||
expand: true,
|
|
||||||
cwd: 'spec',
|
|
||||||
src: ['**/*.js'],
|
|
||||||
dest: 'dist/test/spec',
|
|
||||||
ext:'.js'
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
mochaTest: {
|
|
||||||
test: {
|
|
||||||
options: {
|
|
||||||
reporter: 'spec'
|
|
||||||
},
|
|
||||||
src: ['dist/test/spec/test-main.js', 'dist/test/spec/*_spec.js']
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
grunt.registerTask('default', ['clean', 'copy:src_to_dist', 'copy:dashboard_to_dist', 'copy:pluginDef', 'babel', 'mochaTest']);
|
|
||||||
};
|
|
|
@ -1,661 +0,0 @@
|
||||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
|
||||||
Version 3, 19 November 2007
|
|
||||||
|
|
||||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
|
||||||
Everyone is permitted to copy and distribute verbatim copies
|
|
||||||
of this license document, but changing it is not allowed.
|
|
||||||
|
|
||||||
Preamble
|
|
||||||
|
|
||||||
The GNU Affero General Public License is a free, copyleft license for
|
|
||||||
software and other kinds of works, specifically designed to ensure
|
|
||||||
cooperation with the community in the case of network server software.
|
|
||||||
|
|
||||||
The licenses for most software and other practical works are designed
|
|
||||||
to take away your freedom to share and change the works. By contrast,
|
|
||||||
our General Public Licenses are intended to guarantee your freedom to
|
|
||||||
share and change all versions of a program--to make sure it remains free
|
|
||||||
software for all its users.
|
|
||||||
|
|
||||||
When we speak of free software, we are referring to freedom, not
|
|
||||||
price. Our General Public Licenses are designed to make sure that you
|
|
||||||
have the freedom to distribute copies of free software (and charge for
|
|
||||||
them if you wish), that you receive source code or can get it if you
|
|
||||||
want it, that you can change the software or use pieces of it in new
|
|
||||||
free programs, and that you know you can do these things.
|
|
||||||
|
|
||||||
Developers that use our General Public Licenses protect your rights
|
|
||||||
with two steps: (1) assert copyright on the software, and (2) offer
|
|
||||||
you this License which gives you legal permission to copy, distribute
|
|
||||||
and/or modify the software.
|
|
||||||
|
|
||||||
A secondary benefit of defending all users' freedom is that
|
|
||||||
improvements made in alternate versions of the program, if they
|
|
||||||
receive widespread use, become available for other developers to
|
|
||||||
incorporate. Many developers of free software are heartened and
|
|
||||||
encouraged by the resulting cooperation. However, in the case of
|
|
||||||
software used on network servers, this result may fail to come about.
|
|
||||||
The GNU General Public License permits making a modified version and
|
|
||||||
letting the public access it on a server without ever releasing its
|
|
||||||
source code to the public.
|
|
||||||
|
|
||||||
The GNU Affero General Public License is designed specifically to
|
|
||||||
ensure that, in such cases, the modified source code becomes available
|
|
||||||
to the community. It requires the operator of a network server to
|
|
||||||
provide the source code of the modified version running there to the
|
|
||||||
users of that server. Therefore, public use of a modified version, on
|
|
||||||
a publicly accessible server, gives the public access to the source
|
|
||||||
code of the modified version.
|
|
||||||
|
|
||||||
An older license, called the Affero General Public License and
|
|
||||||
published by Affero, was designed to accomplish similar goals. This is
|
|
||||||
a different license, not a version of the Affero GPL, but Affero has
|
|
||||||
released a new version of the Affero GPL which permits relicensing under
|
|
||||||
this license.
|
|
||||||
|
|
||||||
The precise terms and conditions for copying, distribution and modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU Affero General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based on the Program.

To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work.

A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language.

The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it.

The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work.

The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source.

The Corresponding Source for a work in source code form is that same work.

2. Basic Permissions.

All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary.

3. Protecting Users' Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures.

When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions:

a) The work must carry prominent notices stating that you modified it, and giving a relevant date.

b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices".

c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it.

d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.

A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways:

a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.

b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge.

c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b.

d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements.

e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.

A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work.

A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product.

"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made.

If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM).

The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying.

7. Additional Terms.

"Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission.

Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms:

a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or

b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or

c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or

d) Limiting the use for publicity purposes of names of licensors or authors of the material; or

e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or

f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors.

All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying.

If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.

Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way.

8. Termination.

You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11).

However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation.

Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice.

Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10.

9. Acceptance Not Required for Having Copies.

You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so.

10. Automatic Licensing of Downstream Recipients.

Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License.

An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts.

You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.

11. Patents.

A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version".

A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License.

Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version.

In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party.

If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid.

If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it.

A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007.

Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law.

12. No Surrender of Others' Freedom.

If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program.

13. Remote Network Interaction; Use with the GNU General Public License.

Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph.

Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License.

14. Revised Versions of this License.

The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation.

If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program.

Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version.

15. Disclaimer of Warranty.

THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

16. Limitation of Liability.

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

17. Interpretation of Sections 15 and 16.

If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year> <name of author>

    This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

    This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License along with this program. If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements.

You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see <https://www.gnu.org/licenses/>.
@ -1,96 +0,0 @@
TDengine Datasource - built by Taosdata Inc. www.taosdata.com

The TDengine backend server implements two URLs (see the curl sketch below for a quick check):

* `/heartbeat` returns 200 OK. Used for "Test connection" on the datasource config page.
* `/query` returns data based on the input SQL statements.
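
A quick way to verify the backend is reachable is to hit `/heartbeat` directly with curl. This is only a sketch: the host and port below (`http://localhost:6020`) are assumptions, not values documented in this README; substitute the address your backend actually listens on.

```bash
# Exercise the endpoint used by "Test connection" on the datasource config page.
# NOTE: http://localhost:6020 is an assumed address; replace it with your backend's host and port.
curl -i http://localhost:6020/heartbeat
```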

## Installation

To install this plugin, copy the data source directory to /var/lib/grafana/plugins/ and restart grafana-server. The new data source should then be available in the data source type dropdown in the Add Data Source view.

```
cp -r <tdengine-extract-dir>/connector/grafana/tdengine /var/lib/grafana/plugins/
sudo service grafana-server restart
```

### Query API

Example request

```json
[{
  "refId": "A",
  "alias": "taosd-memory",
  "sql": "select avg(mem_taosd) from sys.dn where ts > now-5m and ts < now interval(500a)"
},
{
  "refId": "B",
  "alias": "system-memory",
  "sql": "select avg(mem_system) from sys.dn where ts > now-5m and ts < now interval(500a)"
}]
```

Example response

```json
[{
  "datapoints": [
    [206.488281, 1538137825000],
    [206.488281, 1538137855000],
    [206.488281, 1538137885500],
    [210.609375, 1538137915500],
    [210.867188, 1538137945500]
  ],
  "refId": "A",
  "target": "taosd-memory"
},
{
  "datapoints": [
    [2910.218750, 1538137825000],
    [2912.265625, 1538137855000],
    [2912.437500, 1538137885500],
    [2916.644531, 1538137915500],
    [2917.066406, 1538137945500]
  ],
  "refId": "B",
  "target": "system-memory"
}]
```
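
The same `/query` request can also be issued outside Grafana for debugging. A minimal sketch, reusing the first entry of the example request above; the host and port are assumptions and should be replaced with your backend's actual address.

```bash
# Post one of the documented example queries to /query and print the JSON reply.
# NOTE: http://localhost:6020 is an assumed address, not documented here.
curl -s -X POST http://localhost:6020/query \
  -H 'Content-Type: application/json' \
  -d '[{"refId":"A","alias":"taosd-memory","sql":"select avg(mem_taosd) from sys.dn where ts > now-5m and ts < now interval(500a)"}]'
```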

### Heartbeat API

Example request

```
GET /heartbeat
```

Example response

```json
{
  "message": "Grafana server receive a quest from you!"
}
```

### Dev setup

This plugin requires node 6.10.0.

```bash
npm install -g yarn
yarn install
npm run build
```
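
After a successful build, one way to try the plugin against a local Grafana instance is to copy this plugin directory into Grafana's plugin folder and restart, mirroring the Installation section above. A minimal sketch, assuming `/var/lib/grafana/plugins` is Grafana's plugin directory and that the command is run from the plugin's root directory:

```bash
# Copy the freshly built plugin into Grafana's plugin directory (path is an assumption)
# and restart Grafana so it picks the plugin up.
sudo cp -r . /var/lib/grafana/plugins/tdengine
sudo service grafana-server restart
```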

### Import Dashboard

After logging in at `http://localhost:3000`, you can import the TDengine demo dashboard to monitor system metrics.

You can import `dashboard/tdengine-grafana.json`:



After the import is finished:


@ -1,588 +0,0 @@
{
  "annotations": { "list": [ { "builtIn": 1, "datasource": "-- Grafana --", "enable": true, "hide": true, "iconColor": "rgba(0, 211, 255, 1)", "name": "Annotations & Alerts", "type": "dashboard" } ] },
  "editable": true, "gnetId": null, "graphTooltip": 0, "id": 3, "links": [],
  "panels": [
    {
      "cacheTimeout": null, "colorBackground": false, "colorValue": true,
      "colors": [ "#299c46", "rgba(237, 129, 40, 0.89)", "#d44a3a" ],
      "datasource": "TDengine", "description": "total select request per minute last hour", "format": "none",
      "gauge": { "maxValue": 100, "minValue": 0, "show": false, "thresholdLabels": false, "thresholdMarkers": true },
      "gridPos": { "h": 6, "w": 12, "x": 0, "y": 0 },
      "id": 8, "interval": null, "links": [], "mappingType": 1,
      "mappingTypes": [ { "name": "value to text", "value": 1 }, { "name": "range to text", "value": 2 } ],
      "maxDataPoints": 100, "nullPointMode": "connected", "nullText": null, "options": {},
      "postfix": "次数/min", "postfixFontSize": "20%", "prefix": "", "prefixFontSize": "50%",
      "rangeMaps": [ { "from": "null", "text": "N/A", "to": "null" } ],
      "sparkline": { "fillColor": "rgba(31, 118, 189, 0.18)", "full": true, "lineColor": "rgb(31, 120, 193)", "show": true, "ymax": null, "ymin": null },
      "tableColumn": "",
      "targets": [ { "alias": "req_select", "refId": "A", "sql": "select sum(req_select) from log.dn where ts >= now-1h and ts < now interval(1m)", "target": "select metric", "type": "timeserie" } ],
      "thresholds": "120,240", "timeFrom": null, "timeShift": null, "title": "req select", "type": "singlestat", "valueFontSize": "150%",
      "valueMaps": [ { "op": "=", "text": "N/A", "value": "null" } ],
      "valueName": "total"
    },
    {
      "cacheTimeout": null, "colorBackground": false, "colorValue": true,
      "colors": [ "#299c46", "rgba(237, 129, 40, 0.89)", "#d44a3a" ],
      "datasource": "TDengine", "description": "total insert request per minute for last hour", "format": "none",
      "gauge": { "maxValue": 100, "minValue": 0, "show": false, "thresholdLabels": false, "thresholdMarkers": true },
      "gridPos": { "h": 6, "w": 12, "x": 12, "y": 0 },
      "id": 6, "interval": null, "links": [], "mappingType": 1,
      "mappingTypes": [ { "name": "value to text", "value": 1 }, { "name": "range to text", "value": 2 } ],
      "maxDataPoints": 100, "nullPointMode": "connected", "nullText": null, "options": {},
      "postfix": "次数/min", "postfixFontSize": "20%", "prefix": "", "prefixFontSize": "50%",
      "rangeMaps": [ { "from": "null", "text": "N/A", "to": "null" } ],
      "sparkline": { "fillColor": "rgba(31, 118, 189, 0.18)", "full": false, "lineColor": "rgb(31, 120, 193)", "show": true, "ymax": null, "ymin": null },
      "tableColumn": "",
      "targets": [ { "alias": "req_insert", "refId": "A", "sql": "select sum(req_insert) from log.dn where ts >= now-1h and ts < now interval(1m)", "target": "select metric", "type": "timeserie" } ],
      "thresholds": "110,240", "timeFrom": null, "timeShift": null, "title": "req insert", "type": "singlestat", "valueFontSize": "150%",
      "valueMaps": [ { "op": "=", "text": "N/A", "value": "null" } ],
      "valueName": "total"
    },
    {
      "datasource": "TDengine", "description": "taosd max memery last 10 minutes",
      "gridPos": { "h": 6, "w": 8, "x": 0, "y": 6 },
      "id": 12,
      "options": {
        "fieldOptions": {
          "calcs": [ "mean" ],
          "defaults": { "mappings": [], "max": 4096, "min": 0, "thresholds": [ { "color": "green", "value": null }, { "color": "red", "value": 80 }, { "color": "#EAB839", "value": 2048 } ], "unit": "decmbytes" },
          "override": {}, "values": false
        },
        "orientation": "auto", "showThresholdLabels": true, "showThresholdMarkers": true
      },
      "pluginVersion": "6.4.3",
      "targets": [ { "alias": "mem_taosd", "refId": "A", "sql": "select max(mem_taosd) from log.dn where ts >= now -10m and ts < now", "target": "select metric", "type": "timeserie" } ],
      "timeFrom": null, "timeShift": null, "title": "taosd memery", "type": "gauge"
    },
    {
      "datasource": "TDengine", "description": "max System Memory last 1 hour",
      "gridPos": { "h": 6, "w": 8, "x": 8, "y": 6 },
      "id": 10,
      "options": {
        "fieldOptions": {
          "calcs": [ "last" ],
          "defaults": { "mappings": [], "max": 4, "min": 0, "thresholds": [ { "color": "green", "value": null }, { "color": "semi-dark-orange", "value": 60 }, { "color": "dark-red", "value": 80 } ], "title": "", "unit": "decmbytes" },
          "override": {}, "values": false
        },
        "orientation": "auto", "showThresholdLabels": true, "showThresholdMarkers": true
      },
      "pluginVersion": "6.4.3",
      "targets": [ { "alias": "mem_system", "refId": "A", "sql": "select max(mem_system) from log.dn where ts >= now -10h and ts < now", "target": "select metric", "type": "timeserie" } ],
      "timeFrom": null, "timeShift": null, "title": "system memory", "type": "gauge"
    },
    {
      "datasource": "TDengine", "description": "avg band speed last one minute",
      "gridPos": { "h": 6, "w": 8, "x": 16, "y": 6 },
      "id": 14,
      "options": {
        "fieldOptions": {
          "calcs": [ "last" ],
          "defaults": { "mappings": [], "max": 8192, "min": 0, "thresholds": [ { "color": "green", "value": null }, { "color": "#EAB839", "value": 4916 }, { "color": "red", "value": 6554 } ], "unit": "Kbits" },
          "override": {}, "values": false
        },
        "orientation": "auto", "showThresholdLabels": true, "showThresholdMarkers": true
      },
      "pluginVersion": "6.4.3",
      "targets": [ { "alias": "band_speed", "refId": "A", "sql": "select avg(band_speed) from log.dn where ts >= now-1h and ts < now interval(1m)", "target": "select metric", "type": "timeserie" } ],
      "timeFrom": null, "timeShift": null, "title": "band speed", "type": "gauge"
    },
    {
      "aliasColors": {}, "bars": false, "cacheTimeout": null, "dashLength": 10, "dashes": false,
      "datasource": "TDengine", "description": "monitor system cpu", "fill": 1, "fillGradient": 0,
      "gridPos": { "h": 11, "w": 12, "x": 0, "y": 12 },
      "hideTimeOverride": true, "id": 2,
      "legend": { "avg": false, "current": false, "max": false, "min": false, "show": true, "total": false, "values": false },
      "lines": true, "linewidth": 1, "links": [], "nullPointMode": "null",
      "options": { "dataLinks": [] },
      "percentage": false, "pluginVersion": "6.4.3", "pointradius": 2, "points": false, "renderer": "flot",
      "seriesOverrides": [], "spaceLength": 10, "stack": false, "steppedLine": false,
      "targets": [
        { "alias": "cpu_system11", "hide": false, "refId": "A", "sql": "select avg(cpu_system) from log.dn where ts >= now-1h and ts < now interval(1s)", "target": "select metric", "type": "timeserie" },
        { "alias": "cpu_taosd", "hide": false, "refId": "B", "sql": "select avg(cpu_taosd) from log.dn where ts >= now-1h and ts < now interval(1s)", "target": "select metric", "type": "timeserie" }
      ],
      "thresholds": [], "timeFrom": "1h", "timeRegions": [], "timeShift": "30s", "title": "cpu_system",
      "tooltip": { "shared": true, "sort": 0, "value_type": "individual" },
      "type": "graph",
      "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
      "yaxes": [
        { "decimals": null, "format": "percent", "label": "使用占比", "logBase": 1, "max": null, "min": null, "show": true },
        { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": false }
      ],
      "yaxis": { "align": false, "alignLevel": null }
    },
    {
      "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false,
      "datasource": "TDengine", "fill": 1, "fillGradient": 0,
      "gridPos": { "h": 11, "w": 12, "x": 12, "y": 12 },
      "id": 18,
      "legend": { "avg": false, "current": false, "max": false, "min": false, "show": true, "total": false, "values": false },
      "lines": true, "linewidth": 1, "nullPointMode": "null",
      "options": { "dataLinks": [] },
      "percentage": false, "pointradius": 2, "points": false, "renderer": "flot",
      "seriesOverrides": [], "spaceLength": 10, "stack": false, "steppedLine": false,
      "targets": [
        { "alias": "", "refId": "A", "sql": "select avg(disk_used) disk_used from log.dn where ts >= $from and ts < $to interval(1s) group by ipaddr", "target": "select metric", "type": "timeserie" }
      ],
      "thresholds": [], "timeFrom": null, "timeRegions": [], "timeShift": null, "title": "avg_disk_used",
      "tooltip": { "shared": true, "sort": 0, "value_type": "individual" },
      "type": "graph",
      "xaxis": { "buckets": null, "mode": "time", "name": null, "show": true, "values": [] },
      "yaxes": [
        { "format": "decgbytes", "label": "", "logBase": 1, "max": null, "min": null, "show": true },
        { "format": "short", "label": null, "logBase": 1, "max": null, "min": null, "show": true }
      ],
      "yaxis": { "align": false, "alignLevel": null }
    }
  ],
  "refresh": "5s", "schemaVersion": 20, "style": "dark", "tags": [],
  "templating": { "list": [] },
  "time": { "from": "now-1h", "to": "now" },
  "timepicker": { "refresh_intervals": [ "5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d" ] },
  "timezone": "", "title": "TDengine", "uid": "FE-vpe0Wk", "version": 1
}
@ -1,3 +0,0 @@
.generic-datasource-query-row .query-keyword {
  width: 75px;
}
||||||
"value": "null"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"valueName": "total"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"datasource": "TDengine",
|
|
||||||
"description": "taosd max memery last 10 minutes",
|
|
||||||
"gridPos": {
|
|
||||||
"h": 6,
|
|
||||||
"w": 8,
|
|
||||||
"x": 0,
|
|
||||||
"y": 6
|
|
||||||
},
|
|
||||||
"id": 12,
|
|
||||||
"options": {
|
|
||||||
"fieldOptions": {
|
|
||||||
"calcs": [
|
|
||||||
"mean"
|
|
||||||
],
|
|
||||||
"defaults": {
|
|
||||||
"mappings": [],
|
|
||||||
"max": 4096,
|
|
||||||
"min": 0,
|
|
||||||
"thresholds": [
|
|
||||||
{
|
|
||||||
"color": "green",
|
|
||||||
"value": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"color": "red",
|
|
||||||
"value": 80
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"color": "#EAB839",
|
|
||||||
"value": 2048
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"unit": "decmbytes"
|
|
||||||
},
|
|
||||||
"override": {},
|
|
||||||
"values": false
|
|
||||||
},
|
|
||||||
"orientation": "auto",
|
|
||||||
"showThresholdLabels": true,
|
|
||||||
"showThresholdMarkers": true
|
|
||||||
},
|
|
||||||
"pluginVersion": "6.4.3",
|
|
||||||
"targets": [
|
|
||||||
{
|
|
||||||
"alias": "mem_taosd",
|
|
||||||
"refId": "A",
|
|
||||||
"sql": "select max(mem_taosd) from log.dn where ts >= now -10m and ts < now",
|
|
||||||
"target": "select metric",
|
|
||||||
"type": "timeserie"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"timeFrom": null,
|
|
||||||
"timeShift": null,
|
|
||||||
"title": "taosd memery",
|
|
||||||
"type": "gauge"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"datasource": "TDengine",
|
|
||||||
"description": "max System Memory last 1 hour",
|
|
||||||
"gridPos": {
|
|
||||||
"h": 6,
|
|
||||||
"w": 8,
|
|
||||||
"x": 8,
|
|
||||||
"y": 6
|
|
||||||
},
|
|
||||||
"id": 10,
|
|
||||||
"options": {
|
|
||||||
"fieldOptions": {
|
|
||||||
"calcs": [
|
|
||||||
"last"
|
|
||||||
],
|
|
||||||
"defaults": {
|
|
||||||
"mappings": [],
|
|
||||||
"max": 4,
|
|
||||||
"min": 0,
|
|
||||||
"thresholds": [
|
|
||||||
{
|
|
||||||
"color": "green",
|
|
||||||
"value": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"color": "semi-dark-orange",
|
|
||||||
"value": 60
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"color": "dark-red",
|
|
||||||
"value": 80
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"title": "",
|
|
||||||
"unit": "decmbytes"
|
|
||||||
},
|
|
||||||
"override": {},
|
|
||||||
"values": false
|
|
||||||
},
|
|
||||||
"orientation": "auto",
|
|
||||||
"showThresholdLabels": true,
|
|
||||||
"showThresholdMarkers": true
|
|
||||||
},
|
|
||||||
"pluginVersion": "6.4.3",
|
|
||||||
"targets": [
|
|
||||||
{
|
|
||||||
"alias": "mem_system",
|
|
||||||
"refId": "A",
|
|
||||||
"sql": "select max(mem_system) from log.dn where ts >= now -10h and ts < now",
|
|
||||||
"target": "select metric",
|
|
||||||
"type": "timeserie"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"timeFrom": null,
|
|
||||||
"timeShift": null,
|
|
||||||
"title": "system memory",
|
|
||||||
"type": "gauge"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"datasource": "TDengine",
|
|
||||||
"description": "avg band speed last one minute",
|
|
||||||
"gridPos": {
|
|
||||||
"h": 6,
|
|
||||||
"w": 8,
|
|
||||||
"x": 16,
|
|
||||||
"y": 6
|
|
||||||
},
|
|
||||||
"id": 14,
|
|
||||||
"options": {
|
|
||||||
"fieldOptions": {
|
|
||||||
"calcs": [
|
|
||||||
"last"
|
|
||||||
],
|
|
||||||
"defaults": {
|
|
||||||
"mappings": [],
|
|
||||||
"max": 8192,
|
|
||||||
"min": 0,
|
|
||||||
"thresholds": [
|
|
||||||
{
|
|
||||||
"color": "green",
|
|
||||||
"value": null
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"color": "#EAB839",
|
|
||||||
"value": 4916
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"color": "red",
|
|
||||||
"value": 6554
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"unit": "Kbits"
|
|
||||||
},
|
|
||||||
"override": {},
|
|
||||||
"values": false
|
|
||||||
},
|
|
||||||
"orientation": "auto",
|
|
||||||
"showThresholdLabels": true,
|
|
||||||
"showThresholdMarkers": true
|
|
||||||
},
|
|
||||||
"pluginVersion": "6.4.3",
|
|
||||||
"targets": [
|
|
||||||
{
|
|
||||||
"alias": "band_speed",
|
|
||||||
"refId": "A",
|
|
||||||
"sql": "select avg(band_speed) from log.dn where ts >= now-1h and ts < now interval(1m)",
|
|
||||||
"target": "select metric",
|
|
||||||
"type": "timeserie"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"timeFrom": null,
|
|
||||||
"timeShift": null,
|
|
||||||
"title": "band speed",
|
|
||||||
"type": "gauge"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"aliasColors": {},
|
|
||||||
"bars": false,
|
|
||||||
"cacheTimeout": null,
|
|
||||||
"dashLength": 10,
|
|
||||||
"dashes": false,
|
|
||||||
"datasource": "TDengine",
|
|
||||||
"description": "monitor system cpu",
|
|
||||||
"fill": 1,
|
|
||||||
"fillGradient": 0,
|
|
||||||
"gridPos": {
|
|
||||||
"h": 11,
|
|
||||||
"w": 12,
|
|
||||||
"x": 0,
|
|
||||||
"y": 12
|
|
||||||
},
|
|
||||||
"hideTimeOverride": true,
|
|
||||||
"id": 2,
|
|
||||||
"legend": {
|
|
||||||
"avg": false,
|
|
||||||
"current": false,
|
|
||||||
"max": false,
|
|
||||||
"min": false,
|
|
||||||
"show": true,
|
|
||||||
"total": false,
|
|
||||||
"values": false
|
|
||||||
},
|
|
||||||
"lines": true,
|
|
||||||
"linewidth": 1,
|
|
||||||
"links": [],
|
|
||||||
"nullPointMode": "null",
|
|
||||||
"options": {
|
|
||||||
"dataLinks": []
|
|
||||||
},
|
|
||||||
"percentage": false,
|
|
||||||
"pluginVersion": "6.4.3",
|
|
||||||
"pointradius": 2,
|
|
||||||
"points": false,
|
|
||||||
"renderer": "flot",
|
|
||||||
"seriesOverrides": [],
|
|
||||||
"spaceLength": 10,
|
|
||||||
"stack": false,
|
|
||||||
"steppedLine": false,
|
|
||||||
"targets": [
|
|
||||||
{
|
|
||||||
"alias": "cpu_system11",
|
|
||||||
"hide": false,
|
|
||||||
"refId": "A",
|
|
||||||
"sql": "select avg(cpu_system) from log.dn where ts >= now-1h and ts < now interval(1s)",
|
|
||||||
"target": "select metric",
|
|
||||||
"type": "timeserie"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"alias": "cpu_taosd",
|
|
||||||
"hide": false,
|
|
||||||
"refId": "B",
|
|
||||||
"sql": "select avg(cpu_taosd) from log.dn where ts >= now-1h and ts < now interval(1s)",
|
|
||||||
"target": "select metric",
|
|
||||||
"type": "timeserie"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"thresholds": [],
|
|
||||||
"timeFrom": "1h",
|
|
||||||
"timeRegions": [],
|
|
||||||
"timeShift": "30s",
|
|
||||||
"title": "cpu_system",
|
|
||||||
"tooltip": {
|
|
||||||
"shared": true,
|
|
||||||
"sort": 0,
|
|
||||||
"value_type": "individual"
|
|
||||||
},
|
|
||||||
"type": "graph",
|
|
||||||
"xaxis": {
|
|
||||||
"buckets": null,
|
|
||||||
"mode": "time",
|
|
||||||
"name": null,
|
|
||||||
"show": true,
|
|
||||||
"values": []
|
|
||||||
},
|
|
||||||
"yaxes": [
|
|
||||||
{
|
|
||||||
"decimals": null,
|
|
||||||
"format": "percent",
|
|
||||||
"label": "使用占比",
|
|
||||||
"logBase": 1,
|
|
||||||
"max": null,
|
|
||||||
"min": null,
|
|
||||||
"show": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"format": "short",
|
|
||||||
"label": null,
|
|
||||||
"logBase": 1,
|
|
||||||
"max": null,
|
|
||||||
"min": null,
|
|
||||||
"show": false
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"yaxis": {
|
|
||||||
"align": false,
|
|
||||||
"alignLevel": null
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"aliasColors": {},
|
|
||||||
"bars": false,
|
|
||||||
"dashLength": 10,
|
|
||||||
"dashes": false,
|
|
||||||
"datasource": "TDengine",
|
|
||||||
"fill": 1,
|
|
||||||
"fillGradient": 0,
|
|
||||||
"gridPos": {
|
|
||||||
"h": 11,
|
|
||||||
"w": 12,
|
|
||||||
"x": 12,
|
|
||||||
"y": 12
|
|
||||||
},
|
|
||||||
"id": 18,
|
|
||||||
"legend": {
|
|
||||||
"avg": false,
|
|
||||||
"current": false,
|
|
||||||
"max": false,
|
|
||||||
"min": false,
|
|
||||||
"show": true,
|
|
||||||
"total": false,
|
|
||||||
"values": false
|
|
||||||
},
|
|
||||||
"lines": true,
|
|
||||||
"linewidth": 1,
|
|
||||||
"nullPointMode": "null",
|
|
||||||
"options": {
|
|
||||||
"dataLinks": []
|
|
||||||
},
|
|
||||||
"percentage": false,
|
|
||||||
"pointradius": 2,
|
|
||||||
"points": false,
|
|
||||||
"renderer": "flot",
|
|
||||||
"seriesOverrides": [],
|
|
||||||
"spaceLength": 10,
|
|
||||||
"stack": false,
|
|
||||||
"steppedLine": false,
|
|
||||||
"targets": [
|
|
||||||
{
|
|
||||||
"alias": "",
|
|
||||||
"refId": "A",
|
|
||||||
"sql": "select avg(disk_used) disk_used from log.dn where ts >= $from and ts < $to interval(1s) group by ipaddr",
|
|
||||||
"target": "select metric",
|
|
||||||
"type": "timeserie"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"thresholds": [],
|
|
||||||
"timeFrom": null,
|
|
||||||
"timeRegions": [],
|
|
||||||
"timeShift": null,
|
|
||||||
"title": "avg_disk_used",
|
|
||||||
"tooltip": {
|
|
||||||
"shared": true,
|
|
||||||
"sort": 0,
|
|
||||||
"value_type": "individual"
|
|
||||||
},
|
|
||||||
"type": "graph",
|
|
||||||
"xaxis": {
|
|
||||||
"buckets": null,
|
|
||||||
"mode": "time",
|
|
||||||
"name": null,
|
|
||||||
"show": true,
|
|
||||||
"values": []
|
|
||||||
},
|
|
||||||
"yaxes": [
|
|
||||||
{
|
|
||||||
"format": "decgbytes",
|
|
||||||
"label": "",
|
|
||||||
"logBase": 1,
|
|
||||||
"max": null,
|
|
||||||
"min": null,
|
|
||||||
"show": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"format": "short",
|
|
||||||
"label": null,
|
|
||||||
"logBase": 1,
|
|
||||||
"max": null,
|
|
||||||
"min": null,
|
|
||||||
"show": true
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"yaxis": {
|
|
||||||
"align": false,
|
|
||||||
"alignLevel": null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"refresh": "5s",
|
|
||||||
"schemaVersion": 20,
|
|
||||||
"style": "dark",
|
|
||||||
"tags": [],
|
|
||||||
"templating": {
|
|
||||||
"list": []
|
|
||||||
},
|
|
||||||
"time": {
|
|
||||||
"from": "now-1h",
|
|
||||||
"to": "now"
|
|
||||||
},
|
|
||||||
"timepicker": {
|
|
||||||
"refresh_intervals": [
|
|
||||||
"5s",
|
|
||||||
"10s",
|
|
||||||
"30s",
|
|
||||||
"1m",
|
|
||||||
"5m",
|
|
||||||
"15m",
|
|
||||||
"30m",
|
|
||||||
"1h",
|
|
||||||
"2h",
|
|
||||||
"1d"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"timezone": "",
|
|
||||||
"title": "TDengine",
|
|
||||||
"uid": "FE-vpe0Wk",
|
|
||||||
"version": 1
|
|
||||||
}
|
|
(image file removed; previous size 173 KiB)
|
@ -1,156 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
exports.GenericDatasource = undefined;
|
|
||||||
|
|
||||||
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
|
|
||||||
|
|
||||||
var _lodash = require('lodash');
|
|
||||||
|
|
||||||
var _lodash2 = _interopRequireDefault(_lodash);
|
|
||||||
|
|
||||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
|
||||||
|
|
||||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
|
||||||
|
|
||||||
var GenericDatasource = exports.GenericDatasource = function () {
|
|
||||||
function GenericDatasource(instanceSettings, $q, backendSrv, templateSrv) {
|
|
||||||
_classCallCheck(this, GenericDatasource);
|
|
||||||
|
|
||||||
this.type = instanceSettings.type;
|
|
||||||
this.url = instanceSettings.url;
|
|
||||||
this.name = instanceSettings.name;
|
|
||||||
this.q = $q;
|
|
||||||
this.backendSrv = backendSrv;
|
|
||||||
this.templateSrv = templateSrv;
|
|
||||||
this.headers = { 'Content-Type': 'application/json' };
|
|
||||||
this.headers.Authorization = this.getAuthorization(instanceSettings.jsonData);
|
|
||||||
}
|
|
||||||
|
|
||||||
_createClass(GenericDatasource, [{
|
|
||||||
key: 'query',
|
|
||||||
value: function query(options) {
|
|
||||||
var targets = this.buildQueryParameters(options);
|
|
||||||
|
|
||||||
if (targets.length <= 0) {
|
|
||||||
return this.q.when({ data: [] });
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.doRequest({
|
|
||||||
url: this.url + '/grafana/query',
|
|
||||||
data: targets,
|
|
||||||
method: 'POST'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'testDatasource',
|
|
||||||
value: function testDatasource() {
|
|
||||||
return this.doRequest({
|
|
||||||
url: this.url + '/grafana/heartbeat',
|
|
||||||
method: 'GET'
|
|
||||||
}).then(function (response) {
|
|
||||||
if (response.status === 200) {
|
|
||||||
return { status: "success", message: "TDengine Data source is working", title: "Success" };
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'doRequest',
|
|
||||||
value: function doRequest(options) {
|
|
||||||
options.headers = this.headers;
|
|
||||||
|
|
||||||
return this.backendSrv.datasourceRequest(options);
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'buildQueryParameters',
|
|
||||||
value: function buildQueryParameters(options) {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
var targets = _lodash2.default.map(options.targets, function (target) {
|
|
||||||
return {
|
|
||||||
refId: target.refId,
|
|
||||||
alias: _this.generateAlias(options, target),
|
|
||||||
sql: _this.generateSql(options, target)
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return targets;
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'encode',
|
|
||||||
value: function encode(input) {
|
|
||||||
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
|
|
||||||
var output = "";
|
|
||||||
var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
|
|
||||||
var i = 0;
|
|
||||||
while (i < input.length) {
|
|
||||||
chr1 = input.charCodeAt(i++);
|
|
||||||
chr2 = input.charCodeAt(i++);
|
|
||||||
chr3 = input.charCodeAt(i++);
|
|
||||||
enc1 = chr1 >> 2;
|
|
||||||
enc2 = (chr1 & 3) << 4 | chr2 >> 4;
|
|
||||||
enc3 = (chr2 & 15) << 2 | chr3 >> 6;
|
|
||||||
enc4 = chr3 & 63;
|
|
||||||
if (isNaN(chr2)) {
|
|
||||||
enc3 = enc4 = 64;
|
|
||||||
} else if (isNaN(chr3)) {
|
|
||||||
enc4 = 64;
|
|
||||||
}
|
|
||||||
output = output + _keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4);
|
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'getAuthorization',
|
|
||||||
value: function getAuthorization(jsonData) {
|
|
||||||
jsonData = jsonData || {};
|
|
||||||
var defaultUser = jsonData.user || "root";
|
|
||||||
var defaultPassword = jsonData.password || "taosdata";
|
|
||||||
|
|
||||||
return "Basic " + this.encode(defaultUser + ":" + defaultPassword);
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'generateAlias',
|
|
||||||
value: function generateAlias(options, target) {
|
|
||||||
var alias = target.alias || "";
|
|
||||||
alias = this.templateSrv.replace(alias, options.scopedVars, 'csv');
|
|
||||||
return alias;
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'generateSql',
|
|
||||||
value: function generateSql(options, target) {
|
|
||||||
var sql = target.sql;
|
|
||||||
if (sql == null || sql == "") {
|
|
||||||
return sql;
|
|
||||||
}
|
|
||||||
|
|
||||||
var queryStart = "now-1h";
|
|
||||||
if (options != null && options.range != null && options.range.from != null) {
|
|
||||||
queryStart = options.range.from.toISOString();
|
|
||||||
}
|
|
||||||
|
|
||||||
var queryEnd = "now";
|
|
||||||
if (options != null && options.range != null && options.range.to != null) {
|
|
||||||
queryEnd = options.range.to.toISOString();
|
|
||||||
}
|
|
||||||
var intervalMs = options.intervalMs || "20000";
|
|
||||||
|
|
||||||
intervalMs += "a";
|
|
||||||
sql = sql.replace(/^\s+|\s+$/gm, '');
|
|
||||||
sql = sql.replace("$from", "'" + queryStart + "'");
|
|
||||||
sql = sql.replace("$begin", "'" + queryStart + "'");
|
|
||||||
sql = sql.replace("$to", "'" + queryEnd + "'");
|
|
||||||
sql = sql.replace("$end", "'" + queryEnd + "'");
|
|
||||||
sql = sql.replace("$interval", intervalMs);
|
|
||||||
|
|
||||||
sql = this.templateSrv.replace(sql, options.scopedVars, 'csv');
|
|
||||||
return sql;
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
return GenericDatasource;
|
|
||||||
}();
|
|
||||||
//# sourceMappingURL=datasource.js.map
|
|
(image file removed; previous size 3.1 KiB)
|
@ -1,37 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
exports.AnnotationsQueryCtrl = exports.QueryOptionsCtrl = exports.ConfigCtrl = exports.QueryCtrl = exports.Datasource = undefined;
|
|
||||||
|
|
||||||
var _datasource = require('./datasource');
|
|
||||||
|
|
||||||
var _query_ctrl = require('./query_ctrl');
|
|
||||||
|
|
||||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
|
||||||
|
|
||||||
var GenericConfigCtrl = function GenericConfigCtrl() {
|
|
||||||
_classCallCheck(this, GenericConfigCtrl);
|
|
||||||
};
|
|
||||||
|
|
||||||
GenericConfigCtrl.templateUrl = 'partials/config.html';
|
|
||||||
|
|
||||||
var GenericQueryOptionsCtrl = function GenericQueryOptionsCtrl() {
|
|
||||||
_classCallCheck(this, GenericQueryOptionsCtrl);
|
|
||||||
};
|
|
||||||
|
|
||||||
GenericQueryOptionsCtrl.templateUrl = 'partials/query.options.html';
|
|
||||||
|
|
||||||
var GenericAnnotationsQueryCtrl = function GenericAnnotationsQueryCtrl() {
|
|
||||||
_classCallCheck(this, GenericAnnotationsQueryCtrl);
|
|
||||||
};
|
|
||||||
|
|
||||||
GenericAnnotationsQueryCtrl.templateUrl = 'partials/annotations.editor.html';
|
|
||||||
|
|
||||||
exports.Datasource = _datasource.GenericDatasource;
|
|
||||||
exports.QueryCtrl = _query_ctrl.GenericDatasourceQueryCtrl;
|
|
||||||
exports.ConfigCtrl = GenericConfigCtrl;
|
|
||||||
exports.QueryOptionsCtrl = GenericQueryOptionsCtrl;
|
|
||||||
exports.AnnotationsQueryCtrl = GenericAnnotationsQueryCtrl;
|
|
||||||
//# sourceMappingURL=module.js.map
|
|
|
@ -1 +0,0 @@
|
||||||
{"version":3,"sources":["../src/module.js"],"names":["GenericConfigCtrl","templateUrl","GenericQueryOptionsCtrl","GenericAnnotationsQueryCtrl","Datasource","GenericDatasource","QueryCtrl","GenericDatasourceQueryCtrl","ConfigCtrl","QueryOptionsCtrl","AnnotationsQueryCtrl"],"mappings":";;;;;;;AAAA;;AACA;;;;IAEMA,iB;;;;AACNA,kBAAkBC,WAAlB,GAAgC,sBAAhC;;IAEMC,uB;;;;AACNA,wBAAwBD,WAAxB,GAAsC,6BAAtC;;IAEME,2B;;;;AACNA,4BAA4BF,WAA5B,GAA0C,kCAA1C;;QAGuBG,U,GAArBC,6B;QAC8BC,S,GAA9BC,sC;QACqBC,U,GAArBR,iB;QAC2BS,gB,GAA3BP,uB;QAC+BQ,oB,GAA/BP,2B","file":"module.js","sourcesContent":["import {GenericDatasource} from './datasource';\nimport {GenericDatasourceQueryCtrl} from './query_ctrl';\n\nclass GenericConfigCtrl {}\nGenericConfigCtrl.templateUrl = 'partials/config.html';\n\nclass GenericQueryOptionsCtrl {}\nGenericQueryOptionsCtrl.templateUrl = 'partials/query.options.html';\n\nclass GenericAnnotationsQueryCtrl {}\nGenericAnnotationsQueryCtrl.templateUrl = 'partials/annotations.editor.html'\n\nexport {\n GenericDatasource as Datasource,\n GenericDatasourceQueryCtrl as QueryCtrl,\n GenericConfigCtrl as ConfigCtrl,\n GenericQueryOptionsCtrl as QueryOptionsCtrl,\n GenericAnnotationsQueryCtrl as AnnotationsQueryCtrl\n};\n"]}
|
|
|
@ -1,19 +0,0 @@
|
||||||
<h3 class="page-heading">TDengine Connection</h3>
|
|
||||||
|
|
||||||
<div class="gf-form-group">
|
|
||||||
<div class="gf-form max-width-30">
|
|
||||||
<span class="gf-form-label width-7">Host</span>
|
|
||||||
<input type="text" class="gf-form-input" ng-model='ctrl.current.url' placeholder="http://localhost:6041" bs-typeahead="{{['http://localhost:6041']}}" required></input>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form-inline">
|
|
||||||
<div class="gf-form max-width-15">
|
|
||||||
<span class="gf-form-label width-7">User</span>
|
|
||||||
<input type="text" class="gf-form-input" ng-model='ctrl.current.jsonData.user' placeholder="root"></input>
|
|
||||||
</div>
|
|
||||||
<div class="gf-form max-width-15">
|
|
||||||
<span class="gf-form-label width-7">Password</span>
|
|
||||||
<input type="password" class="gf-form-input" ng-model='ctrl.current.jsonData.password' placeholder="taosdata"></input>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
|
@ -1,58 +0,0 @@
|
||||||
<query-editor-row query-ctrl="ctrl" can-collapse="true" >
|
|
||||||
|
|
||||||
<div class="gf-form-inline">
|
|
||||||
<div class="gf-form gf-form--grow">
|
|
||||||
<label class="gf-form-label query-keyword width-7">INPUT SQL</label>
|
|
||||||
<input type="text" class="gf-form-input" ng-model="ctrl.target.sql" spellcheck='false' placeholder="select avg(mem_system) from log.dn where ts >= $from and ts < $to interval($interval)" ng-blur="ctrl.panelCtrl.refresh()" data-mode="sql"></input>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form-inline">
|
|
||||||
<div class="gf-form-inline" ng-hide="ctrl.target.resultFormat === 'table'">
|
|
||||||
<div class="gf-form max-width-30">
|
|
||||||
<label class="gf-form-label query-keyword width-7">ALIAS BY</label>
|
|
||||||
<input type="text" class="gf-form-input" ng-model="ctrl.target.alias" spellcheck='false' placeholder="Naming pattern" ng-blur="ctrl.panelCtrl.refresh()">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="gf-form">
|
|
||||||
<label class="gf-form-label query-keyword" ng-click="ctrl.generateSQL()">
|
|
||||||
GENERATE SQL
|
|
||||||
<i class="fa fa-caret-down" ng-show="ctrl.showGenerateSQL"></i>
|
|
||||||
<i class="fa fa-caret-right" ng-hide="ctrl.showGenerateSQL"></i>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
<div class="gf-form">
|
|
||||||
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
|
|
||||||
SHOW HELP
|
|
||||||
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
|
|
||||||
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form" ng-show="ctrl.showGenerateSQL">
|
|
||||||
<pre class="gf-form-pre">{{ctrl.lastGenerateSQL}}</pre>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form" ng-show="ctrl.showHelp">
|
|
||||||
<pre class="gf-form-pre alert alert-info">Use any SQL that can return Resultset such as:
|
|
||||||
- [[timestamp1, value1], [timestamp2, value2], ... ]
|
|
||||||
|
|
||||||
Macros:
|
|
||||||
- $from -> start timestamp of panel
|
|
||||||
- $to -> stop timestamp of panel
|
|
||||||
- $interval -> interval of panel
|
|
||||||
|
|
||||||
Example of SQL:
|
|
||||||
SELECT count(*)
|
|
||||||
FROM db.table
|
|
||||||
WHERE ts > $from and ts < $to
|
|
||||||
INTERVAL ($interval)
|
|
||||||
</pre>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form" ng-show="ctrl.lastQueryError">
|
|
||||||
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
</query-editor-row>
|
|
|
@ -1,35 +0,0 @@
|
||||||
{
|
|
||||||
"name": "TDengine",
|
|
||||||
"id": "taosdata-tdengine-datasource",
|
|
||||||
"type": "datasource",
|
|
||||||
|
|
||||||
"partials": {
|
|
||||||
"config": "partials/config.html"
|
|
||||||
},
|
|
||||||
|
|
||||||
"metrics": true,
|
|
||||||
"annotations": false,
|
|
||||||
|
|
||||||
"info": {
|
|
||||||
"description": "grafana datasource plugin for tdengine",
|
|
||||||
"author": {
|
|
||||||
"name": "Taosdata Inc.",
|
|
||||||
"url": "https://www.taosdata.com"
|
|
||||||
},
|
|
||||||
"logos": {
|
|
||||||
"small": "img/taosdata_logo.png",
|
|
||||||
"large": "img/taosdata_logo.png"
|
|
||||||
},
|
|
||||||
"links": [
|
|
||||||
{"name": "GitHub", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine"},
|
|
||||||
{"name": "AGPL 3.0", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine/LICENSE"}
|
|
||||||
],
|
|
||||||
"version": "1.0.0",
|
|
||||||
"updated": "2020-01-13"
|
|
||||||
},
|
|
||||||
|
|
||||||
"dependencies": {
|
|
||||||
"grafanaVersion": "5.2.4",
|
|
||||||
"plugins": [ ]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,51 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
exports.GenericDatasourceQueryCtrl = undefined;
|
|
||||||
|
|
||||||
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
|
|
||||||
|
|
||||||
var _sdk = require('app/plugins/sdk');
|
|
||||||
|
|
||||||
require('./css/query-editor.css!');
|
|
||||||
|
|
||||||
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
|
|
||||||
|
|
||||||
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
|
|
||||||
|
|
||||||
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
|
|
||||||
|
|
||||||
var GenericDatasourceQueryCtrl = exports.GenericDatasourceQueryCtrl = function (_QueryCtrl) {
|
|
||||||
_inherits(GenericDatasourceQueryCtrl, _QueryCtrl);
|
|
||||||
|
|
||||||
function GenericDatasourceQueryCtrl($scope, $injector) {
|
|
||||||
_classCallCheck(this, GenericDatasourceQueryCtrl);
|
|
||||||
|
|
||||||
var _this = _possibleConstructorReturn(this, (GenericDatasourceQueryCtrl.__proto__ || Object.getPrototypeOf(GenericDatasourceQueryCtrl)).call(this, $scope, $injector));
|
|
||||||
|
|
||||||
_this.scope = $scope;
|
|
||||||
_this.target.target = _this.target.target || 'select metric';
|
|
||||||
_this.target.type = _this.target.type || 'timeserie';
|
|
||||||
return _this;
|
|
||||||
}
|
|
||||||
|
|
||||||
_createClass(GenericDatasourceQueryCtrl, [{
|
|
||||||
key: 'onChangeInternal',
|
|
||||||
value: function onChangeInternal() {
|
|
||||||
this.panelCtrl.refresh(); // Asks the panel to refresh data.
|
|
||||||
}
|
|
||||||
}, {
|
|
||||||
key: 'generateSQL',
|
|
||||||
value: function generateSQL(query) {
|
|
||||||
this.lastGenerateSQL = this.datasource.generateSql(this.panelCtrl, this.target);
|
|
||||||
this.showGenerateSQL = !this.showGenerateSQL;
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
return GenericDatasourceQueryCtrl;
|
|
||||||
}(_sdk.QueryCtrl);
|
|
||||||
|
|
||||||
GenericDatasourceQueryCtrl.templateUrl = 'partials/query.editor.html';
|
|
||||||
//# sourceMappingURL=query_ctrl.js.map
|
|
|
@ -1 +0,0 @@
|
||||||
{"version":3,"sources":["../src/query_ctrl.js"],"names":["GenericDatasourceQueryCtrl","$scope","$injector","scope","target","type","panelCtrl","refresh","query","lastGenerateSQL","datasource","generateSql","showGenerateSQL","QueryCtrl","templateUrl"],"mappings":";;;;;;;;;AAAA;;AACA;;;;;;;;IAEaA,0B,WAAAA,0B;;;AAEX,sCAAYC,MAAZ,EAAoBC,SAApB,EAAgC;AAAA;;AAAA,wJACxBD,MADwB,EAChBC,SADgB;;AAG9B,UAAKC,KAAL,GAAaF,MAAb;AACA,UAAKG,MAAL,CAAYA,MAAZ,GAAqB,MAAKA,MAAL,CAAYA,MAAZ,IAAsB,eAA3C;AACA,UAAKA,MAAL,CAAYC,IAAZ,GAAmB,MAAKD,MAAL,CAAYC,IAAZ,IAAoB,WAAvC;AAL8B;AAM/B;;;;uCAEkB;AACjB,WAAKC,SAAL,CAAeC,OAAf,GADiB,CACS;AAC3B;;;gCAEWC,K,EAAO;AACjB,WAAKC,eAAL,GAAuB,KAAKC,UAAL,CAAgBC,WAAhB,CAA6B,KAAKL,SAAlC,EAA6C,KAAKF,MAAlD,CAAvB;AACA,WAAKQ,eAAL,GAAuB,CAAC,KAAKA,eAA7B;AACD;;;;EAjB6CC,c;;AAqBhDb,2BAA2Bc,WAA3B,GAAyC,4BAAzC","file":"query_ctrl.js","sourcesContent":["import {QueryCtrl} from 'app/plugins/sdk';\nimport './css/query-editor.css!'\n\nexport class GenericDatasourceQueryCtrl extends QueryCtrl {\n\n constructor($scope, $injector) {\n super($scope, $injector);\n\n this.scope = $scope;\n this.target.target = this.target.target || 'select metric';\n this.target.type = this.target.type || 'timeserie';\n }\n\n onChangeInternal() {\n this.panelCtrl.refresh(); // Asks the panel to refresh data.\n }\n\n generateSQL(query) {\n this.lastGenerateSQL = this.datasource.generateSql( this.panelCtrl, this.target);\n this.showGenerateSQL = !this.showGenerateSQL;\n }\n\n}\n\nGenericDatasourceQueryCtrl.templateUrl = 'partials/query.editor.html';"]}
|
|
|
@ -1,45 +0,0 @@
|
||||||
{
|
|
||||||
"name": "TDengine",
|
|
||||||
"private": false,
|
|
||||||
"version": "2.0.0",
|
|
||||||
"description": "grafana datasource plugin for tdengine",
|
|
||||||
"scripts": {
|
|
||||||
"build": "./node_modules/grunt-cli/bin/grunt",
|
|
||||||
"test": "./node_modules/grunt-cli/bin/grunt mochaTest"
|
|
||||||
},
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "git+https://github.com/taosdata/TDengine.git"
|
|
||||||
},
|
|
||||||
"author": "https://www.taosdata.com",
|
|
||||||
"license": "AGPL 3.0",
|
|
||||||
"bugs": {
|
|
||||||
"url": "https://github.com/taosdata/TDengine/issues"
|
|
||||||
},
|
|
||||||
"engineStrict": true,
|
|
||||||
"devDependencies": {
|
|
||||||
"babel": "^6.23.0",
|
|
||||||
"babel-plugin-transform-object-rest-spread": "^6.26.0",
|
|
||||||
"babel-preset-env": "^1.7.0",
|
|
||||||
"chai": "~3.5.0",
|
|
||||||
"grunt": "^1.0.4",
|
|
||||||
"grunt-babel": "~6.0.0",
|
|
||||||
"grunt-cli": "^1.2.0",
|
|
||||||
"grunt-contrib-clean": "^1.1.0",
|
|
||||||
"grunt-contrib-copy": "^1.0.0",
|
|
||||||
"grunt-contrib-uglify": "^2.3.0",
|
|
||||||
"grunt-contrib-watch": "^1.0.0",
|
|
||||||
"grunt-mocha-test": "^0.13.2",
|
|
||||||
"grunt-systemjs-builder": "^1.0.0",
|
|
||||||
"jsdom": "~9.12.0",
|
|
||||||
"load-grunt-tasks": "^3.5.2",
|
|
||||||
"mocha": "^6.2.2",
|
|
||||||
"prunk": "^1.3.0",
|
|
||||||
"q": "^1.5.0"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"lodash": "^4.17.19",
|
|
||||||
"yarn": "^1.22.0"
|
|
||||||
},
|
|
||||||
"homepage": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine"
|
|
||||||
}
|
|
|
@ -1,22 +0,0 @@
|
||||||
import {Datasource} from "../module";
|
|
||||||
import Q from "q";
|
|
||||||
|
|
||||||
describe('GenericDatasource', function() {
|
|
||||||
var ctx = {};
|
|
||||||
|
|
||||||
beforeEach(function() {
|
|
||||||
ctx.$q = Q;
|
|
||||||
ctx.backendSrv = {};
|
|
||||||
ctx.templateSrv = {};
|
|
||||||
ctx.ds = new Datasource({}, ctx.$q, ctx.backendSrv, ctx.templateSrv);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return an empty array when no targets are set', function(done) {
|
|
||||||
ctx.ds.query({targets: []}).then(function(result) {
|
|
||||||
expect(result.data).to.have.length(0);
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
});
|
|
|
@ -1,20 +0,0 @@
|
||||||
import prunk from 'prunk';
|
|
||||||
import {jsdom} from 'jsdom';
|
|
||||||
import chai from 'chai';
|
|
||||||
|
|
||||||
// Mock Grafana modules that are not available outside of the core project
|
|
||||||
// Required for loading module.js
|
|
||||||
prunk.mock('./css/query-editor.css!', 'no css, dude.');
|
|
||||||
prunk.mock('app/plugins/sdk', {
|
|
||||||
QueryCtrl: null
|
|
||||||
});
|
|
||||||
|
|
||||||
// Setup jsdom
|
|
||||||
// Required for loading angularjs
|
|
||||||
global.document = jsdom('<html><head><script></script></head><body></body></html>');
|
|
||||||
global.window = global.document.parentWindow;
|
|
||||||
|
|
||||||
// Setup Chai
|
|
||||||
chai.should();
|
|
||||||
global.assert = chai.assert;
|
|
||||||
global.expect = chai.expect;
|
|
|
@ -1,3 +0,0 @@
|
||||||
.generic-datasource-query-row .query-keyword {
|
|
||||||
width: 75px;
|
|
||||||
}
|
|
|
@ -1,127 +0,0 @@
|
||||||
import _ from "lodash";
|
|
||||||
|
|
||||||
export class GenericDatasource {
|
|
||||||
|
|
||||||
constructor(instanceSettings, $q, backendSrv, templateSrv) {
|
|
||||||
this.type = instanceSettings.type;
|
|
||||||
this.url = instanceSettings.url;
|
|
||||||
this.name = instanceSettings.name;
|
|
||||||
this.q = $q;
|
|
||||||
this.backendSrv = backendSrv;
|
|
||||||
this.templateSrv = templateSrv;
|
|
||||||
this.headers = {'Content-Type': 'application/json'};
|
|
||||||
this.headers.Authorization = this.getAuthorization(instanceSettings.jsonData);
|
|
||||||
}
|
|
||||||
|
|
||||||
query(options) {
|
|
||||||
var targets = this.buildQueryParameters(options);
|
|
||||||
|
|
||||||
if (targets.length <= 0) {
|
|
||||||
return this.q.when({data: []});
|
|
||||||
}
|
|
||||||
|
|
||||||
return this.doRequest({
|
|
||||||
url: this.url + '/grafana/query',
|
|
||||||
data: targets,
|
|
||||||
method: 'POST'
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
testDatasource() {
|
|
||||||
return this.doRequest({
|
|
||||||
url: this.url + '/grafana/heartbeat',
|
|
||||||
method: 'GET',
|
|
||||||
}).then(response => {
|
|
||||||
if (response.status === 200) {
|
|
||||||
return { status: "success", message: "TDengine Data source is working", title: "Success" };
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
doRequest(options) {
|
|
||||||
options.headers = this.headers;
|
|
||||||
|
|
||||||
return this.backendSrv.datasourceRequest(options);
|
|
||||||
}
|
|
||||||
|
|
||||||
buildQueryParameters(options) {
|
|
||||||
|
|
||||||
var targets = _.map(options.targets, target => {
|
|
||||||
return {
|
|
||||||
refId: target.refId,
|
|
||||||
alias: this.generateAlias(options, target),
|
|
||||||
sql: this.generateSql(options, target)
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return targets;
|
|
||||||
}
|
|
||||||
|
|
||||||
encode(input) {
|
|
||||||
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
|
|
||||||
var output = "";
|
|
||||||
var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
|
|
||||||
var i = 0;
|
|
||||||
while (i < input.length) {
|
|
||||||
chr1 = input.charCodeAt(i++);
|
|
||||||
chr2 = input.charCodeAt(i++);
|
|
||||||
chr3 = input.charCodeAt(i++);
|
|
||||||
enc1 = chr1 >> 2;
|
|
||||||
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
|
|
||||||
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
|
|
||||||
enc4 = chr3 & 63;
|
|
||||||
if (isNaN(chr2)) {
|
|
||||||
enc3 = enc4 = 64;
|
|
||||||
} else if (isNaN(chr3)) {
|
|
||||||
enc4 = 64;
|
|
||||||
}
|
|
||||||
output = output + _keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4);
|
|
||||||
}
|
|
||||||
|
|
||||||
return output;
|
|
||||||
}
|
|
||||||
|
|
||||||
getAuthorization(jsonData){
|
|
||||||
jsonData = jsonData || {};
|
|
||||||
var defaultUser = jsonData.user || "root";
|
|
||||||
var defaultPassword = jsonData.password || "taosdata";
|
|
||||||
|
|
||||||
return "Basic " + this.encode(defaultUser + ":" + defaultPassword);
|
|
||||||
}
|
|
||||||
|
|
||||||
generateAlias(options, target){
|
|
||||||
var alias = target.alias || "";
|
|
||||||
alias = this.templateSrv.replace(alias, options.scopedVars, 'csv');
|
|
||||||
return alias;
|
|
||||||
}
|
|
||||||
|
|
||||||
generateSql(options, target) {
|
|
||||||
var sql = target.sql;
|
|
||||||
if (sql == null || sql == ""){
|
|
||||||
return sql;
|
|
||||||
}
|
|
||||||
|
|
||||||
var queryStart = "now-1h";
|
|
||||||
if (options != null && options.range != null && options.range.from != null){
|
|
||||||
queryStart = options.range.from.toISOString();
|
|
||||||
}
|
|
||||||
|
|
||||||
var queryEnd = "now";
|
|
||||||
if (options != null && options.range != null && options.range.to != null){
|
|
||||||
queryEnd = options.range.to.toISOString();
|
|
||||||
}
|
|
||||||
var intervalMs = options.intervalMs || "20000";
|
|
||||||
|
|
||||||
intervalMs += "a";
|
|
||||||
sql = sql.replace(/^\s+|\s+$/gm, '');
|
|
||||||
sql = sql.replace("$from", "'" + queryStart + "'");
|
|
||||||
sql = sql.replace("$begin", "'" + queryStart + "'");
|
|
||||||
sql = sql.replace("$to", "'" + queryEnd + "'");
|
|
||||||
sql = sql.replace("$end", "'" + queryEnd + "'");
|
|
||||||
sql = sql.replace("$interval", intervalMs);
|
|
||||||
|
|
||||||
sql = this.templateSrv.replace(sql, options.scopedVars, 'csv');
|
|
||||||
return sql;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
(image file removed; previous size 3.1 KiB)
|
@ -1,19 +0,0 @@
|
||||||
import {GenericDatasource} from './datasource';
|
|
||||||
import {GenericDatasourceQueryCtrl} from './query_ctrl';
|
|
||||||
|
|
||||||
class GenericConfigCtrl {}
|
|
||||||
GenericConfigCtrl.templateUrl = 'partials/config.html';
|
|
||||||
|
|
||||||
class GenericQueryOptionsCtrl {}
|
|
||||||
GenericQueryOptionsCtrl.templateUrl = 'partials/query.options.html';
|
|
||||||
|
|
||||||
class GenericAnnotationsQueryCtrl {}
|
|
||||||
GenericAnnotationsQueryCtrl.templateUrl = 'partials/annotations.editor.html'
|
|
||||||
|
|
||||||
export {
|
|
||||||
GenericDatasource as Datasource,
|
|
||||||
GenericDatasourceQueryCtrl as QueryCtrl,
|
|
||||||
GenericConfigCtrl as ConfigCtrl,
|
|
||||||
GenericQueryOptionsCtrl as QueryOptionsCtrl,
|
|
||||||
GenericAnnotationsQueryCtrl as AnnotationsQueryCtrl
|
|
||||||
};
|
|
|
@ -1,19 +0,0 @@
|
||||||
<h3 class="page-heading">TDengine Connection</h3>
|
|
||||||
|
|
||||||
<div class="gf-form-group">
|
|
||||||
<div class="gf-form max-width-30">
|
|
||||||
<span class="gf-form-label width-7">Host</span>
|
|
||||||
<input type="text" class="gf-form-input" ng-model='ctrl.current.url' placeholder="http://localhost:6041" bs-typeahead="{{['http://localhost:6041']}}" required></input>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form-inline">
|
|
||||||
<div class="gf-form max-width-15">
|
|
||||||
<span class="gf-form-label width-7">User</span>
|
|
||||||
<input type="text" class="gf-form-input" ng-model='ctrl.current.jsonData.user' placeholder="root"></input>
|
|
||||||
</div>
|
|
||||||
<div class="gf-form max-width-15">
|
|
||||||
<span class="gf-form-label width-7">Password</span>
|
|
||||||
<input type="password" class="gf-form-input" ng-model='ctrl.current.jsonData.password' placeholder="taosdata"></input>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
|
@ -1,58 +0,0 @@
|
||||||
<query-editor-row query-ctrl="ctrl" can-collapse="true" >
|
|
||||||
|
|
||||||
<div class="gf-form-inline">
|
|
||||||
<div class="gf-form gf-form--grow">
|
|
||||||
<label class="gf-form-label query-keyword width-7">INPUT SQL</label>
|
|
||||||
<input type="text" class="gf-form-input" ng-model="ctrl.target.sql" spellcheck='false' placeholder="select avg(mem_system) from log.dn where ts >= $from and ts < $to interval($interval)" ng-blur="ctrl.panelCtrl.refresh()" data-mode="sql"></input>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form-inline">
|
|
||||||
<div class="gf-form-inline" ng-hide="ctrl.target.resultFormat === 'table'">
|
|
||||||
<div class="gf-form max-width-30">
|
|
||||||
<label class="gf-form-label query-keyword width-7">ALIAS BY</label>
|
|
||||||
<input type="text" class="gf-form-input" ng-model="ctrl.target.alias" spellcheck='false' placeholder="Naming pattern" ng-blur="ctrl.panelCtrl.refresh()">
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="gf-form">
|
|
||||||
<label class="gf-form-label query-keyword" ng-click="ctrl.generateSQL()">
|
|
||||||
GENERATE SQL
|
|
||||||
<i class="fa fa-caret-down" ng-show="ctrl.showGenerateSQL"></i>
|
|
||||||
<i class="fa fa-caret-right" ng-hide="ctrl.showGenerateSQL"></i>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
<div class="gf-form">
|
|
||||||
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
|
|
||||||
SHOW HELP
|
|
||||||
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
|
|
||||||
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form" ng-show="ctrl.showGenerateSQL">
|
|
||||||
<pre class="gf-form-pre">{{ctrl.lastGenerateSQL}}</pre>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form" ng-show="ctrl.showHelp">
|
|
||||||
<pre class="gf-form-pre alert alert-info">Use any SQL that can return Resultset such as:
|
|
||||||
- [[timestamp1, value1], [timestamp2, value2], ... ]
|
|
||||||
|
|
||||||
Macros:
|
|
||||||
- $from -> start timestamp of panel
|
|
||||||
- $to -> stop timestamp of panel
|
|
||||||
- $interval -> interval of panel
|
|
||||||
|
|
||||||
Example of SQL:
|
|
||||||
SELECT count(*)
|
|
||||||
FROM db.table
|
|
||||||
WHERE ts > $from and ts < $to
|
|
||||||
INTERVAL ($interval)
|
|
||||||
</pre>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="gf-form" ng-show="ctrl.lastQueryError">
|
|
||||||
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
</query-editor-row>
|
|
|
@ -1,35 +0,0 @@
|
||||||
{
|
|
||||||
"name": "TDengine",
|
|
||||||
"id": "taosdata-tdengine-datasource",
|
|
||||||
"type": "datasource",
|
|
||||||
|
|
||||||
"partials": {
|
|
||||||
"config": "partials/config.html"
|
|
||||||
},
|
|
||||||
|
|
||||||
"metrics": true,
|
|
||||||
"annotations": false,
|
|
||||||
|
|
||||||
"info": {
|
|
||||||
"description": "grafana datasource plugin for tdengine",
|
|
||||||
"author": {
|
|
||||||
"name": "Taosdata Inc.",
|
|
||||||
"url": "https://www.taosdata.com"
|
|
||||||
},
|
|
||||||
"logos": {
|
|
||||||
"small": "img/taosdata_logo.png",
|
|
||||||
"large": "img/taosdata_logo.png"
|
|
||||||
},
|
|
||||||
"links": [
|
|
||||||
{"name": "GitHub", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine"},
|
|
||||||
{"name": "AGPL 3.0", "url": "https://github.com/taosdata/TDengine/tree/develop/src/connector/grafana/tdengine/LICENSE"}
|
|
||||||
],
|
|
||||||
"version": "1.0.0",
|
|
||||||
"updated": "2020-01-13"
|
|
||||||
},
|
|
||||||
|
|
||||||
"dependencies": {
|
|
||||||
"grafanaVersion": "5.2.4",
|
|
||||||
"plugins": [ ]
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,25 +0,0 @@
|
||||||
import {QueryCtrl} from 'app/plugins/sdk';
|
|
||||||
import './css/query-editor.css!'
|
|
||||||
|
|
||||||
export class GenericDatasourceQueryCtrl extends QueryCtrl {
|
|
||||||
|
|
||||||
constructor($scope, $injector) {
|
|
||||||
super($scope, $injector);
|
|
||||||
|
|
||||||
this.scope = $scope;
|
|
||||||
this.target.target = this.target.target || 'select metric';
|
|
||||||
this.target.type = this.target.type || 'timeserie';
|
|
||||||
}
|
|
||||||
|
|
||||||
onChangeInternal() {
|
|
||||||
this.panelCtrl.refresh(); // Asks the panel to refresh data.
|
|
||||||
}
|
|
||||||
|
|
||||||
generateSQL(query) {
|
|
||||||
this.lastGenerateSQL = this.datasource.generateSql( this.panelCtrl, this.target);
|
|
||||||
this.showGenerateSQL = !this.showGenerateSQL;
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
GenericDatasourceQueryCtrl.templateUrl = 'partials/query.editor.html';
|
|
|
@ -0,0 +1 @@
|
||||||
|
Subproject commit d598db167eb256fe67409b7bb3d0eb7fffc3ff8c
|
|
@ -111,6 +111,8 @@ public class TSDBJNIConnector {
|
||||||
* @throws SQLException
|
* @throws SQLException
|
||||||
*/
|
*/
|
||||||
public long executeQuery(String sql) throws SQLException {
|
public long executeQuery(String sql) throws SQLException {
|
||||||
|
// close previous result set if the user forgets to invoke the
|
||||||
|
// free method to close previous result set.
|
||||||
if (!this.isResultsetClosed) {
|
if (!this.isResultsetClosed) {
|
||||||
freeResultSet(taosResultSetPointer);
|
freeResultSet(taosResultSetPointer);
|
||||||
}
|
}
|
||||||
|
@ -123,21 +125,20 @@ public class TSDBJNIConnector {
|
||||||
this.freeResultSet(pSql);
|
this.freeResultSet(pSql);
|
||||||
throw new SQLException(TSDBConstants.WrapErrMsg("Unsupported encoding"));
|
throw new SQLException(TSDBConstants.WrapErrMsg("Unsupported encoding"));
|
||||||
}
|
}
|
||||||
|
|
||||||
int code = this.getErrCode(pSql);
|
int code = this.getErrCode(pSql);
|
||||||
if (code != 0) {
|
if (code != 0) {
|
||||||
affectedRows = -1;
|
affectedRows = -1;
|
||||||
String err_msg = this.getErrMsg(pSql);
|
String msg = this.getErrMsg(pSql);
|
||||||
|
|
||||||
this.freeResultSet(pSql);
|
this.freeResultSet(pSql);
|
||||||
throw new SQLException(TSDBConstants.WrapErrMsg(err_msg), "", code);
|
throw new SQLException(TSDBConstants.WrapErrMsg(msg), "", code);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Try retrieving result set for the executed SQL using the current connection pointer. If the executed
|
// Try retrieving result set for the executed SQL using the current connection pointer.
|
||||||
// SQL is a DML/DDL which doesn't return a result set, then taosResultSetPointer should be 0L. Otherwise,
|
|
||||||
// taosResultSetPointer should be a non-zero value.
|
|
||||||
taosResultSetPointer = this.getResultSetImp(this.taos, pSql);
|
taosResultSetPointer = this.getResultSetImp(this.taos, pSql);
|
||||||
if (taosResultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
|
isResultsetClosed = (taosResultSetPointer == TSDBConstants.JNI_NULL_POINTER);
|
||||||
isResultsetClosed = false;
|
|
||||||
}
|
|
||||||
return pSql;
|
return pSql;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -170,6 +171,12 @@ public class TSDBJNIConnector {
|
||||||
}
|
}
|
||||||
|
|
||||||
private native long getResultSetImp(long connection, long pSql);
|
private native long getResultSetImp(long connection, long pSql);
|
||||||
|
|
||||||
|
public boolean isUpdateQuery(long pSql) {
|
||||||
|
return isUpdateQueryImp(this.taos, pSql) == 1? true:false;
|
||||||
|
}
|
||||||
|
|
||||||
|
private native long isUpdateQueryImp(long connection, long pSql);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Free resultset operation from C to release resultset pointer by JNI
|
* Free resultset operation from C to release resultset pointer by JNI
|
||||||
|
@ -178,13 +185,14 @@ public class TSDBJNIConnector {
|
||||||
int res = TSDBConstants.JNI_SUCCESS;
|
int res = TSDBConstants.JNI_SUCCESS;
|
||||||
if (result != taosResultSetPointer && taosResultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
|
if (result != taosResultSetPointer && taosResultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
|
||||||
throw new RuntimeException("Invalid result set pointer");
|
throw new RuntimeException("Invalid result set pointer");
|
||||||
} else if (taosResultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
|
|
||||||
res = this.freeResultSetImp(this.taos, result);
|
|
||||||
isResultsetClosed = true; // reset resultSetPointer to 0 after freeResultSetImp() return
|
|
||||||
taosResultSetPointer = TSDBConstants.JNI_NULL_POINTER;
|
|
||||||
} else {
|
|
||||||
isResultsetClosed = true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (taosResultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
|
||||||
|
res = this.freeResultSetImp(this.taos, result);
|
||||||
|
taosResultSetPointer = TSDBConstants.JNI_NULL_POINTER;
|
||||||
|
}
|
||||||
|
|
||||||
|
isResultsetClosed = true;
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -33,6 +33,7 @@ public class TSDBStatement implements Statement {
|
||||||
* Status of current statement
|
* Status of current statement
|
||||||
*/
|
*/
|
||||||
private boolean isClosed = true;
|
private boolean isClosed = true;
|
||||||
|
private int affectedRows = 0;
|
||||||
|
|
||||||
TSDBStatement(TSDBJNIConnector connecter) {
|
TSDBStatement(TSDBJNIConnector connecter) {
|
||||||
this.connecter = connecter;
|
this.connecter = connecter;
|
||||||
|
@ -51,6 +52,8 @@ public class TSDBStatement implements Statement {
|
||||||
if (isClosed) {
|
if (isClosed) {
|
||||||
throw new SQLException("Invalid method call on a closed statement.");
|
throw new SQLException("Invalid method call on a closed statement.");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO make sure it is not a update query
|
||||||
pSql = this.connecter.executeQuery(sql);
|
pSql = this.connecter.executeQuery(sql);
|
||||||
|
|
||||||
long resultSetPointer = this.connecter.getResultSet();
|
long resultSetPointer = this.connecter.getResultSet();
|
||||||
|
@ -58,33 +61,41 @@ public class TSDBStatement implements Statement {
|
||||||
if (resultSetPointer == TSDBConstants.JNI_CONNECTION_NULL) {
|
if (resultSetPointer == TSDBConstants.JNI_CONNECTION_NULL) {
|
||||||
this.connecter.freeResultSet(pSql);
|
this.connecter.freeResultSet(pSql);
|
||||||
throw new SQLException(TSDBConstants.FixErrMsg(TSDBConstants.JNI_CONNECTION_NULL));
|
throw new SQLException(TSDBConstants.FixErrMsg(TSDBConstants.JNI_CONNECTION_NULL));
|
||||||
} else if (resultSetPointer == TSDBConstants.JNI_NULL_POINTER) {
|
}
|
||||||
// create/insert/update/del/alter
|
|
||||||
|
// create/insert/update/delete/alter
|
||||||
|
if (resultSetPointer == TSDBConstants.JNI_NULL_POINTER) {
|
||||||
this.connecter.freeResultSet(pSql);
|
this.connecter.freeResultSet(pSql);
|
||||||
return null;
|
return null;
|
||||||
} else {
|
|
||||||
return new TSDBResultSet(this.connecter, resultSetPointer);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!this.connecter.isUpdateQuery(pSql)) {
|
||||||
|
return new TSDBResultSet(this.connecter, resultSetPointer);
|
||||||
|
} else {
|
||||||
|
this.connecter.freeResultSet(pSql);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public int executeUpdate(String sql) throws SQLException {
|
public int executeUpdate(String sql) throws SQLException {
|
||||||
if (isClosed) {
|
if (isClosed) {
|
||||||
throw new SQLException("Invalid method call on a closed statement.");
|
throw new SQLException("Invalid method call on a closed statement.");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO check if current query is update query
|
||||||
pSql = this.connecter.executeQuery(sql);
|
pSql = this.connecter.executeQuery(sql);
|
||||||
long resultSetPointer = this.connecter.getResultSet();
|
long resultSetPointer = this.connecter.getResultSet();
|
||||||
|
|
||||||
if (resultSetPointer == TSDBConstants.JNI_CONNECTION_NULL) {
|
if (resultSetPointer == TSDBConstants.JNI_CONNECTION_NULL) {
|
||||||
this.connecter.freeResultSet(pSql);
|
this.connecter.freeResultSet(pSql);
|
||||||
throw new SQLException(TSDBConstants.FixErrMsg(TSDBConstants.JNI_CONNECTION_NULL));
|
throw new SQLException(TSDBConstants.FixErrMsg(TSDBConstants.JNI_CONNECTION_NULL));
|
||||||
} else if (resultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
|
|
||||||
this.connecter.freeResultSet();
|
|
||||||
throw new SQLException("The executed SQL is not a DML or a DDL");
|
|
||||||
} else {
|
|
||||||
int num = this.connecter.getAffectedRows(pSql);
|
|
||||||
this.connecter.freeResultSet(pSql);
|
|
||||||
return num;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
this.affectedRows = this.connecter.getAffectedRows(pSql);
|
||||||
|
this.connecter.freeResultSet(pSql);
|
||||||
|
|
||||||
|
return this.affectedRows;
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getErrorMsg(long pSql) {
|
public String getErrorMsg(long pSql) {
|
||||||
|
@ -181,7 +192,8 @@ public class TSDBStatement implements Statement {
|
||||||
if (isClosed) {
|
if (isClosed) {
|
||||||
throw new SQLException("Invalid method call on a closed statement.");
|
throw new SQLException("Invalid method call on a closed statement.");
|
||||||
}
|
}
|
||||||
return this.connecter.getAffectedRows(this.pSql);
|
|
||||||
|
return this.affectedRows;
|
||||||
}
|
}
|
||||||
|
|
||||||
public boolean getMoreResults() throws SQLException {
|
public boolean getMoreResults() throws SQLException {
|
||||||
|
|
|
@ -6,6 +6,10 @@ AUX_SOURCE_DIRECTORY(${CMAKE_CURRENT_SOURCE_DIR}/src SRC)
|
||||||
|
|
||||||
IF (TD_LINUX)
|
IF (TD_LINUX)
|
||||||
ADD_LIBRARY(tcq ${SRC})
|
ADD_LIBRARY(tcq ${SRC})
|
||||||
TARGET_LINK_LIBRARIES(tcq tutil common taos)
|
IF (TD_SOMODE_STATIC)
|
||||||
|
TARGET_LINK_LIBRARIES(tcq tutil common taos_static)
|
||||||
|
ELSE ()
|
||||||
|
TARGET_LINK_LIBRARIES(tcq tutil common taos)
|
||||||
|
ENDIF ()
|
||||||
ADD_SUBDIRECTORY(test)
|
ADD_SUBDIRECTORY(test)
|
||||||
ENDIF ()
|
ENDIF ()
|
||||||
|
|
|
@@ -11,7 +11,11 @@ AUX_SOURCE_DIRECTORY(src SRC)

 IF (TD_LINUX)
 ADD_EXECUTABLE(taosd ${SRC})
-TARGET_LINK_LIBRARIES(taosd mnode taos monitor http mqtt tsdb twal vnode cJson lz4 balance sync)
+IF (TD_SOMODE_STATIC)
+TARGET_LINK_LIBRARIES(taosd mnode taos_static monitor http mqtt tsdb twal vnode cJson lz4 balance sync)
+ELSE ()
+TARGET_LINK_LIBRARIES(taosd mnode taos monitor http mqtt tsdb twal vnode cJson lz4 balance sync)
+ENDIF ()

 IF (TD_ACCOUNT)
 TARGET_LINK_LIBRARIES(taosd account)

@@ -35,4 +39,4 @@ IF (TD_LINUX)
 COMMAND ${CMAKE_COMMAND} -E echo charset UTF-8 >> ${TD_TESTS_OUTPUT_DIR}/cfg/taos.cfg
 COMMENT "prepare taosd environment")
 ADD_CUSTOM_TARGET(${PREPARE_ENV_TARGET} ALL WORKING_DIRECTORY ${TD_EXECUTABLE_OUTPUT_PATH} DEPENDS ${PREPARE_ENV_CMD})
 ENDIF ()
@@ -131,6 +131,8 @@ static int dnodeCheckCpu() {
 }

 static int dnodeCheckDisk() {
+taosGetDisk();

 if (tsAvailDataDirGB < tsMinimalDataDirGB) {
 dError("free disk size: %f GB, too little, quit", tsAvailDataDirGB);
 return -1;
@@ -49,8 +49,8 @@ typedef struct {
 } SDnodeComponent;

 static const SDnodeComponent tsDnodeComponents[] = {
-{"check", dnodeInitCheck, dnodeCleanupCheck}, // NOTES: dnodeInitCheck must be first component !!!
 {"storage", dnodeInitStorage, dnodeCleanupStorage},
+{"check", dnodeInitCheck, dnodeCleanupCheck}, // NOTES: dnodeInitCheck must be behind the dnodeinitStorage component !!!
 {"vread", dnodeInitVnodeRead, dnodeCleanupVnodeRead},
 {"vwrite", dnodeInitVnodeWrite, dnodeCleanupVnodeWrite},
 {"mread", dnodeInitMnodeRead, dnodeCleanupMnodeRead},
@@ -165,6 +165,13 @@ int32_t dnodeInitMgmtTimer() {
 return TSDB_CODE_SUCCESS;
 }

+void dnodeSendStatusMsgToMnode() {
+if (tsDnodeTmr != NULL && tsStatusTimer != NULL) {
+dInfo("force send status msg to mnode");
+taosTmrReset(dnodeSendStatusMsg, 3, NULL, tsDnodeTmr, &tsStatusTimer);
+}
+}

 void dnodeCleanupMgmtTimer() {
 if (tsStatusTimer != NULL) {
 taosTmrStopA(&tsStatusTimer);
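For orientation only, a sketch of how the new helper is meant to be used (the caller below is hypothetical and the header name is an assumption; the patch itself only adds the function and, later in this diff, its declaration): any module that changes state the mnode should learn about quickly can force an immediate status report instead of waiting for the regular status interval.

  #include "dnodeMgmt.h"   /* assumed header; this diff only shows the declaration being added */

  /* Hypothetical caller: push a status message right away after a local change. */
  void notifyMnodeNow(void) {
    dnodeSendStatusMsgToMnode();   /* re-arms the status timer to fire almost immediately */
  }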
@@ -93,7 +93,7 @@ static void dnodeProcessReqMsgFromDnode(SRpcMsg *pMsg, SRpcEpSet *pEpSet) {
 if (pMsg->pCont == NULL) return;

 if (dnodeGetRunStatus() != TSDB_DNODE_RUN_STATUS_RUNING) {
-rspMsg.code = TSDB_CODE_RPC_NOT_READY;
+rspMsg.code = TSDB_CODE_APP_NOT_READY;
 rpcSendResponse(&rspMsg);
 rpcFreeCont(pMsg->pCont);
 dDebug("RPC %p, msg:%s is ignored since dnode not running", pMsg->handle, taosMsg[pMsg->msgType]);
@@ -119,7 +119,7 @@ void dnodeProcessMsgFromShell(SRpcMsg *pMsg, SRpcEpSet *pEpSet) {

 if (dnodeGetRunStatus() != TSDB_DNODE_RUN_STATUS_RUNING) {
 dError("RPC %p, shell msg:%s is ignored since dnode not running", pMsg->handle, taosMsg[pMsg->msgType]);
-rpcMsg.code = TSDB_CODE_RPC_NOT_READY;
+rpcMsg.code = TSDB_CODE_APP_NOT_READY;
 rpcSendResponse(&rpcMsg);
 rpcFreeCont(pMsg->pCont);
 return;

@@ -144,7 +144,7 @@ void dnodeProcessMsgFromShell(SRpcMsg *pMsg, SRpcEpSet *pEpSet) {

 static int dnodeRetrieveUserAuthInfo(char *user, char *spi, char *encrypt, char *secret, char *ckey) {
 int code = mnodeRetriveAuth(user, spi, encrypt, secret, ckey);
-if (code != TSDB_CODE_RPC_NOT_READY) return code;
+if (code != TSDB_CODE_APP_NOT_READY) return code;

 SDMAuthMsg *pMsg = rpcMallocCont(sizeof(SDMAuthMsg));
 tstrncpy(pMsg->user, user, sizeof(pMsg->user));
@@ -65,6 +65,8 @@ void dnodeSendRpcMnodeWriteRsp(void *pMsg, int32_t code);
 void dnodeReprocessMnodeWriteMsg(void *pMsg);
 void dnodeDelayReprocessMnodeWriteMsg(void *pMsg);

+void dnodeSendStatusMsgToMnode();

 #ifdef __cplusplus
 }
 #endif
@@ -65,6 +65,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_SESSION_ID, 0, 0x0010, "Invalid se
 TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_MSG_TYPE, 0, 0x0011, "Invalid message type")
 TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_RESPONSE_TYPE, 0, 0x0012, "Invalid response type")
 TAOS_DEFINE_ERROR(TSDB_CODE_RPC_INVALID_TIME_STAMP, 0, 0x0013, "Invalid timestamp")
+TAOS_DEFINE_ERROR(TSDB_CODE_APP_NOT_READY, 0, 0x0014, "Database not ready")

 //common & util
 TAOS_DEFINE_ERROR(TSDB_CODE_COM_OPS_NOT_SUPPORT, 0, 0x0100, "Operation not supported")

@@ -184,7 +185,6 @@ TAOS_DEFINE_ERROR(TSDB_CODE_VND_NO_DISK_PERMISSIONS, 0, 0x0506, "No write p
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_NO_SUCH_FILE_OR_DIR, 0, 0x0507, "Missing data file")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_OUT_OF_MEMORY, 0, 0x0508, "Out of memory")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_APP_ERROR, 0, 0x0509, "Unexpected generic error in vnode")
-TAOS_DEFINE_ERROR(TSDB_CODE_VND_INVALID_STATUS, 0, 0x0510, "Database not ready")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_NOT_SYNCED, 0, 0x0511, "Database suspended")
 TAOS_DEFINE_ERROR(TSDB_CODE_VND_NO_WRITE_AUTH, 0, 0x0512, "Write operation denied")
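A minimal sketch of how a caller might treat the new code (not patch code; the retry policy and helper below are assumptions): TSDB_CODE_APP_NOT_READY marks a transient startup state, so the usual reaction is to retry rather than fail hard.

  #include <stdint.h>
  #include <stdio.h>
  #include "taoserror.h"   /* assumed include providing TSDB_CODE_APP_NOT_READY and tstrerror() */

  /* Hypothetical helper: retry while the dnode reports that it is not ready yet. */
  static int32_t callWithRetry(int32_t (*op)(void), int maxTries) {
    int32_t code = 0;
    for (int i = 0; i < maxTries; ++i) {
      code = op();
      if (code != TSDB_CODE_APP_NOT_READY) break;   /* success or a real error: stop retrying */
      printf("not ready (%s), retrying\n", tstrerror(code));
    }
    return code;
  }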
@ -11,11 +11,11 @@ IF (TD_LINUX)
|
||||||
LIST(REMOVE_ITEM SRC ./src/shellDarwin.c)
|
LIST(REMOVE_ITEM SRC ./src/shellDarwin.c)
|
||||||
ADD_EXECUTABLE(shell ${SRC})
|
ADD_EXECUTABLE(shell ${SRC})
|
||||||
|
|
||||||
# IF (TD_PAGMODE_LITE)
|
IF (TD_SOMODE_STATIC)
|
||||||
|
TARGET_LINK_LIBRARIES(shell taos_static)
|
||||||
|
ELSE ()
|
||||||
TARGET_LINK_LIBRARIES(shell taos)
|
TARGET_LINK_LIBRARIES(shell taos)
|
||||||
# ELSE ()
|
ENDIF ()
|
||||||
# TARGET_LINK_LIBRARIES(shell taos_static)
|
|
||||||
# ENDIF ()
|
|
||||||
|
|
||||||
SET_TARGET_PROPERTIES(shell PROPERTIES OUTPUT_NAME taos)
|
SET_TARGET_PROPERTIES(shell PROPERTIES OUTPUT_NAME taos)
|
||||||
ELSEIF (TD_WINDOWS)
|
ELSEIF (TD_WINDOWS)
|
||||||
|
|
|
@@ -8,11 +8,11 @@ IF (TD_LINUX)
 AUX_SOURCE_DIRECTORY(. SRC)
 ADD_EXECUTABLE(taosdemo ${SRC})

-# IF (TD_PAGMODE_LITE)
+IF (TD_SOMODE_STATIC)
+TARGET_LINK_LIBRARIES(taosdemo taos_static)
+ELSE ()
 TARGET_LINK_LIBRARIES(taosdemo taos)
-# ELSE ()
+ENDIF ()
-# TARGET_LINK_LIBRARIES(taosdemo taos_static)
-# ENDIF ()
 ELSEIF (TD_WINDOWS)
 AUX_SOURCE_DIRECTORY(. SRC)
 ADD_EXECUTABLE(taosdemo ${SRC})
@@ -636,6 +636,7 @@ static int32_t mnodeRetrieveDbs(SShowObj *pShow, char *data, int32_t rows, void
 while (numOfRows < rows) {
 pShow->pIter = mnodeGetNextDb(pShow->pIter, &pDb);
 if (pDb == NULL) break;
+if (pDb->pAcct != pUser->pAcct) continue;

 cols = 0;

@@ -687,8 +688,8 @@ static int32_t mnodeRetrieveDbs(SShowObj *pShow, char *data, int32_t rows, void
 pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;

 char tmp[128] = {0};
-size_t n = sprintf(tmp, "%d,%d,%d", pDb->cfg.daysToKeep1, pDb->cfg.daysToKeep2, pDb->cfg.daysToKeep);
+sprintf(tmp, "%d,%d,%d", pDb->cfg.daysToKeep1, pDb->cfg.daysToKeep2, pDb->cfg.daysToKeep);
-STR_WITH_SIZE_TO_VARSTR(pWrite, tmp, n);
+STR_WITH_SIZE_TO_VARSTR(pWrite, tmp, strlen(tmp));
 cols++;

 #ifndef __CLOUD_VERSION__
@@ -581,8 +581,8 @@ void mnodeDropAllUsers(SAcctObj *pAcct) {
 int32_t mnodeRetriveAuth(char *user, char *spi, char *encrypt, char *secret, char *ckey) {
 if (!sdbIsMaster()) {
 *secret = 0;
-mDebug("user:%s, failed to auth user, reason:%s", user, tstrerror(TSDB_CODE_RPC_NOT_READY));
+mDebug("user:%s, failed to auth user, reason:%s", user, tstrerror(TSDB_CODE_APP_NOT_READY));
-return TSDB_CODE_RPC_NOT_READY;
+return TSDB_CODE_APP_NOT_READY;
 }

 SUserObj *pUser = mnodeGetUser(user);
@@ -25,6 +25,7 @@ extern "C" {
 #define taosSendto(sockfd, buf, len, flags, dest_addr, addrlen) sendto(sockfd, buf, len, flags, dest_addr, addrlen)
 #define taosReadSocket(fd, buf, len) read(fd, buf, len)
 #define taosWriteSocket(fd, buf, len) write(fd, buf, len)
+#define taosCloseSocketNoCheck(x) close(x)
 #define taosCloseSocket(x) \
 { \
 if (FD_VALID(x)) { \
@@ -90,6 +90,7 @@ extern "C" {
 #define taosSendto(sockfd, buf, len, flags, dest_addr, addrlen) sendto((SOCKET)sockfd, buf, len, flags, dest_addr, addrlen)
 #define taosWriteSocket(fd, buf, len) send((SOCKET)fd, buf, len, 0)
 #define taosReadSocket(fd, buf, len) recv((SOCKET)fd, buf, len, 0)
+#define taosCloseSocketNoCheck(fd) closesocket((SOCKET)fd)
 #define taosCloseSocket(fd) closesocket((SOCKET)fd)
 typedef SOCKET eventfd_t;
 #define eventfd(a, b) -1
@@ -229,8 +230,12 @@ void wordfree(wordexp_t *pwordexp);
 #define atomic_exchange_16(ptr, val) _InterlockedExchange16((short volatile*)(ptr), (short)(val))
 #define atomic_exchange_32(ptr, val) _InterlockedExchange((long volatile*)(ptr), (long)(val))
 #define atomic_exchange_64(ptr, val) _InterlockedExchange64((__int64 volatile*)(ptr), (__int64)(val))
-#define atomic_exchange_ptr(ptr, val) _InterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
+#ifdef _WIN64
+#define atomic_exchange_ptr(ptr, val) _InterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
+#else
+#define atomic_exchange_ptr(ptr, val) _InlineInterlockedExchangePointer((void* volatile*)(ptr), (void*)(val))
+#endif

 #ifdef _TD_GO_DLL_
 #define atomic_val_compare_exchange_8 __sync_val_compare_and_swap
 #else
@@ -246,162 +251,104 @@ void wordfree(wordexp_t *pwordexp);
 long interlocked_add_fetch_32(long volatile *ptr, long val);
 __int64 interlocked_add_fetch_64(__int64 volatile *ptr, __int64 val);

+char interlocked_and_fetch_8(char volatile* ptr, char val);
+short interlocked_and_fetch_16(short volatile* ptr, short val);
+long interlocked_and_fetch_32(long volatile* ptr, long val);
+__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val);
+
+__int64 interlocked_fetch_and_64(__int64 volatile* ptr, __int64 val);
+
+char interlocked_or_fetch_8(char volatile* ptr, char val);
+short interlocked_or_fetch_16(short volatile* ptr, short val);
+long interlocked_or_fetch_32(long volatile* ptr, long val);
+__int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val);
+
+char interlocked_xor_fetch_8(char volatile* ptr, char val);
+short interlocked_xor_fetch_16(short volatile* ptr, short val);
+long interlocked_xor_fetch_32(long volatile* ptr, long val);
+__int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val);
+
+__int64 interlocked_fetch_xor_64(__int64 volatile* ptr, __int64 val);

 #define atomic_add_fetch_8(ptr, val) interlocked_add_fetch_8((char volatile*)(ptr), (char)(val))
 #define atomic_add_fetch_16(ptr, val) interlocked_add_fetch_16((short volatile*)(ptr), (short)(val))
 #define atomic_add_fetch_32(ptr, val) interlocked_add_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_add_fetch_64(ptr, val) interlocked_add_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_add_fetch_ptr atomic_add_fetch_64
-#else
-#define atomic_add_fetch_ptr atomic_add_fetch_32
-#endif

 #ifdef _TD_GO_DLL_
 #define atomic_fetch_add_8 __sync_fetch_and_ad
 #define atomic_fetch_add_16 __sync_fetch_and_add
 #else
 #define atomic_fetch_add_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), (char)(val))
 #define atomic_fetch_add_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), (short)(val))
 #endif
+#define atomic_fetch_add_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), (char)(val))
+#define atomic_fetch_add_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), (short)(val))
 #define atomic_fetch_add_32(ptr, val) _InterlockedExchangeAdd((long volatile*)(ptr), (long)(val))
 #define atomic_fetch_add_64(ptr, val) _InterlockedExchangeAdd64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_fetch_add_ptr atomic_fetch_add_64
-#else
-#define atomic_fetch_add_ptr atomic_fetch_add_32
-#endif

 #define atomic_sub_fetch_8(ptr, val) interlocked_add_fetch_8((char volatile*)(ptr), -(char)(val))
 #define atomic_sub_fetch_16(ptr, val) interlocked_add_fetch_16((short volatile*)(ptr), -(short)(val))
 #define atomic_sub_fetch_32(ptr, val) interlocked_add_fetch_32((long volatile*)(ptr), -(long)(val))
 #define atomic_sub_fetch_64(ptr, val) interlocked_add_fetch_64((__int64 volatile*)(ptr), -(__int64)(val))
-#ifdef _WIN64
-#define atomic_sub_fetch_ptr atomic_sub_fetch_64
-#else
-#define atomic_sub_fetch_ptr atomic_sub_fetch_32
-#endif

 #define atomic_fetch_sub_8(ptr, val) _InterlockedExchangeAdd8((char volatile*)(ptr), -(char)(val))
 #define atomic_fetch_sub_16(ptr, val) _InterlockedExchangeAdd16((short volatile*)(ptr), -(short)(val))
 #define atomic_fetch_sub_32(ptr, val) _InterlockedExchangeAdd((long volatile*)(ptr), -(long)(val))
 #define atomic_fetch_sub_64(ptr, val) _InterlockedExchangeAdd64((__int64 volatile*)(ptr), -(__int64)(val))
-#ifdef _WIN64
-#define atomic_fetch_sub_ptr atomic_fetch_sub_64
-#else
-#define atomic_fetch_sub_ptr atomic_fetch_sub_32
-#endif

-#ifndef _TD_GO_DLL_
+#define atomic_and_fetch_8(ptr, val) interlocked_and_fetch_8((char volatile*)(ptr), (char)(val))
-char interlocked_and_fetch_8(char volatile* ptr, char val);
+#define atomic_and_fetch_16(ptr, val) interlocked_and_fetch_16((short volatile*)(ptr), (short)(val))
-short interlocked_and_fetch_16(short volatile* ptr, short val);
-#endif
-long interlocked_and_fetch_32(long volatile* ptr, long val);
-__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val);

-#ifndef _TD_GO_DLL_
-#define atomic_and_fetch_8(ptr, val) interlocked_and_fetch_8((char volatile*)(ptr), (char)(val))
-#define atomic_and_fetch_16(ptr, val) interlocked_and_fetch_16((short volatile*)(ptr), (short)(val))
-#endif
 #define atomic_and_fetch_32(ptr, val) interlocked_and_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_and_fetch_64(ptr, val) interlocked_and_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_and_fetch_ptr atomic_and_fetch_64
+#define atomic_fetch_and_8(ptr, val) _InterlockedAnd8((char volatile*)(ptr), (char)(val))
-#else
+#define atomic_fetch_and_16(ptr, val) _InterlockedAnd16((short volatile*)(ptr), (short)(val))
-#define atomic_and_fetch_ptr atomic_and_fetch_32
-#endif
-#ifndef _TD_GO_DLL_
-#define atomic_fetch_and_8(ptr, val) _InterlockedAnd8((char volatile*)(ptr), (char)(val))
-#define atomic_fetch_and_16(ptr, val) _InterlockedAnd16((short volatile*)(ptr), (short)(val))
-#endif
 #define atomic_fetch_and_32(ptr, val) _InterlockedAnd((long volatile*)(ptr), (long)(val))
+#define atomic_fetch_and_64(ptr, val) interlocked_fetch_and_64((__int64 volatile*)(ptr), (__int64)(val))

-#ifdef _M_IX86
+#define atomic_or_fetch_8(ptr, val) interlocked_or_fetch_8((char volatile*)(ptr), (char)(val))
-__int64 interlocked_fetch_and_64(__int64 volatile* ptr, __int64 val);
+#define atomic_or_fetch_16(ptr, val) interlocked_or_fetch_16((short volatile*)(ptr), (short)(val))
-#define atomic_fetch_and_64(ptr, val) interlocked_fetch_and_64((__int64 volatile*)(ptr), (__int64)(val))
-#else
-#define atomic_fetch_and_64(ptr, val) _InterlockedAnd64((__int64 volatile*)(ptr), (__int64)(val))
-#endif

-#ifdef _WIN64
-#define atomic_fetch_and_ptr atomic_fetch_and_64
-#else
-#define atomic_fetch_and_ptr atomic_fetch_and_32
-#endif
-#ifndef _TD_GO_DLL_
-char interlocked_or_fetch_8(char volatile* ptr, char val);
-short interlocked_or_fetch_16(short volatile* ptr, short val);
-#endif
-long interlocked_or_fetch_32(long volatile* ptr, long val);
-__int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val);

-#ifndef _TD_GO_DLL_
-#define atomic_or_fetch_8(ptr, val) interlocked_or_fetch_8((char volatile*)(ptr), (char)(val))
-#define atomic_or_fetch_16(ptr, val) interlocked_or_fetch_16((short volatile*)(ptr), (short)(val))
-#endif
 #define atomic_or_fetch_32(ptr, val) interlocked_or_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_or_fetch_64(ptr, val) interlocked_or_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_or_fetch_ptr atomic_or_fetch_64
+#define atomic_fetch_or_8(ptr, val) _InterlockedOr8((char volatile*)(ptr), (char)(val))
-#else
+#define atomic_fetch_or_16(ptr, val) _InterlockedOr16((short volatile*)(ptr), (short)(val))
-#define atomic_or_fetch_ptr atomic_or_fetch_32
-#endif
-#ifndef _TD_GO_DLL_
-#define atomic_fetch_or_8(ptr, val) _InterlockedOr8((char volatile*)(ptr), (char)(val))
-#define atomic_fetch_or_16(ptr, val) _InterlockedOr16((short volatile*)(ptr), (short)(val))
-#endif
 #define atomic_fetch_or_32(ptr, val) _InterlockedOr((long volatile*)(ptr), (long)(val))
+#define atomic_fetch_or_64(ptr, val) interlocked_fetch_or_64((__int64 volatile*)(ptr), (__int64)(val))

-#ifdef _M_IX86
+#define atomic_xor_fetch_8(ptr, val) interlocked_xor_fetch_8((char volatile*)(ptr), (char)(val))
-__int64 interlocked_fetch_or_64(__int64 volatile* ptr, __int64 val);
+#define atomic_xor_fetch_16(ptr, val) interlocked_xor_fetch_16((short volatile*)(ptr), (short)(val))
-#define atomic_fetch_or_64(ptr, val) interlocked_fetch_or_64((__int64 volatile*)(ptr), (__int64)(val))
-#else
-#define atomic_fetch_or_64(ptr, val) _InterlockedOr64((__int64 volatile*)(ptr), (__int64)(val))
-#endif

-#ifdef _WIN64
-#define atomic_fetch_or_ptr atomic_fetch_or_64
-#else
-#define atomic_fetch_or_ptr atomic_fetch_or_32
-#endif

-#ifndef _TD_GO_DLL_
-char interlocked_xor_fetch_8(char volatile* ptr, char val);
-short interlocked_xor_fetch_16(short volatile* ptr, short val);
-#endif
-long interlocked_xor_fetch_32(long volatile* ptr, long val);
-__int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val);

-#ifndef _TD_GO_DLL_
-#define atomic_xor_fetch_8(ptr, val) interlocked_xor_fetch_8((char volatile*)(ptr), (char)(val))
-#define atomic_xor_fetch_16(ptr, val) interlocked_xor_fetch_16((short volatile*)(ptr), (short)(val))
-#endif
 #define atomic_xor_fetch_32(ptr, val) interlocked_xor_fetch_32((long volatile*)(ptr), (long)(val))
 #define atomic_xor_fetch_64(ptr, val) interlocked_xor_fetch_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _WIN64
-#define atomic_xor_fetch_ptr atomic_xor_fetch_64
-#else
-#define atomic_xor_fetch_ptr atomic_xor_fetch_32
-#endif

-#ifndef _TD_GO_DLL_
+#define atomic_fetch_xor_8(ptr, val) _InterlockedXor8((char volatile*)(ptr), (char)(val))
-#define atomic_fetch_xor_8(ptr, val) _InterlockedXor8((char volatile*)(ptr), (char)(val))
+#define atomic_fetch_xor_16(ptr, val) _InterlockedXor16((short volatile*)(ptr), (short)(val))
-#define atomic_fetch_xor_16(ptr, val) _InterlockedXor16((short volatile*)(ptr), (short)(val))
-#endif
 #define atomic_fetch_xor_32(ptr, val) _InterlockedXor((long volatile*)(ptr), (long)(val))
+#define atomic_fetch_xor_64(ptr, val) interlocked_fetch_xor_64((__int64 volatile*)(ptr), (__int64)(val))
-#ifdef _M_IX86
-__int64 interlocked_fetch_xor_64(__int64 volatile* ptr, __int64 val);
-#define atomic_fetch_xor_64(ptr, val) interlocked_fetch_xor_64((__int64 volatile*)(ptr), (__int64)(val))
-#else
-#define atomic_fetch_xor_64(ptr, val) _InterlockedXor64((__int64 volatile*)(ptr), (__int64)(val))
-#endif

 #ifdef _WIN64
+#define atomic_add_fetch_ptr atomic_add_fetch_64
+#define atomic_fetch_add_ptr atomic_fetch_add_64
+#define atomic_sub_fetch_ptr atomic_sub_fetch_64
+#define atomic_fetch_sub_ptr atomic_fetch_sub_64
+#define atomic_and_fetch_ptr atomic_and_fetch_64
+#define atomic_fetch_and_ptr atomic_fetch_and_64
+#define atomic_or_fetch_ptr atomic_or_fetch_64
+#define atomic_fetch_or_ptr atomic_fetch_or_64
+#define atomic_xor_fetch_ptr atomic_xor_fetch_64
 #define atomic_fetch_xor_ptr atomic_fetch_xor_64
 #else
+#define atomic_add_fetch_ptr atomic_add_fetch_32
+#define atomic_fetch_add_ptr atomic_fetch_add_32
+#define atomic_sub_fetch_ptr atomic_sub_fetch_32
+#define atomic_fetch_sub_ptr atomic_fetch_sub_32
+#define atomic_and_fetch_ptr atomic_and_fetch_32
+#define atomic_fetch_and_ptr atomic_fetch_and_32
+#define atomic_or_fetch_ptr atomic_or_fetch_32
+#define atomic_fetch_or_ptr atomic_fetch_or_32
+#define atomic_xor_fetch_ptr atomic_xor_fetch_32
 #define atomic_fetch_xor_ptr atomic_fetch_xor_32
 #endif

 #ifdef __cplusplus
 }
 #endif
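For orientation (not part of the patch): the *_ptr macros must operate on pointer-sized values, so the consolidated #ifdef _WIN64 block routes them to the 64-bit wrappers on 64-bit Windows and to the 32-bit ones otherwise. A small usage sketch, assuming an umbrella header such as os.h exposes these macros:

  #include "os.h"   /* assumed umbrella header exposing the atomic_* macros above */

  static void *tsHead;    /* shared pointer updated from several threads */
  static long  tsCount;   /* fixed-width 32-bit counter for comparison */

  void demoPublish(void *newNode) {
    /* Pointer-sized exchange: resolves to _InterlockedExchangePointer under _WIN64
       and to the inline 32-bit variant on 32-bit Windows. */
    void *old = (void *)atomic_exchange_ptr(&tsHead, newNode);
    (void)old;
    atomic_add_fetch_32(&tsCount, 1);
  }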
@@ -547,12 +547,12 @@ int taosSystem(const char *cmd) {
 int res;
 char buf[1024];
 if (cmd == NULL) {
-uError("taosSystem cmd is NULL!\n");
+uError("taosSystem cmd is NULL!");
 return -1;
 }

 if ((fp = popen(cmd, "r")) == NULL) {
-uError("popen cmd:%s error: %s/n", cmd, strerror(errno));
+uError("popen cmd:%s error: %s", cmd, strerror(errno));
 return -1;
 } else {
 while (fgets(buf, sizeof(buf), fp)) {

@@ -560,9 +560,9 @@ int taosSystem(const char *cmd) {
 }

 if ((res = pclose(fp)) == -1) {
-uError("close popen file pointer fp error!\n");
+uError("close popen file pointer fp error!");
 } else {
-uDebug("popen res is :%d\n", res);
+uDebug("popen res is :%d", res);
 }

 return res;
@@ -23,19 +23,19 @@

 // add
 char interlocked_add_fetch_8(char volatile* ptr, char val) {
 #ifdef _TD_GO_DLL_
 return __sync_fetch_and_add(ptr, val) + val;
 #else
 return _InterlockedExchangeAdd8(ptr, val) + val;
 #endif
 }

 short interlocked_add_fetch_16(short volatile* ptr, short val) {
 #ifdef _TD_GO_DLL_
 return __sync_fetch_and_add(ptr, val) + val;
 #else
 return _InterlockedExchangeAdd16(ptr, val) + val;
 #endif
 }

 long interlocked_add_fetch_32(long volatile* ptr, long val) {

@@ -43,15 +43,13 @@ long interlocked_add_fetch_32(long volatile* ptr, long val) {
 }

 __int64 interlocked_add_fetch_64(__int64 volatile* ptr, __int64 val) {
-#ifdef _WIN64
+//#ifdef _WIN64
 return _InterlockedExchangeAdd64(ptr, val) + val;
-#else
+//#else
-return _InterlockedExchangeAdd(ptr, val) + val;
+// return _InterlockedExchangeAdd(ptr, val) + val;
-#endif
+//#endif
 }

-// and
-#ifndef _TD_GO_DLL_
 char interlocked_and_fetch_8(char volatile* ptr, char val) {
 return _InterlockedAnd8(ptr, val) & val;
 }

@@ -59,41 +57,37 @@ char interlocked_and_fetch_8(char volatile* ptr, char val) {
 short interlocked_and_fetch_16(short volatile* ptr, short val) {
 return _InterlockedAnd16(ptr, val) & val;
 }
-#endif

 long interlocked_and_fetch_32(long volatile* ptr, long val) {
 return _InterlockedAnd(ptr, val) & val;
 }

+__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val) {
 #ifndef _M_IX86
-__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val) {
 return _InterlockedAnd64(ptr, val) & val;
-}
 #else
-__int64 interlocked_and_fetch_64(__int64 volatile* ptr, __int64 val) {
 __int64 old, res;
 do {
 old = *ptr;
 res = old & val;
-} while(_InterlockedCompareExchange64(ptr, res, old) != old);
+} while (_InterlockedCompareExchange64(ptr, res, old) != old);
 return res;
+#endif
 }

 __int64 interlocked_fetch_and_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
 __int64 old;
 do {
 old = *ptr;
-} while(_InterlockedCompareExchange64(ptr, old & val, old) != old);
+} while (_InterlockedCompareExchange64(ptr, old & val, old) != old);
 return old;
+#else
+return _InterlockedAnd64((__int64 volatile*)(ptr), (__int64)(val));
+#endif
 }

-#endif

-// or
-#ifndef _TD_GO_DLL_
 char interlocked_or_fetch_8(char volatile* ptr, char val) {
 return _InterlockedOr8(ptr, val) | val;
 }

@@ -101,40 +95,36 @@ char interlocked_or_fetch_8(char volatile* ptr, char val) {
 short interlocked_or_fetch_16(short volatile* ptr, short val) {
 return _InterlockedOr16(ptr, val) | val;
 }
-#endif
 long interlocked_or_fetch_32(long volatile* ptr, long val) {
 return _InterlockedOr(ptr, val) | val;
 }

-#ifndef _M_IX86
-__int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val) {
-return _InterlockedOr64(ptr, val) & val;
-}
-#else
 __int64 interlocked_or_fetch_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
 __int64 old, res;
 do {
 old = *ptr;
 res = old | val;
 } while(_InterlockedCompareExchange64(ptr, res, old) != old);
 return res;
+#else
+return _InterlockedOr64(ptr, val) & val;
+#endif
 }

 __int64 interlocked_fetch_or_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
 __int64 old;
 do {
 old = *ptr;
 } while(_InterlockedCompareExchange64(ptr, old | val, old) != old);
 return old;
+#else
+return _InterlockedOr64((__int64 volatile*)(ptr), (__int64)(val));
+#endif
 }

-#endif

-// xor
-#ifndef _TD_GO_DLL_
 char interlocked_xor_fetch_8(char volatile* ptr, char val) {
 return _InterlockedXor8(ptr, val) ^ val;
 }

@@ -142,35 +132,33 @@ char interlocked_xor_fetch_8(char volatile* ptr, char val) {
 short interlocked_xor_fetch_16(short volatile* ptr, short val) {
 return _InterlockedXor16(ptr, val) ^ val;
 }
-#endif
 long interlocked_xor_fetch_32(long volatile* ptr, long val) {
 return _InterlockedXor(ptr, val) ^ val;
 }

-#ifndef _M_IX86
-__int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val) {
-return _InterlockedXor64(ptr, val) ^ val;
-}
-#else
 __int64 interlocked_xor_fetch_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
 __int64 old, res;
 do {
 old = *ptr;
 res = old ^ val;
 } while(_InterlockedCompareExchange64(ptr, res, old) != old);
 return res;
+#else
+return _InterlockedXor64(ptr, val) ^ val;
+#endif
 }

 __int64 interlocked_fetch_xor_64(__int64 volatile* ptr, __int64 val) {
+#ifdef _M_IX86
 __int64 old;
 do {
 old = *ptr;
-} while(_InterlockedCompareExchange64(ptr, old ^ val, old) != old);
+} while (_InterlockedCompareExchange64(ptr, old ^ val, old) != old);
 return old;
+#else
+return _InterlockedXor64((__int64 volatile*)(ptr), (__int64)(val));
+#endif
 }

-#endif
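The _M_IX86 branches above all share one shape: emulate a 64-bit read-modify-write with a compare-and-swap loop, because the plain 64-bit intrinsics are not available on 32-bit x86. A standalone sketch of that pattern (illustrative, not code from the patch):

  #include <intrin.h>   /* _InterlockedCompareExchange64 (MSVC) */

  /* Generic 64-bit fetch-and-AND built from a CAS loop, mirroring the 32-bit branches above. */
  static __int64 fetch_and_64(__int64 volatile *ptr, __int64 val) {
    __int64 old;
    do {
      old = *ptr;                                   /* snapshot the current value */
    } while (_InterlockedCompareExchange64(ptr, old & val, old) != old);
    return old;                                     /* value observed before the AND */
  }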
@@ -154,13 +154,15 @@ bool taosGetCpuUsage(float *sysCpuUsage, float *procCpuUsage) {

 bool taosGetProcMemory(float *memoryUsedMB) {
 unsigned bytes_used = 0;
+#if 0
 #if defined(_WIN32) && defined(_MSC_VER)
 PROCESS_MEMORY_COUNTERS pmc;
 HANDLE cur_proc = GetCurrentProcess();

 if (GetProcessMemoryInfo(cur_proc, &pmc, sizeof(pmc))) {
 bytes_used = (unsigned)(pmc.WorkingSetSize + pmc.PagefileUsage);
 }
+#endif
 #endif

 *memoryUsedMB = (float)bytes_used / 1024 / 1024;
@@ -11,7 +11,12 @@ AUX_SOURCE_DIRECTORY(src SRC)

 IF (TD_LINUX)
 ADD_LIBRARY(http ${SRC})
-TARGET_LINK_LIBRARIES(http taos z)
+IF (TD_SOMODE_STATIC)
+TARGET_LINK_LIBRARIES(http taos_static z)
+ELSE ()
+TARGET_LINK_LIBRARIES(http taos z)
+ENDIF ()

 IF (TD_ADMIN)
 TARGET_LINK_LIBRARIES(http admin)
@@ -8,5 +8,10 @@ IF (TD_LINUX)
 INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/client/inc)
 INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/src/query/inc)
 ADD_LIBRARY(monitor ${SRC})
-TARGET_LINK_LIBRARIES(monitor taos)
+IF (TD_SOMODE_STATIC)
+TARGET_LINK_LIBRARIES(monitor taos_static)
+ELSE ()
+TARGET_LINK_LIBRARIES(monitor taos)
+ENDIF ()
 ENDIF ()
@@ -10,8 +10,12 @@ IF (TD_LINUX)
 INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/deps/MQTT-C/include)
 INCLUDE_DIRECTORIES(${TD_COMMUNITY_DIR}/deps/MQTT-C/examples/templates)
 ADD_LIBRARY(mqtt ${SRC})
-TARGET_LINK_LIBRARIES(mqtt taos cJson mqttc)

+IF (TD_SOMODE_STATIC)
+TARGET_LINK_LIBRARIES(mqtt taos_static cJson mqttc)
+ELSE ()
+TARGET_LINK_LIBRARIES(mqtt taos cJson mqttc)
+ENDIF ()
 IF (TD_ADMIN)
 TARGET_LINK_LIBRARIES(mqtt admin cJson)
 ENDIF ()
@@ -39,6 +39,7 @@ int mttIsRuning = 1;

 int32_t mqttInitSystem() {
 int rc = 0;
+#if 0
 uint8_t sendbuf[2048];
 uint8_t recvbuf[1024];
 recntStatus.sendbuf = sendbuf;

@@ -47,7 +48,11 @@ int32_t mqttInitSystem() {
 recntStatus.recvbufsz = sizeof(recvbuf);
 char* url = tsMqttBrokerAddress;
 recntStatus.user_name = strstr(url, "@") != NULL ? strbetween(url, "//", ":") : NULL;
-recntStatus.password = strstr(url, "@") != NULL ? strbetween(strstr(url, recntStatus.user_name), ":", "@") : NULL;
+char * passStr = strstr(url, recntStatus.user_name);
+if (passStr != NULL) {
+recntStatus.password = strstr(url, "@") != NULL ? strbetween(passStr, ":", "@") : NULL;
+}

 if (strlen(url) == 0) {
 mqttDebug("mqtt module not init, url is null");

@@ -91,11 +96,13 @@ int32_t mqttInitSystem() {
 topicPath = NULL;
 }

+#endif
 return rc;
 }

 int32_t mqttStartSystem() {
 int rc = 0;
+#if 0
 if (recntStatus.user_name != NULL && recntStatus.password != NULL) {
 mqttInfo("connecting to mqtt://%s:%s@%s:%s/%s/", recntStatus.user_name, recntStatus.password,
 recntStatus.hostname, recntStatus.port, topicPath);

@@ -112,18 +119,22 @@ int32_t mqttStartSystem() {
 } else {
 mqttInfo("listening for '%s' messages.", recntStatus.topic);
 }
+#endif
 return rc;
 }

 void mqttStopSystem() {
+#if 0
 mqttClient.error = MQTT_ERROR_SOCKET_ERROR;
 mttIsRuning = 0;
 usleep(300000U);
 mqttCleanup(EXIT_SUCCESS, mqttClient.socketfd, &clientDaemonThread);
 mqttInfo("mqtt is stoped");
+#endif
 }

 void mqttCleanUpSystem() {
+#if 0
 mqttInfo("starting to cleanup mqtt");
 free(recntStatus.user_name);
 free(recntStatus.password);

@@ -132,6 +143,7 @@ void mqttCleanUpSystem() {
 free(recntStatus.topic);
 free(topicPath);
 mqttInfo("mqtt is cleaned up");
+#endif
 }

 void mqtt_PublishCallback(void** unused, struct mqtt_response_publish* published) {

@@ -183,9 +195,11 @@ void* mqttClientRefresher(void* client) {
 }

 void mqttCleanup(int status, int sockfd, pthread_t* client_daemon) {
+#if 0
 mqttInfo("clean up mqtt module");
 if (sockfd != -1) close(sockfd);
 if (client_daemon != NULL) pthread_cancel(*client_daemon);
+#endif
 }

 void mqttInitConnCb(void* param, TAOS_RES* result, int32_t code) {
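For context, the reworked credential parsing above simply guards the intermediate strstr() result before handing it to strbetween(), which is what prevents a NULL dereference when the broker URL carries no user name. The same guard in isolation (a sketch; strbetween's exact signature is assumed):

  #include <string.h>

  /* Assumed prototype of the project's helper: returns the text between two markers, or NULL. */
  extern char *strbetween(char *str, char *begin, char *end);

  static char *parsePassword(char *url, char *userName) {
    if (userName == NULL || strstr(url, "@") == NULL) return NULL;  /* no credentials present */
    char *passStr = strstr(url, userName);
    if (passStr == NULL) return NULL;   /* the guard added by the patch: never pass NULL on */
    return strbetween(passStr, ":", "@");
  }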
@@ -410,7 +410,7 @@ static SWindowResult *doSetTimeWindowFromKey(SQueryRuntimeEnv *pRuntimeEnv, SWin
 newCap = (int64_t)(pWindowResInfo->capacity * 1.5);
 }

-char *t = realloc(pWindowResInfo->pResult, newCap * sizeof(SWindowResult));
+char *t = realloc(pWindowResInfo->pResult, (size_t)(newCap * sizeof(SWindowResult)));
 pRuntimeEnv->summary.internalSupSize += (newCap - pWindowResInfo->capacity) * sizeof(SWindowResult);

 if (t == NULL) {

@@ -2257,7 +2257,7 @@ static void ensureOutputBuffer(SQueryRuntimeEnv* pRuntimeEnv, SDataBlockInfo* pB
 if (tmp == NULL) { // todo handle the oom
 assert(0);
 } else {
-memset(tmp + sizeof(tFilePage) + bytes * pRec->rows, 0, (newSize - pRec->rows) * bytes);
+memset(tmp + sizeof(tFilePage) + bytes * pRec->rows, 0, (size_t)((newSize - pRec->rows) * bytes));
 pQuery->sdata[i] = (tFilePage *)tmp;
 }

@@ -2725,7 +2725,7 @@ void copyResToQueryResultBuf(SQInfo *pQInfo, SQuery *pQuery) {
 for (int32_t i = 0; i < pQuery->numOfOutput; ++i) {
 int32_t bytes = pRuntimeEnv->pCtx[i].outputBytes;
 char * pDest = pQuery->sdata[i]->data;
-memcpy(pDest + offset * bytes, pData->data + pRuntimeEnv->offset[i] * pData->num, bytes * pData->num);
+memcpy(pDest + offset * bytes, pData->data + pRuntimeEnv->offset[i] * pData->num, (size_t)(bytes * pData->num));
 }

 offset += (int32_t)pData->num;

@@ -2946,7 +2946,7 @@ int32_t flushFromResultBuf(SQInfo *pQInfo) {
 buf->num = r;

 memcpy(buf->data + pRuntimeEnv->offset[i] * buf->num, ((char *)pQuery->sdata[i]->data) + offset * bytes,
-buf->num * bytes);
+(size_t)(buf->num * bytes));
 }

 offset += r;

@@ -3119,7 +3119,7 @@ void resetCtxOutputBuf(SQueryRuntimeEnv *pRuntimeEnv) {
 pCtx->ptsOutputBuf = pRuntimeEnv->pCtx[0].aOutputBuf;
 }

-memset(pQuery->sdata[i]->data, 0, (size_t)pQuery->pSelectExpr[i].bytes * pQuery->rec.capacity);
+memset(pQuery->sdata[i]->data, 0, (size_t)(pQuery->pSelectExpr[i].bytes * pQuery->rec.capacity));
 }

 initCtxOutputBuf(pRuntimeEnv);

@@ -3198,7 +3198,7 @@ void skipResults(SQueryRuntimeEnv *pRuntimeEnv) {
 int32_t functionId = pQuery->pSelectExpr[i].base.functionId;
 int32_t bytes = pRuntimeEnv->pCtx[i].outputBytes;

-memmove(pQuery->sdata[i]->data, (char*) pQuery->sdata[i]->data + bytes * numOfSkip, pQuery->rec.rows * bytes);
+memmove(pQuery->sdata[i]->data, (char*)pQuery->sdata[i]->data + bytes * numOfSkip, (size_t)(pQuery->rec.rows * bytes));
 pRuntimeEnv->pCtx[i].aOutputBuf = ((char*) pQuery->sdata[i]->data) + pQuery->rec.rows * bytes;

 if (functionId == TSDB_FUNC_DIFF || functionId == TSDB_FUNC_TOP || functionId == TSDB_FUNC_BOTTOM) {

@@ -5242,7 +5242,6 @@ static int32_t getColumnIndexInSource(SQueryTableMsg *pQueryMsg, SSqlFuncMsg *pE
 j += 1;
 }
 }

 assert(0);
 }

@@ -5385,8 +5384,8 @@ static int32_t convertQueryMsg(SQueryTableMsg *pQueryMsg, SArray **pTableIdList,
 if (pColFilter->filterstr) {
 pColFilter->len = htobe64(pFilterMsg->len);

-pColFilter->pz = (int64_t) calloc(1, pColFilter->len + 1 * TSDB_NCHAR_SIZE); // note: null-terminator
+pColFilter->pz = (int64_t)calloc(1, (size_t)(pColFilter->len + 1 * TSDB_NCHAR_SIZE)); // note: null-terminator
-memcpy((void *)pColFilter->pz, pMsg, pColFilter->len);
+memcpy((void *)pColFilter->pz, pMsg, (size_t)pColFilter->len);
 pMsg += (pColFilter->len + 1);
 } else {
 pColFilter->lowerBndi = htobe64(pFilterMsg->lowerBndi);

@@ -5890,7 +5889,7 @@ static SQInfo *createQInfoImpl(SQueryTableMsg *pQueryMsg, SArray* pTableIdList,
 assert(pExprs[col].interBytes >= pExprs[col].bytes);

 // allocate additional memory for interResults that are usually larger then final results
-size_t size = (pQuery->rec.capacity + 1) * pExprs[col].bytes + pExprs[col].interBytes + sizeof(tFilePage);
+size_t size = (size_t)((pQuery->rec.capacity + 1) * pExprs[col].bytes + pExprs[col].interBytes + sizeof(tFilePage));
 pQuery->sdata[col] = (tFilePage *)calloc(1, size);
 if (pQuery->sdata[col] == NULL) {
 goto _cleanup;

@@ -5933,6 +5932,7 @@ static SQInfo *createQInfoImpl(SQueryTableMsg *pQueryMsg, SArray* pTableIdList,
 if (p1 == NULL) {
 goto _cleanup;
 }
+taosArrayPush(pQInfo->tableqinfoGroupInfo.pGroupList, &p1);

 for(int32_t j = 0; j < s; ++j) {
 STableKeyInfo* info = taosArrayGet(pa, j);

@@ -5956,8 +5956,6 @@ static SQInfo *createQInfoImpl(SQueryTableMsg *pQueryMsg, SArray* pTableIdList,
 taosHashPut(pQInfo->tableqinfoGroupInfo.map, &id->tid, sizeof(id->tid), &item, POINTER_BYTES);
 index += 1;
 }

-taosArrayPush(pQInfo->tableqinfoGroupInfo.pGroupList, &p1);
 }

 pQInfo->arrTableIdInfo = taosArrayInit(tableIndex, sizeof(STableIdInfo));

@@ -6164,7 +6162,7 @@ static size_t getResultSize(SQInfo *pQInfo, int64_t *numOfRows) {
 return 0;
 }
 } else {
-return pQuery->rowSize * (*numOfRows);
+return (size_t)(pQuery->rowSize * (*numOfRows));
 }
 }
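A note on the repeated (size_t) casts in these hunks (the example below is illustrative, not patch code): the byte counts are products of int32_t and int64_t values, so the arithmetic happens in 64 bits and passing the result straight to memcpy/memset/realloc narrows it implicitly, which MSVC warns about. Casting the whole product once makes the narrowing explicit:

  #include <stdint.h>
  #include <string.h>

  /* rows and bytes mirror the int64_t/int32_t mix used by the query engine above. */
  static void copyRows(void *dst, const void *src, int64_t rows, int32_t bytes) {
    /* The product is 64-bit; one cast documents the narrowing that memcpy requires. */
    memcpy(dst, src, (size_t)(rows * bytes));
  }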
@@ -990,7 +990,7 @@ void tColModelCompact(SColumnModel *pModel, tFilePage *inputBuffer, int32_t maxE
 SSchemaEx* pSchemaEx = &pModel->pFields[i];
 memmove(inputBuffer->data + pSchemaEx->offset * inputBuffer->num,
 inputBuffer->data + pSchemaEx->offset * maxElemsCapacity,
-pSchemaEx->field.bytes * inputBuffer->num);
+(size_t)(pSchemaEx->field.bytes * inputBuffer->num));
 }
 }
@@ -153,7 +153,7 @@ void taosFillCopyInputDataFromOneFilePage(SFillInfo* pFillInfo, tFilePage* pInpu
 SFillColInfo* pCol = &pFillInfo->pFillCol[i];

 char* data = pInput->data + pCol->col.offset * pInput->num;
-memcpy(pFillInfo->pData[i], data, pInput->num * pCol->col.bytes);
+memcpy(pFillInfo->pData[i], data, (size_t)(pInput->num * pCol->col.bytes));

 if (pCol->flag == TSDB_COL_TAG) { // copy the tag value to tag value buffer
 for (int32_t j = 0; j < pFillInfo->numOfTags; ++j) {
@@ -381,7 +381,7 @@ size_t getNumOfRowsPerPage(const SDiskbasedResultBuf* pResultBuf) { return pResu

 size_t getNumOfResultBufGroupId(const SDiskbasedResultBuf* pResultBuf) { return taosHashGetSize(pResultBuf->groupSet); }

-size_t getResBufSize(const SDiskbasedResultBuf* pResultBuf) { return pResultBuf->totalBufSize; }
+size_t getResBufSize(const SDiskbasedResultBuf* pResultBuf) { return (size_t)pResultBuf->totalBufSize; }

 SIDList getDataBufPagesIdList(SDiskbasedResultBuf* pResultBuf, int32_t groupId) {
 assert(pResultBuf != NULL);
@ -1120,7 +1120,7 @@ static void rpcProcessIncomingMsg(SRpcConn *pConn, SRpcHead *pHead) {
|
||||||
pContext->epSet.port[i] = htons(pContext->epSet.port[i]);
|
pContext->epSet.port[i] = htons(pContext->epSet.port[i]);
|
||||||
rpcSendReqToServer(pRpc, pContext);
|
rpcSendReqToServer(pRpc, pContext);
|
||||||
rpcFreeCont(rpcMsg.pCont);
|
rpcFreeCont(rpcMsg.pCont);
|
||||||
} else if (pHead->code == TSDB_CODE_RPC_NOT_READY) {
|
} else if (pHead->code == TSDB_CODE_RPC_NOT_READY || pHead->code == TSDB_CODE_APP_NOT_READY) {
|
||||||
pContext->code = pHead->code;
|
pContext->code = pHead->code;
|
||||||
rpcProcessConnError(pContext, NULL);
|
rpcProcessConnError(pContext, NULL);
|
||||||
rpcFreeCont(rpcMsg.pCont);
|
rpcFreeCont(rpcMsg.pCont);
|
||||||
|
|
|
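The rpcProcessIncomingMsg hunk above adds TSDB_CODE_APP_NOT_READY to the condition that already covered TSDB_CODE_RPC_NOT_READY, so both "not ready" codes now take the rpcProcessConnError path. A minimal sketch of that kind of predicate follows; the helper name and the numeric code values are invented for illustration and are not the real TDengine definitions.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative stand-ins; the real codes live in the TDengine headers. */
    #define TSDB_CODE_RPC_NOT_READY 0x0001
    #define TSDB_CODE_APP_NOT_READY 0x0002

    /* Hypothetical helper: both "not ready" codes are routed to the
     * connection-error handling path instead of the normal response path. */
    static bool rpcCodeTakesConnErrorPath(int32_t code) {
      return code == TSDB_CODE_RPC_NOT_READY || code == TSDB_CODE_APP_NOT_READY;
    }

    int main(void) {
      int32_t codes[] = {TSDB_CODE_RPC_NOT_READY, TSDB_CODE_APP_NOT_READY, 0};
      for (int i = 0; i < 3; ++i) {
        printf("code 0x%04x -> %s\n", (unsigned)codes[i],
               rpcCodeTakesConnErrorPath(codes[i]) ? "conn-error path" : "normal path");
      }
      return 0;
    }
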
@@ -71,7 +71,18 @@ static void tsdbStopStream(STsdbRepo *pRepo);

// Function declaration
int32_t tsdbCreateRepo(char *rootDir, STsdbCfg *pCfg) {
-  taosRemoveDir(rootDir);
+  DIR *dir = opendir(rootDir);
+  if (dir) {
+    tsdbDebug("repository %s already exists", rootDir);
+    closedir(dir);
+    return 0;
+  } else {
+    if (ENOENT != errno) {
+      tsdbError("failed to open directory %s since %s", rootDir, strerror(errno));
+      terrno = TAOS_SYSTEM_ERROR(errno);
+      return -1;
+    }
+  }

  if (mkdir(rootDir, 0755) < 0) {
    tsdbError("vgId:%d failed to create rootDir %s since %s", pCfg->tsdbId, rootDir, strerror(errno));

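The tsdbCreateRepo change above replaces the unconditional taosRemoveDir(rootDir) with an existence check: an already-present repository directory is kept and reported as success, and only an opendir failure other than ENOENT is treated as an error before the mkdir. The sketch below reproduces that idiom with plain POSIX calls and simplified logging; it assumes a POSIX environment and is not the project's actual implementation.

    #include <dirent.h>
    #include <errno.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/stat.h>

    /* Create rootDir only if it does not already exist, mirroring the
     * opendir/ENOENT idiom from the diff (logging reduced to stderr). */
    static int createRepoDir(const char *rootDir) {
      DIR *dir = opendir(rootDir);
      if (dir != NULL) {
        /* Directory is already there: nothing to do. */
        closedir(dir);
        return 0;
      }
      if (errno != ENOENT) {
        /* opendir failed for a reason other than "does not exist". */
        fprintf(stderr, "failed to open %s: %s\n", rootDir, strerror(errno));
        return -1;
      }
      if (mkdir(rootDir, 0755) < 0) {
        fprintf(stderr, "failed to create %s: %s\n", rootDir, strerror(errno));
        return -1;
      }
      return 0;
    }

    int main(void) {
      return createRepoDir("/tmp/tsdb_repo_example") == 0 ? 0 : 1;
    }
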
@@ -908,7 +908,7 @@ static int tsdbAddSubBlock(SRWHelper *pHelper, SCompBlock *pCompBlock, int blkId

    // Add the sub-block
    if (pSCompBlock->numOfSubBlocks > 1) {
-      size_t tsize = pIdx->len - (pSCompBlock->offset + pSCompBlock->len);
+      size_t tsize = (size_t)(pIdx->len - (pSCompBlock->offset + pSCompBlock->len));
      if (tsize > 0) {
        memmove((void *)((char *)(pHelper->pCompInfo) + pSCompBlock->offset + pSCompBlock->len + sizeof(SCompBlock)),
                (void *)((char *)(pHelper->pCompInfo) + pSCompBlock->offset + pSCompBlock->len), tsize);

@@ -988,7 +988,7 @@ static int tsdbUpdateSuperBlock(SRWHelper *pHelper, SCompBlock *pCompBlock, int

    // Delete the sub blocks it has
    if (pSCompBlock->numOfSubBlocks > 1) {
-      size_t tsize = pIdx->len - (pSCompBlock->offset + pSCompBlock->len);
+      size_t tsize = (size_t)(pIdx->len - (pSCompBlock->offset + pSCompBlock->len));
      if (tsize > 0) {
        memmove(POINTER_SHIFT(pHelper->pCompInfo, pSCompBlock->offset),
                POINTER_SHIFT(pHelper->pCompInfo, pSCompBlock->offset + pSCompBlock->len), tsize);

@@ -355,9 +355,9 @@ static FORCE_INLINE void *taosDecodeString(void *buf, char **value) {
  uint64_t size = 0;

  buf = taosDecodeVariantU64(buf, &size);
-  *value = (char *)malloc(size + 1);
+  *value = (char *)malloc((size_t)size + 1);
  if (*value == NULL) return NULL;
-  memcpy(*value, buf, size);
+  memcpy(*value, buf, (size_t)size);

  (*value)[size] = '\0';

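In the taosDecodeString hunk above, the varint-decoded uint64_t length is cast to size_t before it reaches malloc and memcpy, while one extra byte is still reserved for the terminating NUL. The sketch below shows the same decode shape; the fixed 8-byte length field stands in for taosDecodeVariantU64 and is an assumption made purely for the example.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Decode [8-byte little-endian length][bytes] into a NUL-terminated copy.
     * Returns the position after the consumed bytes, or NULL on failure. */
    static const void *decodeString(const void *buf, char **value) {
      const unsigned char *p = (const unsigned char *)buf;
      uint64_t size = 0;
      for (int i = 0; i < 8; ++i) {              /* stand-in for the varint decoder */
        size |= (uint64_t)p[i] << (8 * i);
      }
      p += 8;

      *value = (char *)malloc((size_t)size + 1); /* +1 for the terminating NUL */
      if (*value == NULL) return NULL;

      memcpy(*value, p, (size_t)size);
      (*value)[size] = '\0';
      return p + size;
    }

    int main(void) {
      unsigned char buf[8 + 5] = {5, 0, 0, 0, 0, 0, 0, 0, 'h', 'e', 'l', 'l', 'o'};
      char *s = NULL;
      if (decodeString(buf, &s) != NULL) {
        printf("%s\n", s);
        free(s);
      }
      return 0;
    }
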
@@ -575,7 +575,7 @@ static int tdRestoreKVStore(SKVStore *pStore) {
    }
  }

-  buf = malloc(maxBufSize);
+  buf = malloc((size_t)maxBufSize);
  if (buf == NULL) {
    uError("failed to allocate %" PRId64 " bytes in KV store %s", maxBufSize, pStore->fname);
    terrno = TAOS_SYSTEM_ERROR(errno);

@@ -598,7 +598,7 @@ static int tdRestoreKVStore(SKVStore *pStore) {
      goto _err;
    }

-    if (taosTRead(pStore->fd, buf, pRecord->size) < pRecord->size) {
+    if (taosTRead(pStore->fd, buf, (size_t)pRecord->size) < pRecord->size) {
      uError("failed to read %" PRId64 " bytes from file %s since %s, offset %" PRId64, pRecord->size, pStore->fname,
             strerror(errno), pRecord->offset);
      terrno = TAOS_SYSTEM_ERROR(errno);

@@ -231,8 +231,9 @@ SOCKET taosOpenUdpSocket(uint32_t ip, uint16_t port) {
  localAddr.sin_addr.s_addr = ip;
  localAddr.sin_port = (uint16_t)htons(port);

-  if ((sockFd = (int)socket(AF_INET, SOCK_DGRAM, 0)) < 0) {
+  if ((sockFd = (int)socket(AF_INET, SOCK_DGRAM, 0)) <= 2) {
    uError("failed to open udp socket: %d (%s)", errno, strerror(errno));
+    taosCloseSocketNoCheck(sockFd);
    return -1;
  }

@@ -265,8 +266,9 @@ SOCKET taosOpenTcpClientSocket(uint32_t destIp, uint16_t destPort, uint32_t clie

  sockFd = socket(PF_INET, SOCK_STREAM, IPPROTO_TCP);

-  if (sockFd < 0) {
+  if (sockFd <= 2) {
    uError("failed to open the socket: %d (%s)", errno, strerror(errno));
+    taosCloseSocketNoCheck(sockFd);
    return -1;
  }

@@ -276,7 +278,7 @@ SOCKET taosOpenTcpClientSocket(uint32_t destIp, uint16_t destPort, uint32_t clie
    uError("setsockopt SO_REUSEADDR failed: %d (%s)", errno, strerror(errno));
    taosCloseSocket(sockFd);
    return -1;
-  };
+  }

  if (clientIp != 0) {
    memset((char *)&clientAddr, 0, sizeof(clientAddr));

@@ -371,8 +373,9 @@ SOCKET taosOpenTcpServerSocket(uint32_t ip, uint16_t port) {
  serverAdd.sin_addr.s_addr = ip;
  serverAdd.sin_port = (uint16_t)htons(port);

-  if ((sockFd = (int)socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)) < 2) {
+  if ((sockFd = (int)socket(AF_INET, SOCK_STREAM, IPPROTO_TCP)) <= 2) {
    uError("failed to open TCP socket: %d (%s)", errno, strerror(errno));
+    taosCloseSocketNoCheck(sockFd);
    return -1;
  }

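The three socket hunks above tighten the failure test to sockFd <= 2 and add a taosCloseSocketNoCheck(sockFd) call on the error path, so a descriptor that collides with stdin/stdout/stderr (0-2) is rejected and released rather than used for data traffic. A small sketch of that guard follows; it uses plain POSIX calls, the wrapper name is invented, and close-ignoring-errors is assumed to approximate what taosCloseSocketNoCheck does.

    #include <errno.h>
    #include <netinet/in.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/socket.h>
    #include <unistd.h>

    /* Open a TCP socket but refuse any descriptor in the 0-2 range so the
     * standard streams can never be reused for socket I/O by mistake. */
    static int openTcpSocketChecked(void) {
      int fd = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
      if (fd <= 2) {
        fprintf(stderr, "failed to open TCP socket: %d (%s)\n", errno, strerror(errno));
        if (fd >= 0) close(fd);  /* release the unusable descriptor, ignoring errors */
        return -1;
      }
      return fd;
    }

    int main(void) {
      int fd = openTcpSocketChecked();
      if (fd >= 0) {
        printf("got usable socket fd %d\n", fd);
        close(fd);
      }
      return 0;
    }
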
@@ -1,7 +1,7 @@
-char version[12] = "2.0.1.0";
+char version[12] = "2.0.1.1";
 char compatible_version[12] = "2.0.0.0";
-char gitinfo[48] = "7ac6c2b8de3cd66e180132fc1cf77715237308a1";
-char gitinfoOfInternal[48] = "e1e64838ece2b6dbe964ec3a39953455f354d930";
-char buildinfo[64] = "Built by root at 2020-08-17 11:13";
+char gitinfo[48] = "ae1966332948147bacce3d32f9ad539ab8721db2";
+char gitinfoOfInternal[48] = "bf53767db56cedb1c484df83a1f10536f12647ad";
+char buildinfo[64] = "Built by root at 2020-08-20 15:46";

-void libtaos_2_0_1_0_Linux_x64() {};
+void libtaos_2_0_1_1_Linux_x64() {};