[TD-2984] <fix>: combine taosdemo and taosdemox. no need taos_init().
This commit is contained in:
commit
22ab19df84
|
@ -79,3 +79,15 @@ tests/comparisonTest/opentsdb/opentsdbtest/.settings/
|
||||||
tests/examples/JDBC/JDBCDemo/.classpath
|
tests/examples/JDBC/JDBCDemo/.classpath
|
||||||
tests/examples/JDBC/JDBCDemo/.project
|
tests/examples/JDBC/JDBCDemo/.project
|
||||||
tests/examples/JDBC/JDBCDemo/.settings/
|
tests/examples/JDBC/JDBCDemo/.settings/
|
||||||
|
|
||||||
|
# Emacs
|
||||||
|
# -*- mode: gitignore; -*-
|
||||||
|
*~
|
||||||
|
\#*\#
|
||||||
|
/.emacs.desktop
|
||||||
|
/.emacs.desktop.lock
|
||||||
|
*.elc
|
||||||
|
auto-save-list
|
||||||
|
tramp
|
||||||
|
.\#*
|
||||||
|
TAGS
|
||||||
|
|
|
@ -5,7 +5,7 @@ node {
|
||||||
git url: 'https://github.com/taosdata/TDengine.git'
|
git url: 'https://github.com/taosdata/TDengine.git'
|
||||||
}
|
}
|
||||||
|
|
||||||
def kipstage=0
|
def skipstage=0
|
||||||
def abortPreviousBuilds() {
|
def abortPreviousBuilds() {
|
||||||
def currentJobName = env.JOB_NAME
|
def currentJobName = env.JOB_NAME
|
||||||
def currentBuildNumber = env.BUILD_NUMBER.toInteger()
|
def currentBuildNumber = env.BUILD_NUMBER.toInteger()
|
||||||
|
@ -88,8 +88,9 @@ pipeline {
|
||||||
git checkout -qf FETCH_HEAD
|
git checkout -qf FETCH_HEAD
|
||||||
'''
|
'''
|
||||||
script{
|
script{
|
||||||
skipstage=sh(script:"git --no-pager diff --name-only FETCH_HEAD develop|grep -v -E '.*md|//src//connector|Jenkinsfile|test-all.sh' || echo 0 ",returnStdout:true)
|
env.skipstage=sh(script:"cd ${WORKSPACE}.tes && git --no-pager diff --name-only FETCH_HEAD develop|grep -v -E '.*md|//src//connector|Jenkinsfile|test-all.sh' || echo 0 ",returnStdout:true)
|
||||||
}
|
}
|
||||||
|
println env.skipstage
|
||||||
sh'''
|
sh'''
|
||||||
rm -rf ${WORKSPACE}.tes
|
rm -rf ${WORKSPACE}.tes
|
||||||
'''
|
'''
|
||||||
|
@ -101,7 +102,7 @@ pipeline {
|
||||||
when {
|
when {
|
||||||
changeRequest()
|
changeRequest()
|
||||||
expression {
|
expression {
|
||||||
skipstage != 0
|
env.skipstage != 0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
parallel {
|
parallel {
|
||||||
|
|
|
@ -24,6 +24,7 @@ TDengine提供了丰富的应用程序开发接口,其中包括C/C++、Java、
|
||||||
* 在没有安装TDengine服务端软件的系统中使用连接器(除RESTful外)访问 TDengine 数据库,需要安装相应版本的客户端安装包来使应用驱动(Linux系统中文件名为libtaos.so,Windows系统中为taos.dll)被安装在系统中,否则会产生无法找到相应库文件的错误。
|
* 在没有安装TDengine服务端软件的系统中使用连接器(除RESTful外)访问 TDengine 数据库,需要安装相应版本的客户端安装包来使应用驱动(Linux系统中文件名为libtaos.so,Windows系统中为taos.dll)被安装在系统中,否则会产生无法找到相应库文件的错误。
|
||||||
* 所有执行 SQL 语句的 API,例如 C/C++ Connector 中的 `tao_query`、`taos_query_a`、`taos_subscribe` 等,以及其它语言中与它们对应的API,每次都只能执行一条 SQL 语句,如果实际参数中包含了多条语句,它们的行为是未定义的。
|
* 所有执行 SQL 语句的 API,例如 C/C++ Connector 中的 `tao_query`、`taos_query_a`、`taos_subscribe` 等,以及其它语言中与它们对应的API,每次都只能执行一条 SQL 语句,如果实际参数中包含了多条语句,它们的行为是未定义的。
|
||||||
* 升级到TDengine到2.0.8.0版本的用户,必须更新JDBC连接TDengine必须升级taos-jdbcdriver到2.0.12及以上。
|
* 升级到TDengine到2.0.8.0版本的用户,必须更新JDBC连接TDengine必须升级taos-jdbcdriver到2.0.12及以上。
|
||||||
|
* 无论选用何种编程语言的连接器,2.0 及以上版本的 TDengine 推荐数据库应用的每个线程都建立一个独立的连接,或基于线程建立连接池,以避免连接内的“USE statement”状态量在线程之间相互干扰(但连接的查询和写入操作都是线程安全的)。
|
||||||
|
|
||||||
## <a class="anchor" id="driver"></a>安装连接器驱动步骤
|
## <a class="anchor" id="driver"></a>安装连接器驱动步骤
|
||||||
|
|
||||||
|
@ -238,13 +239,13 @@ C/C++的API类似于MySQL的C API。应用程序使用时,需要包含TDengine
|
||||||
|
|
||||||
获取查询结果集每列数据的属性(数据类型、名字、字节数),与taos_num_fileds配合使用,可用来解析`taos_fetch_row`返回的一个元组(一行)的数据。 `TAOS_FIELD` 的结构如下:
|
获取查询结果集每列数据的属性(数据类型、名字、字节数),与taos_num_fileds配合使用,可用来解析`taos_fetch_row`返回的一个元组(一行)的数据。 `TAOS_FIELD` 的结构如下:
|
||||||
|
|
||||||
```c
|
```c
|
||||||
typedef struct taosField {
|
typedef struct taosField {
|
||||||
char name[65]; // 列名
|
char name[65]; // 列名
|
||||||
uint8_t type; // 数据类型
|
uint8_t type; // 数据类型
|
||||||
int16_t bytes; // 字节数
|
int16_t bytes; // 字节数
|
||||||
} TAOS_FIELD;
|
} TAOS_FIELD;
|
||||||
```
|
```
|
||||||
|
|
||||||
- `void taos_stop_query(TAOS_RES *res)`
|
- `void taos_stop_query(TAOS_RES *res)`
|
||||||
|
|
||||||
|
@ -266,7 +267,7 @@ C/C++的API类似于MySQL的C API。应用程序使用时,需要包含TDengine
|
||||||
|
|
||||||
### 异步查询API
|
### 异步查询API
|
||||||
|
|
||||||
同步API之外,TDengine还提供性能更高的异步调用API处理数据插入、查询操作。在软硬件环境相同的情况下,异步API处理数据插入的速度比同步API快2\~4倍。异步API采用非阻塞式的调用方式,在系统真正完成某个具体数据库操作前,立即返回。调用的线程可以去处理其他工作,从而可以提升整个应用的性能。异步API在网络延迟严重的情况下,优点尤为突出。
|
同步API之外,TDengine还提供性能更高的异步调用API处理数据插入、查询操作。在软硬件环境相同的情况下,异步API处理数据插入的速度比同步API快2~4倍。异步API采用非阻塞式的调用方式,在系统真正完成某个具体数据库操作前,立即返回。调用的线程可以去处理其他工作,从而可以提升整个应用的性能。异步API在网络延迟严重的情况下,优点尤为突出。
|
||||||
|
|
||||||
异步API都需要应用提供相应的回调函数,回调函数参数设置如下:前两个参数都是一致的,第三个参数依不同的API而定。第一个参数param是应用调用异步API时提供给系统的,用于回调时,应用能够找回具体操作的上下文,依具体实现而定。第二个参数是SQL操作的结果集,如果为空,比如insert操作,表示没有记录返回,如果不为空,比如select操作,表示有记录返回。
|
异步API都需要应用提供相应的回调函数,回调函数参数设置如下:前两个参数都是一致的,第三个参数依不同的API而定。第一个参数param是应用调用异步API时提供给系统的,用于回调时,应用能够找回具体操作的上下文,依具体实现而定。第二个参数是SQL操作的结果集,如果为空,比如insert操作,表示没有记录返回,如果不为空,比如select操作,表示有记录返回。
|
||||||
|
|
||||||
|
@ -896,7 +897,7 @@ Node-example-raw.js
|
||||||
|
|
||||||
验证方法:
|
验证方法:
|
||||||
|
|
||||||
1. 新建安装验证目录,例如:\~/tdengine-test,拷贝github上nodejsChecker.js源程序。下载地址:(https://github.com/taosdata/TDengine/tree/develop/tests/examples/nodejs/nodejsChecker.js)。
|
1. 新建安装验证目录,例如:`~/tdengine-test`,拷贝github上nodejsChecker.js源程序。下载地址:(https://github.com/taosdata/TDengine/tree/develop/tests/examples/nodejs/nodejsChecker.js)。
|
||||||
|
|
||||||
2. 在命令中执行以下命令:
|
2. 在命令中执行以下命令:
|
||||||
|
|
||||||
|
|
|
@ -102,7 +102,7 @@ taosd -C
|
||||||
- maxSQLLength:单条SQL语句允许最长限制。默认值:65380字节。
|
- maxSQLLength:单条SQL语句允许最长限制。默认值:65380字节。
|
||||||
- telemetryReporting: 是否允许 TDengine 采集和上报基本使用信息,0表示不允许,1表示允许。 默认值:1。
|
- telemetryReporting: 是否允许 TDengine 采集和上报基本使用信息,0表示不允许,1表示允许。 默认值:1。
|
||||||
- stream: 是否启用连续查询(流计算功能),0表示不允许,1表示允许。 默认值:1。
|
- stream: 是否启用连续查询(流计算功能),0表示不允许,1表示允许。 默认值:1。
|
||||||
- queryBufferSize: 为所有并发查询占用保留的内存大小。计算规则可以根据实际应用可能的最大并发数和表的数字相乘,再乘 170 。单位为字节。
|
- queryBufferSize: 为所有并发查询占用保留的内存大小。计算规则可以根据实际应用可能的最大并发数和表的数字相乘,再乘 170 。单位为 MB(2.0.15 以前的版本中,此参数的单位是字节)。
|
||||||
- ratioOfQueryCores: 设置查询线程的最大数量。最小值0 表示只有1个查询线程;最大值2表示最大建立2倍CPU核数的查询线程。默认为1,表示最大和CPU核数相等的查询线程。该值可以为小数,即0.5表示最大建立CPU核数一半的查询线程。
|
- ratioOfQueryCores: 设置查询线程的最大数量。最小值0 表示只有1个查询线程;最大值2表示最大建立2倍CPU核数的查询线程。默认为1,表示最大和CPU核数相等的查询线程。该值可以为小数,即0.5表示最大建立CPU核数一半的查询线程。
|
||||||
|
|
||||||
**注意:**对于端口,TDengine会使用从serverPort起13个连续的TCP和UDP端口号,请务必在防火墙打开。因此如果是缺省配置,需要打开从6030到6042共13个端口,而且必须TCP和UDP都打开。
|
**注意:**对于端口,TDengine会使用从serverPort起13个连续的TCP和UDP端口号,请务必在防火墙打开。因此如果是缺省配置,需要打开从6030到6042共13个端口,而且必须TCP和UDP都打开。
|
||||||
|
|
|
@ -267,6 +267,7 @@ TDengine缺省的时间戳是毫秒精度,但通过修改配置参数enableMic
|
||||||
```
|
```
|
||||||
|
|
||||||
## <a class="anchor" id="tags"></a>超级表 STable 中 TAG 管理
|
## <a class="anchor" id="tags"></a>超级表 STable 中 TAG 管理
|
||||||
|
|
||||||
- **添加标签**
|
- **添加标签**
|
||||||
|
|
||||||
```mysql
|
```mysql
|
||||||
|
|
|
@ -140,3 +140,20 @@ TDengine是根据hostname唯一标志一台机器的,在数据文件从机器A
|
||||||
- 2.0.7.0 及以后的版本,到/var/lib/taos/dnode下,修复dnodeEps.json的dnodeId对应的FQDN,重启。确保机器内所有机器的此文件是完全相同的。
|
- 2.0.7.0 及以后的版本,到/var/lib/taos/dnode下,修复dnodeEps.json的dnodeId对应的FQDN,重启。确保机器内所有机器的此文件是完全相同的。
|
||||||
- 1.x 和 2.x 版本的存储结构不兼容,需要使用迁移工具或者自己开发应用导出导入数据。
|
- 1.x 和 2.x 版本的存储结构不兼容,需要使用迁移工具或者自己开发应用导出导入数据。
|
||||||
|
|
||||||
|
## 17. 如何在命令行程序 taos 中临时调整日志级别
|
||||||
|
|
||||||
|
为了调试方便,从 2.0.16 版本开始,命令行程序 taos 新增了与日志记录相关的两条指令:
|
||||||
|
|
||||||
|
```mysql
|
||||||
|
ALTER LOCAL flag_name flag_value;
|
||||||
|
```
|
||||||
|
|
||||||
|
其含义是,在当前的命令行程序下,修改一个特定模块的日志记录级别(只对当前命令行程序有效,如果 taos 命令行程序重启,则需要重新设置):
|
||||||
|
- flag_name 的取值可以是:debugFlag,cDebugFlag,tmrDebugFlag,uDebugFlag,rpcDebugFlag
|
||||||
|
- flag_value 的取值可以是:131(输出错误和警告日志),135( 输出错误、警告和调试日志),143( 输出错误、警告、调试和跟踪日志)
|
||||||
|
|
||||||
|
```mysql
|
||||||
|
ALTER LOCAL RESETLOG;
|
||||||
|
```
|
||||||
|
|
||||||
|
其含义是,清空本机所有由客户端生成的日志文件。
|
||||||
|
|
|
@ -35,10 +35,11 @@ done
|
||||||
|
|
||||||
echo "verNumber=${verNumber}"
|
echo "verNumber=${verNumber}"
|
||||||
|
|
||||||
docker manifest create -a tdengine/tdengine:${verNumber} tdengine/tdengine-amd64:${verNumber} tdengine/tdengine-aarch64:${verNumber} tdengine/tdengine-aarch32:${verNumber}
|
#docker manifest create -a tdengine/tdengine:${verNumber} tdengine/tdengine-amd64:${verNumber} tdengine/tdengine-aarch64:${verNumber} tdengine/tdengine-aarch32:${verNumber}
|
||||||
|
docker manifest create -a tdengine/tdengine tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest
|
||||||
|
|
||||||
docker login -u tdengine -p ${passWord} #replace the docker registry username and password
|
docker login -u tdengine -p ${passWord} #replace the docker registry username and password
|
||||||
|
|
||||||
docker manifest push tdengine/tdengine:${verNumber}
|
docker manifest push tdengine/tdengine
|
||||||
|
|
||||||
# how set latest version ???
|
# how set latest version ???
|
||||||
|
|
|
@ -9,6 +9,7 @@ Summary: tdengine from taosdata
|
||||||
Group: Application/Database
|
Group: Application/Database
|
||||||
License: AGPL
|
License: AGPL
|
||||||
URL: www.taosdata.com
|
URL: www.taosdata.com
|
||||||
|
AutoReqProv: no
|
||||||
|
|
||||||
#BuildRoot: %_topdir/BUILDROOT
|
#BuildRoot: %_topdir/BUILDROOT
|
||||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
|
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
|
||||||
|
|
|
@ -6375,16 +6375,14 @@ int32_t doCheckForCreateFromStable(SSqlObj* pSql, SSqlInfo* pInfo) {
|
||||||
// get table meta from mnode
|
// get table meta from mnode
|
||||||
code = tNameExtractFullName(&pStableMetaInfo->name, pCreateTableInfo->tagdata.name);
|
code = tNameExtractFullName(&pStableMetaInfo->name, pCreateTableInfo->tagdata.name);
|
||||||
|
|
||||||
SArray* pList = pCreateTableInfo->pTagVals;
|
SArray* pValList = pCreateTableInfo->pTagVals;
|
||||||
code = tscGetTableMeta(pSql, pStableMetaInfo);
|
code = tscGetTableMeta(pSql, pStableMetaInfo);
|
||||||
if (code != TSDB_CODE_SUCCESS) {
|
if (code != TSDB_CODE_SUCCESS) {
|
||||||
return code;
|
return code;
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t size = taosArrayGetSize(pList);
|
size_t valSize = taosArrayGetSize(pValList);
|
||||||
if (tscGetNumOfTags(pStableMetaInfo->pTableMeta) != size) {
|
|
||||||
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
|
|
||||||
}
|
|
||||||
|
|
||||||
// too long tag values will return invalid sql, not be truncated automatically
|
// too long tag values will return invalid sql, not be truncated automatically
|
||||||
SSchema *pTagSchema = tscGetTableTagSchema(pStableMetaInfo->pTableMeta);
|
SSchema *pTagSchema = tscGetTableTagSchema(pStableMetaInfo->pTableMeta);
|
||||||
|
@ -6395,36 +6393,111 @@ int32_t doCheckForCreateFromStable(SSqlObj* pSql, SSqlInfo* pInfo) {
|
||||||
return TSDB_CODE_TSC_OUT_OF_MEMORY;
|
return TSDB_CODE_TSC_OUT_OF_MEMORY;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
SArray* pNameList = NULL;
|
||||||
|
size_t nameSize = 0;
|
||||||
|
int32_t schemaSize = tscGetNumOfTags(pStableMetaInfo->pTableMeta);
|
||||||
int32_t ret = TSDB_CODE_SUCCESS;
|
int32_t ret = TSDB_CODE_SUCCESS;
|
||||||
for (int32_t i = 0; i < size; ++i) {
|
|
||||||
SSchema* pSchema = &pTagSchema[i];
|
|
||||||
tVariantListItem* pItem = taosArrayGet(pList, i);
|
|
||||||
|
|
||||||
char tagVal[TSDB_MAX_TAGS_LEN];
|
if (pCreateTableInfo->pTagNames) {
|
||||||
if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
|
pNameList = pCreateTableInfo->pTagNames;
|
||||||
if (pItem->pVar.nLen > pSchema->bytes) {
|
nameSize = taosArrayGetSize(pNameList);
|
||||||
tdDestroyKVRowBuilder(&kvRowBuilder);
|
|
||||||
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
if (valSize != nameSize) {
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (schemaSize < valSize) {
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
|
||||||
|
}
|
||||||
|
|
||||||
|
bool findColumnIndex = false;
|
||||||
|
|
||||||
|
for (int32_t i = 0; i < nameSize; ++i) {
|
||||||
|
SStrToken* sToken = taosArrayGet(pNameList, i);
|
||||||
|
if (TK_STRING == sToken->type) {
|
||||||
|
tscDequoteAndTrimToken(sToken);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
ret = tVariantDump(&(pItem->pVar), tagVal, pSchema->type, true);
|
tVariantListItem* pItem = taosArrayGet(pValList, i);
|
||||||
|
|
||||||
// check again after the convert since it may be converted from binary to nchar.
|
findColumnIndex = false;
|
||||||
if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
|
|
||||||
int16_t len = varDataTLen(tagVal);
|
// todo speedup by using hash list
|
||||||
if (len > pSchema->bytes) {
|
for (int32_t t = 0; t < schemaSize; ++t) {
|
||||||
tdDestroyKVRowBuilder(&kvRowBuilder);
|
if (strncmp(sToken->z, pTagSchema[t].name, sToken->n) == 0 && strlen(pTagSchema[t].name) == sToken->n) {
|
||||||
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
SSchema* pSchema = &pTagSchema[t];
|
||||||
|
|
||||||
|
char tagVal[TSDB_MAX_TAGS_LEN];
|
||||||
|
if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
|
||||||
|
if (pItem->pVar.nLen > pSchema->bytes) {
|
||||||
|
tdDestroyKVRowBuilder(&kvRowBuilder);
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ret = tVariantDump(&(pItem->pVar), tagVal, pSchema->type, true);
|
||||||
|
|
||||||
|
// check again after the convert since it may be converted from binary to nchar.
|
||||||
|
if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
|
||||||
|
int16_t len = varDataTLen(tagVal);
|
||||||
|
if (len > pSchema->bytes) {
|
||||||
|
tdDestroyKVRowBuilder(&kvRowBuilder);
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ret != TSDB_CODE_SUCCESS) {
|
||||||
|
tdDestroyKVRowBuilder(&kvRowBuilder);
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg4);
|
||||||
|
}
|
||||||
|
|
||||||
|
tdAddColToKVRow(&kvRowBuilder, pSchema->colId, pSchema->type, tagVal);
|
||||||
|
|
||||||
|
findColumnIndex = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!findColumnIndex) {
|
||||||
|
return tscInvalidSQLErrMsg(pCmd->payload, "invalid tag name", sToken->z);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (schemaSize != valSize) {
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ret != TSDB_CODE_SUCCESS) {
|
for (int32_t i = 0; i < valSize; ++i) {
|
||||||
tdDestroyKVRowBuilder(&kvRowBuilder);
|
SSchema* pSchema = &pTagSchema[i];
|
||||||
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg4);
|
tVariantListItem* pItem = taosArrayGet(pValList, i);
|
||||||
|
|
||||||
|
char tagVal[TSDB_MAX_TAGS_LEN];
|
||||||
|
if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
|
||||||
|
if (pItem->pVar.nLen > pSchema->bytes) {
|
||||||
|
tdDestroyKVRowBuilder(&kvRowBuilder);
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ret = tVariantDump(&(pItem->pVar), tagVal, pSchema->type, true);
|
||||||
|
|
||||||
|
// check again after the convert since it may be converted from binary to nchar.
|
||||||
|
if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
|
||||||
|
int16_t len = varDataTLen(tagVal);
|
||||||
|
if (len > pSchema->bytes) {
|
||||||
|
tdDestroyKVRowBuilder(&kvRowBuilder);
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ret != TSDB_CODE_SUCCESS) {
|
||||||
|
tdDestroyKVRowBuilder(&kvRowBuilder);
|
||||||
|
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg4);
|
||||||
|
}
|
||||||
|
|
||||||
|
tdAddColToKVRow(&kvRowBuilder, pSchema->colId, pSchema->type, tagVal);
|
||||||
}
|
}
|
||||||
|
|
||||||
tdAddColToKVRow(&kvRowBuilder, pSchema->colId, pSchema->type, tagVal);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
SKVRow row = tdGetKVRowFromBuilder(&kvRowBuilder);
|
SKVRow row = tdGetKVRowFromBuilder(&kvRowBuilder);
|
||||||
|
|
|
@ -52,7 +52,9 @@ static bool validPassword(const char* passwd) {
|
||||||
|
|
||||||
static SSqlObj *taosConnectImpl(const char *ip, const char *user, const char *pass, const char *auth, const char *db,
|
static SSqlObj *taosConnectImpl(const char *ip, const char *user, const char *pass, const char *auth, const char *db,
|
||||||
uint16_t port, void (*fp)(void *, TAOS_RES *, int), void *param, TAOS **taos) {
|
uint16_t port, void (*fp)(void *, TAOS_RES *, int), void *param, TAOS **taos) {
|
||||||
taos_init();
|
if (taos_init()) {
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
if (!validUserName(user)) {
|
if (!validUserName(user)) {
|
||||||
terrno = TSDB_CODE_TSC_INVALID_USER_LENGTH;
|
terrno = TSDB_CODE_TSC_INVALID_USER_LENGTH;
|
||||||
|
|
|
@ -47,6 +47,7 @@ void *tscRpcCache; // cache to keep rpc obj
|
||||||
int32_t tscNumOfThreads = 1; // num of rpc threads
|
int32_t tscNumOfThreads = 1; // num of rpc threads
|
||||||
static pthread_mutex_t rpcObjMutex; // mutex to protect open the rpc obj concurrently
|
static pthread_mutex_t rpcObjMutex; // mutex to protect open the rpc obj concurrently
|
||||||
static pthread_once_t tscinit = PTHREAD_ONCE_INIT;
|
static pthread_once_t tscinit = PTHREAD_ONCE_INIT;
|
||||||
|
static volatile int tscInitRes = 0;
|
||||||
|
|
||||||
void tscCheckDiskUsage(void *UNUSED_PARAM(para), void *UNUSED_PARAM(param)) {
|
void tscCheckDiskUsage(void *UNUSED_PARAM(para), void *UNUSED_PARAM(param)) {
|
||||||
taosGetDisk();
|
taosGetDisk();
|
||||||
|
@ -137,7 +138,11 @@ void taos_init_imp(void) {
|
||||||
}
|
}
|
||||||
|
|
||||||
taosReadGlobalCfg();
|
taosReadGlobalCfg();
|
||||||
taosCheckGlobalCfg();
|
if (taosCheckGlobalCfg()) {
|
||||||
|
tscInitRes = -1;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
taosInitNotes();
|
taosInitNotes();
|
||||||
|
|
||||||
rpcInit();
|
rpcInit();
|
||||||
|
@ -159,6 +164,7 @@ void taos_init_imp(void) {
|
||||||
tscQhandle = taosInitScheduler(queueSize, tscNumOfThreads, "tsc");
|
tscQhandle = taosInitScheduler(queueSize, tscNumOfThreads, "tsc");
|
||||||
if (NULL == tscQhandle) {
|
if (NULL == tscQhandle) {
|
||||||
tscError("failed to init scheduler");
|
tscError("failed to init scheduler");
|
||||||
|
tscInitRes = -1;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -187,7 +193,7 @@ void taos_init_imp(void) {
|
||||||
tscDebug("client is initialized successfully");
|
tscDebug("client is initialized successfully");
|
||||||
}
|
}
|
||||||
|
|
||||||
void taos_init() { pthread_once(&tscinit, taos_init_imp); }
|
int taos_init() { pthread_once(&tscinit, taos_init_imp); return tscInitRes;}
|
||||||
|
|
||||||
// this function may be called by user or system, or by both simultaneously.
|
// this function may be called by user or system, or by both simultaneously.
|
||||||
void taos_cleanup(void) {
|
void taos_cleanup(void) {
|
||||||
|
|
|
@ -373,6 +373,23 @@ static void taosCheckDataDirCfg() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static int32_t taosCheckTmpDir(void) {
|
||||||
|
if (strlen(tsTempDir) <= 0){
|
||||||
|
uError("tempDir is not set");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
DIR *dir = opendir(tsTempDir);
|
||||||
|
if (dir == NULL) {
|
||||||
|
uError("can not open tempDir:%s, error:%s", tsTempDir, strerror(errno));
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
closedir(dir);
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
static void doInitGlobalConfig(void) {
|
static void doInitGlobalConfig(void) {
|
||||||
osInit();
|
osInit();
|
||||||
srand(taosSafeRand());
|
srand(taosSafeRand());
|
||||||
|
@ -1488,6 +1505,11 @@ int32_t taosCheckGlobalCfg() {
|
||||||
}
|
}
|
||||||
|
|
||||||
taosCheckDataDirCfg();
|
taosCheckDataDirCfg();
|
||||||
|
|
||||||
|
if (taosCheckTmpDir()) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
taosGetSystemInfo();
|
taosGetSystemInfo();
|
||||||
|
|
||||||
tsSetLocale();
|
tsSetLocale();
|
||||||
|
|
|
@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
|
||||||
|
|
||||||
setuptools.setup(
|
setuptools.setup(
|
||||||
name="taos",
|
name="taos",
|
||||||
version="2.0.5",
|
version="2.0.6",
|
||||||
author="Taosdata Inc.",
|
author="Taosdata Inc.",
|
||||||
author_email="support@taosdata.com",
|
author_email="support@taosdata.com",
|
||||||
description="TDengine python client package",
|
description="TDengine python client package",
|
||||||
|
|
|
@ -3,12 +3,12 @@ from .connection import TDengineConnection
|
||||||
from .cursor import TDengineCursor
|
from .cursor import TDengineCursor
|
||||||
|
|
||||||
# Globals
|
# Globals
|
||||||
apilevel = '2.0.3'
|
|
||||||
threadsafety = 0
|
threadsafety = 0
|
||||||
paramstyle = 'pyformat'
|
paramstyle = 'pyformat'
|
||||||
|
|
||||||
__all__ = ['connection', 'cursor']
|
__all__ = ['connection', 'cursor']
|
||||||
|
|
||||||
|
|
||||||
def connect(*args, **kwargs):
|
def connect(*args, **kwargs):
|
||||||
""" Function to return a TDengine connector object
|
""" Function to return a TDengine connector object
|
||||||
|
|
||||||
|
@ -21,4 +21,4 @@ def connect(*args, **kwargs):
|
||||||
|
|
||||||
@rtype: TDengineConnector
|
@rtype: TDengineConnector
|
||||||
"""
|
"""
|
||||||
return TDengineConnection(*args, **kwargs)
|
return TDengineConnection(*args, **kwargs)
|
||||||
|
|
|
@ -4,11 +4,14 @@ from .error import *
|
||||||
import math
|
import math
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
def _convert_millisecond_to_datetime(milli):
|
def _convert_millisecond_to_datetime(milli):
|
||||||
return datetime.datetime.fromtimestamp(milli/1000.0)
|
return datetime.datetime.fromtimestamp(milli / 1000.0)
|
||||||
|
|
||||||
|
|
||||||
def _convert_microsecond_to_datetime(micro):
|
def _convert_microsecond_to_datetime(micro):
|
||||||
return datetime.datetime.fromtimestamp(micro/1000000.0)
|
return datetime.datetime.fromtimestamp(micro / 1000000.0)
|
||||||
|
|
||||||
|
|
||||||
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C bool row to python row
|
"""Function to convert C bool row to python row
|
||||||
|
@ -18,168 +21,309 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
_timestamp_converter = _convert_microsecond_to_datetime
|
_timestamp_converter = _convert_microsecond_to_datetime
|
||||||
|
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
|
return list(map(_timestamp_converter, ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
|
||||||
else:
|
else:
|
||||||
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
|
return list(map(_timestamp_converter, ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
|
||||||
|
|
||||||
|
|
||||||
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C bool row to python row
|
"""Function to convert C bool row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
|
return [
|
||||||
|
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_byte))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ]
|
return [
|
||||||
|
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_bool))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C tinyint row to python row
|
"""Function to convert C tinyint row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
|
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
|
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
|
def _crow_tinyint_unsigned_to_python(
|
||||||
|
data,
|
||||||
|
num_of_rows,
|
||||||
|
nbytes=None,
|
||||||
|
micro=False):
|
||||||
|
"""Function to convert C tinyint row to python row
|
||||||
|
"""
|
||||||
|
if num_of_rows > 0:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_byte))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
else:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_byte))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C smallint row to python row
|
"""Function to convert C smallint row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
|
return [
|
||||||
|
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_short))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ]
|
return [
|
||||||
|
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_short))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
|
def _crow_smallint_unsigned_to_python(
|
||||||
|
data, num_of_rows, nbytes=None, micro=False):
|
||||||
|
"""Function to convert C smallint row to python row
|
||||||
|
"""
|
||||||
|
if num_of_rows > 0:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_short))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
else:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_short))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C int row to python row
|
"""Function to convert C int row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
|
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
|
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
|
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
|
"""Function to convert C int row to python row
|
||||||
|
"""
|
||||||
|
if num_of_rows > 0:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_int))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
else:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_int))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C bigint row to python row
|
"""Function to convert C bigint row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
|
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
|
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
|
def _crow_bigint_unsigned_to_python(
|
||||||
|
data,
|
||||||
|
num_of_rows,
|
||||||
|
nbytes=None,
|
||||||
|
micro=False):
|
||||||
|
"""Function to convert C bigint row to python row
|
||||||
|
"""
|
||||||
|
if num_of_rows > 0:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_long))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
else:
|
||||||
|
return [
|
||||||
|
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(
|
||||||
|
ctypes.c_long))[
|
||||||
|
:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C float row to python row
|
"""Function to convert C float row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
|
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
|
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C double row to python row
|
"""Function to convert C double row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
|
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
|
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
||||||
|
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C binary row to python row
|
"""Function to convert C binary row to python row
|
||||||
"""
|
"""
|
||||||
assert(nbytes is not None)
|
assert(nbytes is not None)
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
||||||
|
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
||||||
else:
|
else:
|
||||||
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
||||||
|
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C nchar row to python row
|
"""Function to convert C nchar row to python row
|
||||||
"""
|
"""
|
||||||
assert(nbytes is not None)
|
assert(nbytes is not None)
|
||||||
res=[]
|
res = []
|
||||||
for i in range(abs(num_of_rows)):
|
for i in range(abs(num_of_rows)):
|
||||||
try:
|
try:
|
||||||
if num_of_rows >= 0:
|
if num_of_rows >= 0:
|
||||||
tmpstr = ctypes.c_char_p(data)
|
tmpstr = ctypes.c_char_p(data)
|
||||||
res.append( tmpstr.value.decode() )
|
res.append(tmpstr.value.decode())
|
||||||
else:
|
else:
|
||||||
res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
|
res.append((ctypes.cast(data + nbytes * i,
|
||||||
|
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
res.append(None)
|
res.append(None)
|
||||||
|
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C binary row to python row
|
"""Function to convert C binary row to python row
|
||||||
"""
|
"""
|
||||||
assert(nbytes is not None)
|
assert(nbytes is not None)
|
||||||
res=[]
|
res = []
|
||||||
if num_of_rows > 0:
|
if num_of_rows > 0:
|
||||||
for i in range(abs(num_of_rows)):
|
for i in range(abs(num_of_rows)):
|
||||||
try:
|
try:
|
||||||
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
|
rbyte = ctypes.cast(
|
||||||
tmpstr = ctypes.c_char_p(data+nbytes*i+2)
|
data + nbytes * i,
|
||||||
res.append( tmpstr.value.decode()[0:rbyte] )
|
ctypes.POINTER(
|
||||||
|
ctypes.c_short))[
|
||||||
|
:1].pop()
|
||||||
|
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
||||||
|
res.append(tmpstr.value.decode()[0:rbyte])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
res.append(None)
|
res.append(None)
|
||||||
else:
|
else:
|
||||||
for i in range(abs(num_of_rows)):
|
for i in range(abs(num_of_rows)):
|
||||||
try:
|
try:
|
||||||
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
|
rbyte = ctypes.cast(
|
||||||
tmpstr = ctypes.c_char_p(data+nbytes*i+2)
|
data + nbytes * i,
|
||||||
res.append( tmpstr.value.decode()[0:rbyte] )
|
ctypes.POINTER(
|
||||||
|
ctypes.c_short))[
|
||||||
|
:1].pop()
|
||||||
|
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
||||||
|
res.append(tmpstr.value.decode()[0:rbyte])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
res.append(None)
|
res.append(None)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C nchar row to python row
|
"""Function to convert C nchar row to python row
|
||||||
"""
|
"""
|
||||||
assert(nbytes is not None)
|
assert(nbytes is not None)
|
||||||
res=[]
|
res = []
|
||||||
if num_of_rows >= 0:
|
if num_of_rows >= 0:
|
||||||
for i in range(abs(num_of_rows)):
|
for i in range(abs(num_of_rows)):
|
||||||
try:
|
try:
|
||||||
tmpstr = ctypes.c_char_p(data+nbytes*i+2)
|
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
||||||
res.append( tmpstr.value.decode() )
|
res.append(tmpstr.value.decode())
|
||||||
except ValueError:
|
except ValueError:
|
||||||
res.append(None)
|
res.append(None)
|
||||||
else:
|
else:
|
||||||
for i in range(abs(num_of_rows)):
|
for i in range(abs(num_of_rows)):
|
||||||
try:
|
try:
|
||||||
res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
|
res.append((ctypes.cast(data + nbytes * i + 2,
|
||||||
|
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
res.append(None)
|
res.append(None)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
_CONVERT_FUNC = {
|
_CONVERT_FUNC = {
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
FieldType.C_BOOL: _crow_bool_to_python,
|
||||||
FieldType.C_TINYINT : _crow_tinyint_to_python,
|
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
||||||
FieldType.C_SMALLINT : _crow_smallint_to_python,
|
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
||||||
FieldType.C_INT : _crow_int_to_python,
|
FieldType.C_INT: _crow_int_to_python,
|
||||||
FieldType.C_BIGINT : _crow_bigint_to_python,
|
FieldType.C_BIGINT: _crow_bigint_to_python,
|
||||||
FieldType.C_FLOAT : _crow_float_to_python,
|
FieldType.C_FLOAT: _crow_float_to_python,
|
||||||
FieldType.C_DOUBLE : _crow_double_to_python,
|
FieldType.C_DOUBLE: _crow_double_to_python,
|
||||||
FieldType.C_BINARY: _crow_binary_to_python,
|
FieldType.C_BINARY: _crow_binary_to_python,
|
||||||
FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
|
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
||||||
FieldType.C_NCHAR : _crow_nchar_to_python
|
FieldType.C_NCHAR: _crow_nchar_to_python,
|
||||||
|
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
||||||
|
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
||||||
|
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
||||||
|
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
||||||
}
|
}
|
||||||
|
|
||||||
_CONVERT_FUNC_BLOCK = {
|
_CONVERT_FUNC_BLOCK = {
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
FieldType.C_BOOL: _crow_bool_to_python,
|
||||||
FieldType.C_TINYINT : _crow_tinyint_to_python,
|
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
||||||
FieldType.C_SMALLINT : _crow_smallint_to_python,
|
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
||||||
FieldType.C_INT : _crow_int_to_python,
|
FieldType.C_INT: _crow_int_to_python,
|
||||||
FieldType.C_BIGINT : _crow_bigint_to_python,
|
FieldType.C_BIGINT: _crow_bigint_to_python,
|
||||||
FieldType.C_FLOAT : _crow_float_to_python,
|
FieldType.C_FLOAT: _crow_float_to_python,
|
||||||
FieldType.C_DOUBLE : _crow_double_to_python,
|
FieldType.C_DOUBLE: _crow_double_to_python,
|
||||||
FieldType.C_BINARY: _crow_binary_to_python_block,
|
FieldType.C_BINARY: _crow_binary_to_python_block,
|
||||||
FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
|
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
||||||
FieldType.C_NCHAR : _crow_nchar_to_python_block
|
FieldType.C_NCHAR: _crow_nchar_to_python_block,
|
||||||
|
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
||||||
|
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
||||||
|
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
||||||
|
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
||||||
}
|
}
|
||||||
|
|
||||||
# Corresponding TAOS_FIELD structure in C
|
# Corresponding TAOS_FIELD structure in C
|
||||||
|
|
||||||
|
|
||||||
class TaosField(ctypes.Structure):
|
class TaosField(ctypes.Structure):
|
||||||
_fields_ = [('name', ctypes.c_char * 65),
|
_fields_ = [('name', ctypes.c_char * 65),
|
||||||
('type', ctypes.c_char),
|
('type', ctypes.c_char),
|
||||||
('bytes', ctypes.c_short)]
|
('bytes', ctypes.c_short)]
|
||||||
|
|
||||||
# C interface class
|
# C interface class
|
||||||
|
|
||||||
|
|
||||||
class CTaosInterface(object):
|
class CTaosInterface(object):
|
||||||
|
|
||||||
libtaos = ctypes.CDLL('libtaos.so')
|
libtaos = ctypes.CDLL('libtaos.so')
|
||||||
|
@ -216,7 +360,7 @@ class CTaosInterface(object):
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise AttributeError("config is expected as a str")
|
raise AttributeError("config is expected as a str")
|
||||||
|
|
||||||
if config != None:
|
if config is not None:
|
||||||
CTaosInterface.libtaos.taos_options(3, self._config)
|
CTaosInterface.libtaos.taos_options(3, self._config)
|
||||||
|
|
||||||
CTaosInterface.libtaos.taos_init()
|
CTaosInterface.libtaos.taos_init()
|
||||||
|
@ -227,7 +371,13 @@ class CTaosInterface(object):
|
||||||
"""
|
"""
|
||||||
return self._config
|
return self._config
|
||||||
|
|
||||||
def connect(self, host=None, user="root", password="taosdata", db=None, port=0):
|
def connect(
|
||||||
|
self,
|
||||||
|
host=None,
|
||||||
|
user="root",
|
||||||
|
password="taosdata",
|
||||||
|
db=None,
|
||||||
|
port=0):
|
||||||
'''
|
'''
|
||||||
Function to connect to server
|
Function to connect to server
|
||||||
|
|
||||||
|
@ -236,7 +386,7 @@ class CTaosInterface(object):
|
||||||
# host
|
# host
|
||||||
try:
|
try:
|
||||||
_host = ctypes.c_char_p(host.encode(
|
_host = ctypes.c_char_p(host.encode(
|
||||||
"utf-8")) if host != None else ctypes.c_char_p(None)
|
"utf-8")) if host is not None else ctypes.c_char_p(None)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise AttributeError("host is expected as a str")
|
raise AttributeError("host is expected as a str")
|
||||||
|
|
||||||
|
@ -255,7 +405,7 @@ class CTaosInterface(object):
|
||||||
# db
|
# db
|
||||||
try:
|
try:
|
||||||
_db = ctypes.c_char_p(
|
_db = ctypes.c_char_p(
|
||||||
db.encode("utf-8")) if db != None else ctypes.c_char_p(None)
|
db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise AttributeError("db is expected as a str")
|
raise AttributeError("db is expected as a str")
|
||||||
|
|
||||||
|
@ -268,11 +418,11 @@ class CTaosInterface(object):
|
||||||
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
|
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
|
||||||
_host, _user, _password, _db, _port))
|
_host, _user, _password, _db, _port))
|
||||||
|
|
||||||
if connection.value == None:
|
if connection.value is None:
|
||||||
print('connect to TDengine failed')
|
print('connect to TDengine failed')
|
||||||
raise ConnectionError("connect to TDengine failed")
|
raise ConnectionError("connect to TDengine failed")
|
||||||
# sys.exit(1)
|
# sys.exit(1)
|
||||||
#else:
|
# else:
|
||||||
# print('connect to TDengine success')
|
# print('connect to TDengine success')
|
||||||
|
|
||||||
return connection
|
return connection
|
||||||
|
@ -293,12 +443,13 @@ class CTaosInterface(object):
|
||||||
@rtype: 0 on success and -1 on failure
|
@rtype: 0 on success and -1 on failure
|
||||||
'''
|
'''
|
||||||
try:
|
try:
|
||||||
return CTaosInterface.libtaos.taos_query(connection, ctypes.c_char_p(sql.encode('utf-8')))
|
return CTaosInterface.libtaos.taos_query(
|
||||||
|
connection, ctypes.c_char_p(sql.encode('utf-8')))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise AttributeError("sql is expected as a string")
|
raise AttributeError("sql is expected as a string")
|
||||||
# finally:
|
# finally:
|
||||||
# CTaosInterface.libtaos.close(connection)
|
# CTaosInterface.libtaos.close(connection)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def affectedRows(result):
|
def affectedRows(result):
|
||||||
"""The affected rows after runing query
|
"""The affected rows after runing query
|
||||||
|
@ -308,7 +459,7 @@ class CTaosInterface(object):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def subscribe(connection, restart, topic, sql, interval):
|
def subscribe(connection, restart, topic, sql, interval):
|
||||||
"""Create a subscription
|
"""Create a subscription
|
||||||
@restart boolean,
|
@restart boolean,
|
||||||
@sql string, sql statement for data query, must be a 'select' statement.
|
@sql string, sql statement for data query, must be a 'select' statement.
|
||||||
@topic string, name of this subscription
|
@topic string, name of this subscription
|
||||||
"""
|
"""
|
||||||
|
@ -360,38 +511,53 @@ class CTaosInterface(object):
|
||||||
result, ctypes.byref(pblock))
|
result, ctypes.byref(pblock))
|
||||||
if num_of_rows == 0:
|
if num_of_rows == 0:
|
||||||
return None, 0
|
return None, 0
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
|
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
||||||
|
result) == FieldType.C_TIMESTAMP_MICRO)
|
||||||
blocks = [None] * len(fields)
|
blocks = [None] * len(fields)
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
||||||
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
|
fieldLen = [
|
||||||
|
ele for ele in ctypes.cast(
|
||||||
|
fieldL, ctypes.POINTER(
|
||||||
|
ctypes.c_int))[
|
||||||
|
:len(fields)]]
|
||||||
for i in range(len(fields)):
|
for i in range(len(fields)):
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
|
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
|
||||||
raise DatabaseError("Invalid data type returned from database")
|
raise DatabaseError("Invalid data type returned from database")
|
||||||
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
|
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
|
||||||
|
data, num_of_rows, fieldLen[i], isMicro)
|
||||||
|
|
||||||
return blocks, abs(num_of_rows)
|
return blocks, abs(num_of_rows)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def fetchRow(result, fields):
|
def fetchRow(result, fields):
|
||||||
pblock = ctypes.c_void_p(0)
|
pblock = ctypes.c_void_p(0)
|
||||||
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
|
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
|
||||||
if pblock :
|
if pblock:
|
||||||
num_of_rows = 1
|
num_of_rows = 1
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
|
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
||||||
|
result) == FieldType.C_TIMESTAMP_MICRO)
|
||||||
blocks = [None] * len(fields)
|
blocks = [None] * len(fields)
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
||||||
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
|
fieldLen = [
|
||||||
|
ele for ele in ctypes.cast(
|
||||||
|
fieldL, ctypes.POINTER(
|
||||||
|
ctypes.c_int))[
|
||||||
|
:len(fields)]]
|
||||||
for i in range(len(fields)):
|
for i in range(len(fields)):
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC:
|
if fields[i]['type'] not in _CONVERT_FUNC:
|
||||||
raise DatabaseError("Invalid data type returned from database")
|
raise DatabaseError(
|
||||||
|
"Invalid data type returned from database")
|
||||||
if data is None:
|
if data is None:
|
||||||
blocks[i] = [None]
|
blocks[i] = [None]
|
||||||
else:
|
else:
|
||||||
blocks[i] = _CONVERT_FUNC[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
|
blocks[i] = _CONVERT_FUNC[fields[i]['type']](
|
||||||
|
data, num_of_rows, fieldLen[i], isMicro)
|
||||||
else:
|
else:
|
||||||
return None, 0
|
return None, 0
|
||||||
return blocks, abs(num_of_rows)
|
return blocks, abs(num_of_rows)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def freeResult(result):
|
def freeResult(result):
|
||||||
CTaosInterface.libtaos.taos_free_result(result)
|
CTaosInterface.libtaos.taos_free_result(result)
|
||||||
|
|
|
@ -2,9 +2,11 @@ from .cursor import TDengineCursor
|
||||||
from .subscription import TDengineSubscription
|
from .subscription import TDengineSubscription
|
||||||
from .cinterface import CTaosInterface
|
from .cinterface import CTaosInterface
|
||||||
|
|
||||||
|
|
||||||
class TDengineConnection(object):
|
class TDengineConnection(object):
|
||||||
""" TDengine connection object
|
""" TDengine connection object
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
self._conn = None
|
self._conn = None
|
||||||
self._host = None
|
self._host = None
|
||||||
|
@ -29,7 +31,7 @@ class TDengineConnection(object):
|
||||||
# password
|
# password
|
||||||
if 'password' in kwargs:
|
if 'password' in kwargs:
|
||||||
self._password = kwargs['password']
|
self._password = kwargs['password']
|
||||||
|
|
||||||
# database
|
# database
|
||||||
if 'database' in kwargs:
|
if 'database' in kwargs:
|
||||||
self._database = kwargs['database']
|
self._database = kwargs['database']
|
||||||
|
@ -43,7 +45,12 @@ class TDengineConnection(object):
|
||||||
self._config = kwargs['config']
|
self._config = kwargs['config']
|
||||||
|
|
||||||
self._chandle = CTaosInterface(self._config)
|
self._chandle = CTaosInterface(self._config)
|
||||||
self._conn = self._chandle.connect(self._host, self._user, self._password, self._database, self._port)
|
self._conn = self._chandle.connect(
|
||||||
|
self._host,
|
||||||
|
self._user,
|
||||||
|
self._password,
|
||||||
|
self._database,
|
||||||
|
self._port)
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
"""Close current connection.
|
"""Close current connection.
|
||||||
|
@ -55,7 +62,8 @@ class TDengineConnection(object):
|
||||||
"""
|
"""
|
||||||
if self._conn is None:
|
if self._conn is None:
|
||||||
return None
|
return None
|
||||||
sub = CTaosInterface.subscribe(self._conn, restart, topic, sql, interval)
|
sub = CTaosInterface.subscribe(
|
||||||
|
self._conn, restart, topic, sql, interval)
|
||||||
return TDengineSubscription(sub)
|
return TDengineSubscription(sub)
|
||||||
|
|
||||||
def cursor(self):
|
def cursor(self):
|
||||||
|
@ -80,7 +88,8 @@ class TDengineConnection(object):
|
||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
conn = TDengineConnection(host='192.168.1.107')
|
conn = TDengineConnection(host='192.168.1.107')
|
||||||
conn.close()
|
conn.close()
|
||||||
print("Hello world")
|
print("Hello world")
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
|
|
||||||
from .dbapi import *
|
from .dbapi import *
|
||||||
|
|
||||||
|
|
||||||
class FieldType(object):
|
class FieldType(object):
|
||||||
"""TDengine Field Types
|
"""TDengine Field Types
|
||||||
"""
|
"""
|
||||||
|
@ -18,13 +19,21 @@ class FieldType(object):
|
||||||
C_BINARY = 8
|
C_BINARY = 8
|
||||||
C_TIMESTAMP = 9
|
C_TIMESTAMP = 9
|
||||||
C_NCHAR = 10
|
C_NCHAR = 10
|
||||||
|
C_TINYINT_UNSIGNED = 12
|
||||||
|
C_SMALLINT_UNSIGNED = 13
|
||||||
|
C_INT_UNSIGNED = 14
|
||||||
|
C_BIGINT_UNSIGNED = 15
|
||||||
# NULL value definition
|
# NULL value definition
|
||||||
# NOTE: These values should change according to C definition in tsdb.h
|
# NOTE: These values should change according to C definition in tsdb.h
|
||||||
C_BOOL_NULL = 0x02
|
C_BOOL_NULL = 0x02
|
||||||
C_TINYINT_NULL = -128
|
C_TINYINT_NULL = -128
|
||||||
|
C_TINYINT_UNSIGNED_NULL = 255
|
||||||
C_SMALLINT_NULL = -32768
|
C_SMALLINT_NULL = -32768
|
||||||
|
C_SMALLINT_UNSIGNED_NULL = 65535
|
||||||
C_INT_NULL = -2147483648
|
C_INT_NULL = -2147483648
|
||||||
|
C_INT_UNSIGNED_NULL = 4294967295
|
||||||
C_BIGINT_NULL = -9223372036854775808
|
C_BIGINT_NULL = -9223372036854775808
|
||||||
|
C_BIGINT_UNSIGNED_NULL = 18446744073709551615
|
||||||
C_FLOAT_NULL = float('nan')
|
C_FLOAT_NULL = float('nan')
|
||||||
C_DOUBLE_NULL = float('nan')
|
C_DOUBLE_NULL = float('nan')
|
||||||
C_BINARY_NULL = bytearray([int('0xff', 16)])
|
C_BINARY_NULL = bytearray([int('0xff', 16)])
|
||||||
|
|
|
@ -128,8 +128,8 @@ class TDengineCursor(object):
|
||||||
if errno == 0:
|
if errno == 0:
|
||||||
if CTaosInterface.fieldsCount(self._result) == 0:
|
if CTaosInterface.fieldsCount(self._result) == 0:
|
||||||
self._affected_rows += CTaosInterface.affectedRows(
|
self._affected_rows += CTaosInterface.affectedRows(
|
||||||
self._result )
|
self._result)
|
||||||
return CTaosInterface.affectedRows(self._result )
|
return CTaosInterface.affectedRows(self._result)
|
||||||
else:
|
else:
|
||||||
self._fields = CTaosInterface.useResult(
|
self._fields = CTaosInterface.useResult(
|
||||||
self._result)
|
self._result)
|
||||||
|
@ -148,6 +148,7 @@ class TDengineCursor(object):
|
||||||
"""Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
|
"""Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
|
||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def fetchmany(self):
|
def fetchmany(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -158,11 +159,26 @@ class TDengineCursor(object):
|
||||||
if (dataType.upper() == "TINYINT"):
|
if (dataType.upper() == "TINYINT"):
|
||||||
if (self._description[col][1] == FieldType.C_TINYINT):
|
if (self._description[col][1] == FieldType.C_TINYINT):
|
||||||
return True
|
return True
|
||||||
|
if (dataType.upper() == "TINYINT UNSIGNED"):
|
||||||
|
if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
|
||||||
|
return True
|
||||||
|
if (dataType.upper() == "SMALLINT"):
|
||||||
|
if (self._description[col][1] == FieldType.C_SMALLINT):
|
||||||
|
return True
|
||||||
|
if (dataType.upper() == "SMALLINT UNSIGNED"):
|
||||||
|
if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
|
||||||
|
return True
|
||||||
if (dataType.upper() == "INT"):
|
if (dataType.upper() == "INT"):
|
||||||
if (self._description[col][1] == FieldType.C_INT):
|
if (self._description[col][1] == FieldType.C_INT):
|
||||||
return True
|
return True
|
||||||
|
if (dataType.upper() == "INT UNSIGNED"):
|
||||||
|
if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
|
||||||
|
return True
|
||||||
if (dataType.upper() == "BIGINT"):
|
if (dataType.upper() == "BIGINT"):
|
||||||
if (self._description[col][1] == FieldType.C_INT):
|
if (self._description[col][1] == FieldType.C_BIGINT):
|
||||||
|
return True
|
||||||
|
if (dataType.upper() == "BIGINT UNSIGNED"):
|
||||||
|
if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
|
||||||
return True
|
return True
|
||||||
if (dataType.upper() == "FLOAT"):
|
if (dataType.upper() == "FLOAT"):
|
||||||
if (self._description[col][1] == FieldType.C_FLOAT):
|
if (self._description[col][1] == FieldType.C_FLOAT):
|
||||||
|
@@ -191,16 +207,20 @@ class TDengineCursor(object):
         buffer = [[] for i in range(len(self._fields))]
         self._rowcount = 0
         while True:
-            block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields)
+            block, num_of_fields = CTaosInterface.fetchRow(
+                self._result, self._fields)
             errno = CTaosInterface.libtaos.taos_errno(self._result)
             if errno != 0:
-                raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+                raise ProgrammingError(
+                    CTaosInterface.errStr(self._result), errno)
             if num_of_fields == 0:
                 break
             self._rowcount += num_of_fields
             for i in range(len(self._fields)):
                 buffer[i].extend(block[i])
         return list(map(tuple, zip(*buffer)))

     def fetchall(self):
         if self._result is None or self._fields is None:
             raise OperationalError("Invalid use of fetchall")

@@ -208,16 +228,20 @@ class TDengineCursor(object):
         buffer = [[] for i in range(len(self._fields))]
         self._rowcount = 0
         while True:
-            block, num_of_fields = CTaosInterface.fetchBlock(self._result, self._fields)
+            block, num_of_fields = CTaosInterface.fetchBlock(
+                self._result, self._fields)
             errno = CTaosInterface.libtaos.taos_errno(self._result)
             if errno != 0:
-                raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+                raise ProgrammingError(
+                    CTaosInterface.errStr(self._result), errno)
             if num_of_fields == 0:
                 break
             self._rowcount += num_of_fields
             for i in range(len(self._fields)):
                 buffer[i].extend(block[i])
         return list(map(tuple, zip(*buffer)))

     def nextset(self):
         """
         """
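fetchmany() and fetchall() above collect data column by column and then transpose it with zip(*buffer). A minimal stand-alone illustration of that step, using plain lists instead of the connector's fetched blocks:

    buffer = [[1, 2, 3], ['a', 'b', 'c']]      # one inner list per column
    rows = list(map(tuple, zip(*buffer)))      # transpose columns into row tuples
    print(rows)                                # [(1, 'a'), (2, 'b'), (3, 'c')]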
@@ -4,6 +4,7 @@
 import time
 import datetime

+
 class DBAPITypeObject(object):
     def __init__(self, *values):
         self.values = values

@@ -16,23 +17,28 @@ class DBAPITypeObject(object):
         else:
             return -1

+
 Date = datetime.date
 Time = datetime.time
 Timestamp = datetime.datetime

+
 def DataFromTicks(ticks):
     return Date(*time.localtime(ticks)[:3])

+
 def TimeFromTicks(ticks):
     return Time(*time.localtime(ticks)[3:6])

+
 def TimestampFromTicks(ticks):
     return Timestamp(*time.localtime(ticks)[:6])

+
 Binary = bytes

 # STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
 # BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
 # NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
 # DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
 # ROWID = DBAPITypeObject()
@@ -1,35 +1,41 @@
 """Python exceptions
 """

+
 class Error(Exception):
     def __init__(self, msg=None, errno=None):
         self.msg = msg
         self._full_msg = self.msg
         self.errno = errno

     def __str__(self):
         return self._full_msg

+
 class Warning(Exception):
     """Exception raised for important warnings like data truncations while inserting.
     """
     pass

+
 class InterfaceError(Error):
     """Exception raised for errors that are related to the database interface rather than the database itself.
     """
     pass

+
 class DatabaseError(Error):
     """Exception raised for errors that are related to the database.
     """
     pass

+
 class DataError(DatabaseError):
     """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
     """
     pass

+
 class OperationalError(DatabaseError):
     """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
     """

@@ -41,17 +47,20 @@ class IntegrityError(DatabaseError):
     """
     pass

+
 class InternalError(DatabaseError):
     """Exception raised when the database encounters an internal error.
     """
     pass

+
 class ProgrammingError(DatabaseError):
     """Exception raised for programming errors.
     """
     pass

+
 class NotSupportedError(DatabaseError):
     """Exception raised in case a method or database API was used which is not supported by the database,.
     """
     pass
@@ -1,52 +1,57 @@
 from .cinterface import CTaosInterface
 from .error import *


 class TDengineSubscription(object):
     """TDengine subscription object
     """

     def __init__(self, sub):
         self._sub = sub

     def consume(self):
         """Consume rows of a subscription
         """
         if self._sub is None:
             raise OperationalError("Invalid use of consume")

         result, fields = CTaosInterface.consume(self._sub)
         buffer = [[] for i in range(len(fields))]
         while True:
             block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
-            if num_of_fields == 0: break
+            if num_of_fields == 0:
+                break
             for i in range(len(fields)):
                 buffer[i].extend(block[i])

         self.fields = fields
         return list(map(tuple, zip(*buffer)))

-    def close(self, keepProgress = True):
+    def close(self, keepProgress=True):
         """Close the Subscription.
         """
         if self._sub is None:
             return False

         CTaosInterface.unsubscribe(self._sub, keepProgress)
         return True


 if __name__ == '__main__':
     from .connection import TDengineConnection
-    conn = TDengineConnection(host="127.0.0.1", user="root", password="taosdata", database="test")
+    conn = TDengineConnection(
+        host="127.0.0.1",
+        user="root",
+        password="taosdata",
+        database="test")

     # Generate a cursor object to run SQL commands
     sub = conn.subscribe(True, "test", "select * from meters;", 1000)

-    for i in range(0,10):
+    for i in range(0, 10):
         data = sub.consume()
         for d in data:
             print(d)

     sub.close()
     conn.close()
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:

 setuptools.setup(
     name="taos",
-    version="2.0.4",
+    version="2.0.5",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",
@@ -3,12 +3,12 @@ from .connection import TDengineConnection
 from .cursor import TDengineCursor

 # Globals
-apilevel = '2.0.3'
 threadsafety = 0
 paramstyle = 'pyformat'

 __all__ = ['connection', 'cursor']


 def connect(*args, **kwargs):
     """ Function to return a TDengine connector object
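For reference, a minimal round trip through the module-level connect() helper shown above; host and credentials are the defaults used elsewhere in this diff and will differ in a real deployment:

    import taos

    conn = taos.connect(host="127.0.0.1", user="root", password="taosdata", database="test")
    cursor = conn.cursor()
    cursor.execute("show databases")
    for row in cursor.fetchall():
        print(row)
    cursor.close()
    conn.close()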
@@ -4,11 +4,14 @@ from .error import *
 import math
 import datetime

+
 def _convert_millisecond_to_datetime(milli):
-    return datetime.datetime.fromtimestamp(milli/1000.0)
+    return datetime.datetime.fromtimestamp(milli / 1000.0)

+
 def _convert_microsecond_to_datetime(micro):
-    return datetime.datetime.fromtimestamp(micro/1000000.0)
+    return datetime.datetime.fromtimestamp(micro / 1000000.0)

+
 def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C bool row to python row
@@ -18,168 +21,309 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
         _timestamp_converter = _convert_microsecond_to_datetime

     if num_of_rows > 0:
-        return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
+        return list(map(_timestamp_converter, ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
     else:
-        return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
+        return list(map(_timestamp_converter, ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))

+
 def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C bool row to python row
     """
     if num_of_rows > 0:
-        return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
     else:
-        return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)]]

+
 def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C tinyint row to python row
     """
     if num_of_rows > 0:
-        return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
     else:
-        return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]

+
+def _crow_tinyint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
+    """Function to convert C tinyint row to python row
+    """
+    if num_of_rows > 0:
+        return [None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
+    else:
+        return [None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]

+
 def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C smallint row to python row
     """
     if num_of_rows > 0:
-        return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
+        return [None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
     else:
-        return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]

+
+def _crow_smallint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
+    """Function to convert C smallint row to python row
+    """
+    if num_of_rows > 0:
+        return [None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
+    else:
+        return [None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]

+
 def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C int row to python row
     """
     if num_of_rows > 0:
-        return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
     else:
-        return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]

+
+def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
+    """Function to convert C int row to python row
+    """
+    if num_of_rows > 0:
+        return [None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
+    else:
+        return [None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]

+
 def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C bigint row to python row
     """
     if num_of_rows > 0:
-        return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
     else:
-        return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ]
+        return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]

+
+def _crow_bigint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
+    """Function to convert C bigint row to python row
+    """
+    if num_of_rows > 0:
+        return [None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
+    else:
+        return [None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]

+
 def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C float row to python row
     """
     if num_of_rows > 0:
-        return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
+        return [None if math.isnan(ele) else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
     else:
-        return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ]
+        return [None if math.isnan(ele) else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]

+
 def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C double row to python row
     """
     if num_of_rows > 0:
-        return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
+        return [None if math.isnan(ele) else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
     else:
-        return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ]
+        return [None if math.isnan(ele) else ele for ele in ctypes.cast(
+            data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]

+
 def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C binary row to python row
     """
     assert(nbytes is not None)
     if num_of_rows > 0:
-        return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
+        return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
+            'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
     else:
-        return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
+        return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
+            'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]

+
 def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C nchar row to python row
     """
     assert(nbytes is not None)
-    res=[]
+    res = []
     for i in range(abs(num_of_rows)):
         try:
             if num_of_rows >= 0:
                 tmpstr = ctypes.c_char_p(data)
-                res.append( tmpstr.value.decode() )
+                res.append(tmpstr.value.decode())
             else:
-                res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
+                res.append((ctypes.cast(data + nbytes * i,
+                            ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
         except ValueError:
             res.append(None)

     return res

+
 def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C binary row to python row
     """
     assert(nbytes is not None)
-    res=[]
+    res = []
     if num_of_rows > 0:
         for i in range(abs(num_of_rows)):
             try:
-                rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
-                tmpstr = ctypes.c_char_p(data+nbytes*i+2)
-                res.append( tmpstr.value.decode()[0:rbyte] )
+                rbyte = ctypes.cast(
+                    data + nbytes * i, ctypes.POINTER(ctypes.c_short))[:1].pop()
+                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
+                res.append(tmpstr.value.decode()[0:rbyte])
             except ValueError:
                 res.append(None)
     else:
         for i in range(abs(num_of_rows)):
             try:
-                rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop()
-                tmpstr = ctypes.c_char_p(data+nbytes*i+2)
-                res.append( tmpstr.value.decode()[0:rbyte] )
+                rbyte = ctypes.cast(
+                    data + nbytes * i, ctypes.POINTER(ctypes.c_short))[:1].pop()
+                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
+                res.append(tmpstr.value.decode()[0:rbyte])
             except ValueError:
                 res.append(None)
     return res

+
 def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C nchar row to python row
     """
     assert(nbytes is not None)
-    res=[]
+    res = []
     if num_of_rows >= 0:
         for i in range(abs(num_of_rows)):
             try:
-                tmpstr = ctypes.c_char_p(data+nbytes*i+2)
-                res.append( tmpstr.value.decode() )
+                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
+                res.append(tmpstr.value.decode())
             except ValueError:
                 res.append(None)
     else:
         for i in range(abs(num_of_rows)):
             try:
-                res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value )
+                res.append((ctypes.cast(data + nbytes * i + 2,
+                            ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
             except ValueError:
                 res.append(None)
     return res

+
 _CONVERT_FUNC = {
     FieldType.C_BOOL: _crow_bool_to_python,
-    FieldType.C_TINYINT : _crow_tinyint_to_python,
-    FieldType.C_SMALLINT : _crow_smallint_to_python,
-    FieldType.C_INT : _crow_int_to_python,
-    FieldType.C_BIGINT : _crow_bigint_to_python,
-    FieldType.C_FLOAT : _crow_float_to_python,
-    FieldType.C_DOUBLE : _crow_double_to_python,
+    FieldType.C_TINYINT: _crow_tinyint_to_python,
+    FieldType.C_SMALLINT: _crow_smallint_to_python,
+    FieldType.C_INT: _crow_int_to_python,
+    FieldType.C_BIGINT: _crow_bigint_to_python,
+    FieldType.C_FLOAT: _crow_float_to_python,
+    FieldType.C_DOUBLE: _crow_double_to_python,
     FieldType.C_BINARY: _crow_binary_to_python,
-    FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
-    FieldType.C_NCHAR : _crow_nchar_to_python
+    FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
+    FieldType.C_NCHAR: _crow_nchar_to_python,
+    FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
+    FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
+    FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
+    FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
 }

 _CONVERT_FUNC_BLOCK = {
     FieldType.C_BOOL: _crow_bool_to_python,
-    FieldType.C_TINYINT : _crow_tinyint_to_python,
-    FieldType.C_SMALLINT : _crow_smallint_to_python,
-    FieldType.C_INT : _crow_int_to_python,
-    FieldType.C_BIGINT : _crow_bigint_to_python,
-    FieldType.C_FLOAT : _crow_float_to_python,
-    FieldType.C_DOUBLE : _crow_double_to_python,
+    FieldType.C_TINYINT: _crow_tinyint_to_python,
+    FieldType.C_SMALLINT: _crow_smallint_to_python,
+    FieldType.C_INT: _crow_int_to_python,
+    FieldType.C_BIGINT: _crow_bigint_to_python,
+    FieldType.C_FLOAT: _crow_float_to_python,
+    FieldType.C_DOUBLE: _crow_double_to_python,
     FieldType.C_BINARY: _crow_binary_to_python_block,
-    FieldType.C_TIMESTAMP : _crow_timestamp_to_python,
-    FieldType.C_NCHAR : _crow_nchar_to_python_block
+    FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
+    FieldType.C_NCHAR: _crow_nchar_to_python_block,
+    FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
+    FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
+    FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
+    FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
 }

 # Corresponding TAOS_FIELD structure in C


 class TaosField(ctypes.Structure):
     _fields_ = [('name', ctypes.c_char * 65),
                 ('type', ctypes.c_char),
                 ('bytes', ctypes.c_short)]

 # C interface class


 class CTaosInterface(object):

     libtaos = ctypes.CDLL('libtaos.so')
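The two dictionaries above let fetchRow()/fetchBlock() pick a converter from a column's numeric type code. A simplified, self-contained sketch of that dispatch pattern; the type code and NULL sentinel are taken from the constants in this diff, but the data is a plain list rather than a ctypes buffer:

    C_INT_UNSIGNED = 14
    C_INT_UNSIGNED_NULL = 4294967295

    def fake_int_unsigned_to_python(values):
        # Stand-in for _crow_int_unsigned_to_python: map the NULL sentinel to None.
        return [None if v == C_INT_UNSIGNED_NULL else v for v in values]

    CONVERTERS = {C_INT_UNSIGNED: fake_int_unsigned_to_python}

    field_type = C_INT_UNSIGNED
    print(CONVERTERS[field_type]([1, C_INT_UNSIGNED_NULL, 3]))  # [1, None, 3]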
@@ -216,7 +360,7 @@ class CTaosInterface(object):
         except AttributeError:
             raise AttributeError("config is expected as a str")

-        if config != None:
+        if config is not None:
             CTaosInterface.libtaos.taos_options(3, self._config)

         CTaosInterface.libtaos.taos_init()

@@ -227,7 +371,13 @@ class CTaosInterface(object):
         """
         return self._config

-    def connect(self, host=None, user="root", password="taosdata", db=None, port=0):
+    def connect(
+            self,
+            host=None,
+            user="root",
+            password="taosdata",
+            db=None,
+            port=0):
         '''
         Function to connect to server

@@ -236,7 +386,7 @@ class CTaosInterface(object):
         # host
         try:
             _host = ctypes.c_char_p(host.encode(
-                "utf-8")) if host != None else ctypes.c_char_p(None)
+                "utf-8")) if host is not None else ctypes.c_char_p(None)
         except AttributeError:
             raise AttributeError("host is expected as a str")

@@ -255,7 +405,7 @@ class CTaosInterface(object):
         # db
         try:
             _db = ctypes.c_char_p(
-                db.encode("utf-8")) if db != None else ctypes.c_char_p(None)
+                db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
         except AttributeError:
             raise AttributeError("db is expected as a str")

@@ -268,11 +418,11 @@ class CTaosInterface(object):
         connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
             _host, _user, _password, _db, _port))

-        if connection.value == None:
+        if connection.value is None:
             print('connect to TDengine failed')
             raise ConnectionError("connect to TDengine failed")
             # sys.exit(1)
-        #else:
+        # else:
         #    print('connect to TDengine success')

         return connection

@@ -293,7 +443,8 @@ class CTaosInterface(object):
         @rtype: 0 on success and -1 on failure
         '''
         try:
-            return CTaosInterface.libtaos.taos_query(connection, ctypes.c_char_p(sql.encode('utf-8')))
+            return CTaosInterface.libtaos.taos_query(
+                connection, ctypes.c_char_p(sql.encode('utf-8')))
         except AttributeError:
             raise AttributeError("sql is expected as a string")
         # finally:

@@ -308,7 +459,7 @@ class CTaosInterface(object):
     @staticmethod
     def subscribe(connection, restart, topic, sql, interval):
         """Create a subscription
         @restart boolean,
         @sql string, sql statement for data query, must be a 'select' statement.
         @topic string, name of this subscription
         """

@@ -360,35 +511,49 @@ class CTaosInterface(object):
             result, ctypes.byref(pblock))
         if num_of_rows == 0:
             return None, 0
-        isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
+        isMicro = (CTaosInterface.libtaos.taos_result_precision(
+            result) == FieldType.C_TIMESTAMP_MICRO)
         blocks = [None] * len(fields)
         fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
-        fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
+        fieldLen = [
+            ele for ele in ctypes.cast(
+                fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
         for i in range(len(fields)):
             data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
             if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
                 raise DatabaseError("Invalid data type returned from database")
-            blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
+            blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
+                data, num_of_rows, fieldLen[i], isMicro)

         return blocks, abs(num_of_rows)

     @staticmethod
     def fetchRow(result, fields):
         pblock = ctypes.c_void_p(0)
         pblock = CTaosInterface.libtaos.taos_fetch_row(result)
-        if pblock :
+        if pblock:
             num_of_rows = 1
-            isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO)
+            isMicro = (CTaosInterface.libtaos.taos_result_precision(
+                result) == FieldType.C_TIMESTAMP_MICRO)
             blocks = [None] * len(fields)
             fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
-            fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
+            fieldLen = [
+                ele for ele in ctypes.cast(
+                    fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
             for i in range(len(fields)):
                 data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
                 if fields[i]['type'] not in _CONVERT_FUNC:
-                    raise DatabaseError("Invalid data type returned from database")
+                    raise DatabaseError(
+                        "Invalid data type returned from database")
                 if data is None:
                     blocks[i] = [None]
                 else:
-                    blocks[i] = _CONVERT_FUNC[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro)
+                    blocks[i] = _CONVERT_FUNC[fields[i]['type']](
+                        data, num_of_rows, fieldLen[i], isMicro)
         else:
             return None, 0
         return blocks, abs(num_of_rows)
@@ -2,9 +2,11 @@ from .cursor import TDengineCursor
 from .subscription import TDengineSubscription
 from .cinterface import CTaosInterface

+
+
 class TDengineConnection(object):
     """ TDengine connection object
     """

     def __init__(self, *args, **kwargs):
         self._conn = None
         self._host = None

@@ -29,7 +31,7 @@ class TDengineConnection(object):
         # password
         if 'password' in kwargs:
             self._password = kwargs['password']

         # database
         if 'database' in kwargs:
             self._database = kwargs['database']

@@ -43,7 +45,12 @@ class TDengineConnection(object):
             self._config = kwargs['config']

         self._chandle = CTaosInterface(self._config)
-        self._conn = self._chandle.connect(self._host, self._user, self._password, self._database, self._port)
+        self._conn = self._chandle.connect(
+            self._host,
+            self._user,
+            self._password,
+            self._database,
+            self._port)

     def close(self):
         """Close current connection.

@@ -55,7 +62,8 @@ class TDengineConnection(object):
         """
         if self._conn is None:
             return None
-        sub = CTaosInterface.subscribe(self._conn, restart, topic, sql, interval)
+        sub = CTaosInterface.subscribe(
+            self._conn, restart, topic, sql, interval)
         return TDengineSubscription(sub)

     def cursor(self):

@@ -80,7 +88,8 @@ class TDengineConnection(object):
         """
         pass

+
 if __name__ == "__main__":
     conn = TDengineConnection(host='192.168.1.107')
     conn.close()
     print("Hello world")
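TDengineConnection accepts its settings as keyword arguments (host, user, password, database, port, config), as the constructor above shows. A hedged example; the config directory path is an assumption about the local setup:

    from taos.connection import TDengineConnection

    conn = TDengineConnection(
        host="127.0.0.1",
        user="root",
        password="taosdata",
        database="test",
        config="/etc/taos")      # assumed client configuration directory
    cursor = conn.cursor()
    cursor.execute("select server_version()")
    print(cursor.fetchall())
    cursor.close()
    conn.close()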
@@ -3,6 +3,7 @@

 from .dbapi import *

+
 class FieldType(object):
     """TDengine Field Types
     """

@@ -18,13 +19,21 @@ class FieldType(object):
     C_BINARY = 8
     C_TIMESTAMP = 9
     C_NCHAR = 10
+    C_TINYINT_UNSIGNED = 12
+    C_SMALLINT_UNSIGNED = 13
+    C_INT_UNSIGNED = 14
+    C_BIGINT_UNSIGNED = 15
     # NULL value definition
     # NOTE: These values should change according to C definition in tsdb.h
     C_BOOL_NULL = 0x02
     C_TINYINT_NULL = -128
+    C_TINYINT_UNSIGNED_NULL = 255
     C_SMALLINT_NULL = -32768
+    C_SMALLINT_UNSIGNED_NULL = 65535
     C_INT_NULL = -2147483648
+    C_INT_UNSIGNED_NULL = 4294967295
     C_BIGINT_NULL = -9223372036854775808
+    C_BIGINT_UNSIGNED_NULL = 18446744073709551615
     C_FLOAT_NULL = float('nan')
     C_DOUBLE_NULL = float('nan')
     C_BINARY_NULL = bytearray([int('0xff', 16)])
@@ -5,6 +5,7 @@ import threading

 # querySeqNum = 0

+
 class TDengineCursor(object):
     """Database cursor which is used to manage the context of a fetch operation.

@@ -107,8 +108,8 @@ class TDengineCursor(object):
         # if threading.get_ident() != self._threadId:
         #    info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
         #    raise OperationalError(info)
         #    print(info)
         #    return None

         if not operation:
             return None

@@ -137,8 +138,8 @@ class TDengineCursor(object):
         if errno == 0:
             if CTaosInterface.fieldsCount(self._result) == 0:
                 self._affected_rows += CTaosInterface.affectedRows(
-                    self._result )
-                return CTaosInterface.affectedRows(self._result )
+                    self._result)
+                return CTaosInterface.affectedRows(self._result)
             else:
                 self._fields = CTaosInterface.useResult(
                     self._result)

@@ -168,11 +169,26 @@ class TDengineCursor(object):
         if (dataType.upper() == "TINYINT"):
             if (self._description[col][1] == FieldType.C_TINYINT):
                 return True
+        if (dataType.upper() == "TINYINT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
+                return True
+        if (dataType.upper() == "SMALLINT"):
+            if (self._description[col][1] == FieldType.C_SMALLINT):
+                return True
+        if (dataType.upper() == "SMALLINT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
+                return True
         if (dataType.upper() == "INT"):
             if (self._description[col][1] == FieldType.C_INT):
                 return True
+        if (dataType.upper() == "INT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
+                return True
         if (dataType.upper() == "BIGINT"):
-            if (self._description[col][1] == FieldType.C_INT):
+            if (self._description[col][1] == FieldType.C_BIGINT):
+                return True
+        if (dataType.upper() == "BIGINT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
                 return True
         if (dataType.upper() == "FLOAT"):
             if (self._description[col][1] == FieldType.C_FLOAT):

@@ -201,10 +217,13 @@ class TDengineCursor(object):
         buffer = [[] for i in range(len(self._fields))]
         self._rowcount = 0
         while True:
-            block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields)
+            block, num_of_fields = CTaosInterface.fetchRow(
+                self._result, self._fields)
             errno = CTaosInterface.libtaos.taos_errno(self._result)
             if errno != 0:
-                raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+                raise ProgrammingError(
+                    CTaosInterface.errStr(self._result), errno)
             if num_of_fields == 0:
                 break
             self._rowcount += num_of_fields

@@ -219,15 +238,20 @@ class TDengineCursor(object):
         buffer = [[] for i in range(len(self._fields))]
         self._rowcount = 0
         while True:
-            block, num_of_fields = CTaosInterface.fetchBlock(self._result, self._fields)
+            block, num_of_fields = CTaosInterface.fetchBlock(
+                self._result, self._fields)
             errno = CTaosInterface.libtaos.taos_errno(self._result)
             if errno != 0:
-                raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
-            if num_of_fields == 0: break
+                raise ProgrammingError(
+                    CTaosInterface.errStr(self._result), errno)
+            if num_of_fields == 0:
+                break
             self._rowcount += num_of_fields
             for i in range(len(self._fields)):
                 buffer[i].extend(block[i])
         return list(map(tuple, zip(*buffer)))

     def nextset(self):
         """
         """

@@ -259,8 +283,8 @@ class TDengineCursor(object):
         # if threading.get_ident() != self._threadId:
         #    info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
         #    raise OperationalError(info)
         #    print(info)
         #    return None

         self._description = []
         for ele in self._fields:

@@ -268,4 +292,3 @@ class TDengineCursor(object):
                 (ele['name'], ele['type'], None, None, None, None, False))

         return self._result
@@ -4,6 +4,7 @@
 import time
 import datetime

+
 class DBAPITypeObject(object):
     def __init__(self, *values):
         self.values = values

@@ -16,23 +17,28 @@ class DBAPITypeObject(object):
         else:
             return -1

+
 Date = datetime.date
 Time = datetime.time
 Timestamp = datetime.datetime

+
 def DataFromTicks(ticks):
     return Date(*time.localtime(ticks)[:3])

+
 def TimeFromTicks(ticks):
     return Time(*time.localtime(ticks)[3:6])

+
 def TimestampFromTicks(ticks):
     return Timestamp(*time.localtime(ticks)[:6])

+
 Binary = bytes

 # STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
 # BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
 # NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
 # DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
 # ROWID = DBAPITypeObject()
@@ -1,35 +1,41 @@
 """Python exceptions
 """

+
 class Error(Exception):
     def __init__(self, msg=None, errno=None):
         self.msg = msg
         self._full_msg = self.msg
         self.errno = errno

     def __str__(self):
         return self._full_msg

+
 class Warning(Exception):
     """Exception raised for important warnings like data truncations while inserting.
     """
     pass

+
 class InterfaceError(Error):
     """Exception raised for errors that are related to the database interface rather than the database itself.
     """
     pass

+
 class DatabaseError(Error):
     """Exception raised for errors that are related to the database.
     """
     pass

+
 class DataError(DatabaseError):
     """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
     """
     pass

+
 class OperationalError(DatabaseError):
     """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
     """

@@ -41,17 +47,20 @@ class IntegrityError(DatabaseError):
     """
     pass

+
 class InternalError(DatabaseError):
     """Exception raised when the database encounters an internal error.
     """
     pass

+
 class ProgrammingError(DatabaseError):
     """Exception raised for programming errors.
     """
     pass

+
 class NotSupportedError(DatabaseError):
     """Exception raised in case a method or database API was used which is not supported by the database,.
     """
     pass
@@ -1,32 +1,33 @@
 from .cinterface import CTaosInterface
 from .error import *


 class TDengineSubscription(object):
     """TDengine subscription object
     """

     def __init__(self, sub):
         self._sub = sub

     def consume(self):
         """Consume rows of a subscription
         """
         if self._sub is None:
             raise OperationalError("Invalid use of consume")

         result, fields = CTaosInterface.consume(self._sub)
         buffer = [[] for i in range(len(fields))]
         while True:
             block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
-            if num_of_fields == 0: break
+            if num_of_fields == 0:
+                break
             for i in range(len(fields)):
                 buffer[i].extend(block[i])

         self.fields = fields
         return list(map(tuple, zip(*buffer)))

-    def close(self, keepProgress = True):
+    def close(self, keepProgress=True):
         """Close the Subscription.
         """
         if self._sub is None:

@@ -38,15 +39,19 @@ class TDengineSubscription(object):

 if __name__ == '__main__':
     from .connection import TDengineConnection
-    conn = TDengineConnection(host="127.0.0.1", user="root", password="taosdata", database="test")
+    conn = TDengineConnection(
+        host="127.0.0.1",
+        user="root",
+        password="taosdata",
+        database="test")

     # Generate a cursor object to run SQL commands
     sub = conn.subscribe(True, "test", "select * from meters;", 1000)

-    for i in range(0,10):
+    for i in range(0, 10):
         data = sub.consume()
         for d in data:
             print(d)

     sub.close()
     conn.close()
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:

setuptools.setup(
    name="taos",
-   version="2.0.4",
+   version="2.0.5",
    author="Taosdata Inc.",
    author_email="support@taosdata.com",
    description="TDengine python client package",
@@ -3,12 +3,12 @@ from .connection import TDengineConnection
from .cursor import TDengineCursor

# Globals
-apilevel = '2.0.4'
threadsafety = 0
paramstyle = 'pyformat'

__all__ = ['connection', 'cursor']


def connect(*args, **kwargs):
    """ Function to return a TDengine connector object
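The connect() helper above just forwards its arguments to TDengineConnection; a minimal usage sketch (the host, user, password and database values are placeholders, not taken from this commit):

import taos

# Every keyword is optional; omitted ones fall back to the client configuration.
conn = taos.connect(host="127.0.0.1", user="root", password="taosdata", database="test")
c = conn.cursor()

c.execute("show databases")
for row in c.fetchall():
    print(row)

conn.close()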
@@ -4,11 +4,14 @@ from .error import *
import math
import datetime


def _convert_millisecond_to_datetime(milli):
    return datetime.datetime.fromtimestamp(milli / 1000.0)


def _convert_microsecond_to_datetime(micro):
    return datetime.datetime.fromtimestamp(micro / 1000000.0)


def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bool row to python row

@@ -18,168 +21,309 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
        _timestamp_converter = _convert_microsecond_to_datetime

    if num_of_rows > 0:
        return list(map(_timestamp_converter, ctypes.cast(
            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
    else:
        return list(map(_timestamp_converter, ctypes.cast(
            data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))


def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bool row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)]]


def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C tinyint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]


def _crow_tinyint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C tinyint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]


def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C smallint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_SMALLINT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_SMALLINT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]


def _crow_smallint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C smallint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]


def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C int row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_INT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_INT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]


def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C int row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]


def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bigint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]


def _crow_bigint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bigint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]


def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C float row to python row
    """
    if num_of_rows > 0:
        return [None if math.isnan(ele) else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
    else:
        return [None if math.isnan(ele) else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]


def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C double row to python row
    """
    if num_of_rows > 0:
        return [None if math.isnan(ele) else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
    else:
        return [None if math.isnan(ele) else ele for ele in
                ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]


def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C binary row to python row
    """
    assert(nbytes is not None)
    if num_of_rows > 0:
        return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in
                (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
    else:
        return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in
                (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]


def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C nchar row to python row
    """
    assert(nbytes is not None)
    res = []
    for i in range(abs(num_of_rows)):
        try:
            if num_of_rows >= 0:
                tmpstr = ctypes.c_char_p(data)
                res.append(tmpstr.value.decode())
            else:
                res.append((ctypes.cast(data + nbytes * i,
                                        ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
        except ValueError:
            res.append(None)

    return res


def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C binary row to python row
    """
    assert(nbytes is not None)
    res = []
    if num_of_rows > 0:
        for i in range(abs(num_of_rows)):
            try:
                rbyte = ctypes.cast(data + nbytes * i,
                                    ctypes.POINTER(ctypes.c_short))[:1].pop()
                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
                res.append(tmpstr.value.decode()[0:rbyte])
            except ValueError:
                res.append(None)
    else:
        for i in range(abs(num_of_rows)):
            try:
                rbyte = ctypes.cast(data + nbytes * i,
                                    ctypes.POINTER(ctypes.c_short))[:1].pop()
                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
                res.append(tmpstr.value.decode()[0:rbyte])
            except ValueError:
                res.append(None)
    return res


def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C nchar row to python row
    """
    assert(nbytes is not None)
    res = []
    if num_of_rows >= 0:
        for i in range(abs(num_of_rows)):
            try:
                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
                res.append(tmpstr.value.decode())
            except ValueError:
                res.append(None)
    else:
        for i in range(abs(num_of_rows)):
            try:
                res.append((ctypes.cast(data + nbytes * i + 2,
                                        ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
            except ValueError:
                res.append(None)
    return res


_CONVERT_FUNC = {
    FieldType.C_BOOL: _crow_bool_to_python,
    FieldType.C_TINYINT: _crow_tinyint_to_python,
    FieldType.C_SMALLINT: _crow_smallint_to_python,
    FieldType.C_INT: _crow_int_to_python,
    FieldType.C_BIGINT: _crow_bigint_to_python,
    FieldType.C_FLOAT: _crow_float_to_python,
    FieldType.C_DOUBLE: _crow_double_to_python,
    FieldType.C_BINARY: _crow_binary_to_python,
    FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
    FieldType.C_NCHAR: _crow_nchar_to_python,
    FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
    FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
    FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
    FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}

_CONVERT_FUNC_BLOCK = {
    FieldType.C_BOOL: _crow_bool_to_python,
    FieldType.C_TINYINT: _crow_tinyint_to_python,
    FieldType.C_SMALLINT: _crow_smallint_to_python,
    FieldType.C_INT: _crow_int_to_python,
    FieldType.C_BIGINT: _crow_bigint_to_python,
    FieldType.C_FLOAT: _crow_float_to_python,
    FieldType.C_DOUBLE: _crow_double_to_python,
    FieldType.C_BINARY: _crow_binary_to_python_block,
    FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
    FieldType.C_NCHAR: _crow_nchar_to_python_block,
    FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
    FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
    FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
    FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}

# Corresponding TAOS_FIELD structure in C


class TaosField(ctypes.Structure):
    _fields_ = [('name', ctypes.c_char * 65),
                ('type', ctypes.c_char),
                ('bytes', ctypes.c_short)]

# C interface class


class CTaosInterface(object):

    libtaos = ctypes.CDLL('libtaos.dylib')

@@ -216,7 +360,7 @@ class CTaosInterface(object):
        except AttributeError:
            raise AttributeError("config is expected as a str")

-       if config != None:
+       if config is not None:
            CTaosInterface.libtaos.taos_options(3, self._config)

        CTaosInterface.libtaos.taos_init()

@@ -227,7 +371,13 @@ class CTaosInterface(object):
        """
        return self._config

    def connect(self, host=None, user="root", password="taosdata", db=None, port=0):
        '''
        Function to connect to server

@@ -236,7 +386,7 @@ class CTaosInterface(object):
        # host
        try:
            _host = ctypes.c_char_p(host.encode(
-               "utf-8")) if host != None else ctypes.c_char_p(None)
+               "utf-8")) if host is not None else ctypes.c_char_p(None)
        except AttributeError:
            raise AttributeError("host is expected as a str")

@@ -255,7 +405,7 @@ class CTaosInterface(object):
        # db
        try:
            _db = ctypes.c_char_p(
-               db.encode("utf-8")) if db != None else ctypes.c_char_p(None)
+               db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
        except AttributeError:
            raise AttributeError("db is expected as a str")

@@ -268,11 +418,11 @@ class CTaosInterface(object):
        connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
            _host, _user, _password, _db, _port))

-       if connection.value == None:
+       if connection.value is None:
            print('connect to TDengine failed')
            raise ConnectionError("connect to TDengine failed")
            # sys.exit(1)
-       #else:
+       # else:
        #    print('connect to TDengine success')

        return connection

@@ -293,7 +443,8 @@ class CTaosInterface(object):
        @rtype: 0 on success and -1 on failure
        '''
        try:
            return CTaosInterface.libtaos.taos_query(
                connection, ctypes.c_char_p(sql.encode('utf-8')))
        except AttributeError:
            raise AttributeError("sql is expected as a string")
        # finally:

@@ -308,7 +459,7 @@ class CTaosInterface(object):
    @staticmethod
    def subscribe(connection, restart, topic, sql, interval):
        """Create a subscription
        @restart boolean,
        @sql string, sql statement for data query, must be a 'select' statement.
        @topic string, name of this subscription
        """

@@ -360,35 +511,49 @@ class CTaosInterface(object):
            result, ctypes.byref(pblock))
        if num_of_rows == 0:
            return None, 0
        isMicro = (CTaosInterface.libtaos.taos_result_precision(
            result) == FieldType.C_TIMESTAMP_MICRO)
        blocks = [None] * len(fields)
        fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
        fieldLen = [ele for ele in ctypes.cast(
            fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
        for i in range(len(fields)):
            data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
            if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
                raise DatabaseError("Invalid data type returned from database")
            blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
                data, num_of_rows, fieldLen[i], isMicro)

        return blocks, abs(num_of_rows)

    @staticmethod
    def fetchRow(result, fields):
        pblock = ctypes.c_void_p(0)
        pblock = CTaosInterface.libtaos.taos_fetch_row(result)
        if pblock:
            num_of_rows = 1
            isMicro = (CTaosInterface.libtaos.taos_result_precision(
                result) == FieldType.C_TIMESTAMP_MICRO)
            blocks = [None] * len(fields)
            fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
            fieldLen = [ele for ele in ctypes.cast(
                fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
            for i in range(len(fields)):
                data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
                if fields[i]['type'] not in _CONVERT_FUNC:
                    raise DatabaseError(
                        "Invalid data type returned from database")
                if data is None:
                    blocks[i] = [None]
                else:
                    blocks[i] = _CONVERT_FUNC[fields[i]['type']](
                        data, num_of_rows, fieldLen[i], isMicro)
        else:
            return None, 0
        return blocks, abs(num_of_rows)
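As a rough illustration of what the _crow_*_to_python helpers above do, the sketch below fakes a column block with ctypes instead of calling libtaos, and maps the NULL sentinel to None in the same way:

import ctypes

C_INT_NULL = -2147483648  # same sentinel value as FieldType.C_INT_NULL

# Pretend this buffer is a 3-row INT column handed back by taos_fetch_block().
raw = (ctypes.c_int * 3)(7, C_INT_NULL, 42)
data = ctypes.cast(raw, ctypes.c_void_p)

# The converter casts the void* back to int* and replaces the sentinel with None.
rows = [None if ele == C_INT_NULL else ele
        for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:3]]
print(rows)  # [7, None, 42]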
@@ -2,9 +2,11 @@ from .cursor import TDengineCursor
from .subscription import TDengineSubscription
from .cinterface import CTaosInterface


class TDengineConnection(object):
    """ TDengine connection object
    """

    def __init__(self, *args, **kwargs):
        self._conn = None
        self._host = None

@@ -29,7 +31,7 @@ class TDengineConnection(object):
        # password
        if 'password' in kwargs:
            self._password = kwargs['password']

        # database
        if 'database' in kwargs:
            self._database = kwargs['database']

@@ -43,7 +45,12 @@ class TDengineConnection(object):
            self._config = kwargs['config']

        self._chandle = CTaosInterface(self._config)
        self._conn = self._chandle.connect(
            self._host, self._user, self._password, self._database, self._port)

    def close(self):
        """Close current connection.

@@ -55,7 +62,8 @@ class TDengineConnection(object):
        """
        if self._conn is None:
            return None
        sub = CTaosInterface.subscribe(
            self._conn, restart, topic, sql, interval)
        return TDengineSubscription(sub)

    def cursor(self):

@@ -80,7 +88,8 @@ class TDengineConnection(object):
        """
        pass


if __name__ == "__main__":
    conn = TDengineConnection(host='192.168.1.107')
    conn.close()
    print("Hello world")
@@ -3,6 +3,7 @@
from .dbapi import *


class FieldType(object):
    """TDengine Field Types
    """

@@ -18,13 +19,21 @@ class FieldType(object):
    C_BINARY = 8
    C_TIMESTAMP = 9
    C_NCHAR = 10
+   C_TINYINT_UNSIGNED = 12
+   C_SMALLINT_UNSIGNED = 13
+   C_INT_UNSIGNED = 14
+   C_BIGINT_UNSIGNED = 15
    # NULL value definition
    # NOTE: These values should change according to C definition in tsdb.h
    C_BOOL_NULL = 0x02
    C_TINYINT_NULL = -128
+   C_TINYINT_UNSIGNED_NULL = 255
    C_SMALLINT_NULL = -32768
+   C_SMALLINT_UNSIGNED_NULL = 65535
    C_INT_NULL = -2147483648
+   C_INT_UNSIGNED_NULL = 4294967295
    C_BIGINT_NULL = -9223372036854775808
+   C_BIGINT_UNSIGNED_NULL = 18446744073709551615
    C_FLOAT_NULL = float('nan')
    C_DOUBLE_NULL = float('nan')
    C_BINARY_NULL = bytearray([int('0xff', 16)])
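Each new unsigned NULL marker is simply the top of the corresponding unsigned C type's range, mirroring the sentinels defined in tsdb.h; a quick sanity check:

# 2**bits - 1 for each unsigned width used above.
assert 255 == 2 ** 8 - 1                    # C_TINYINT_UNSIGNED_NULL
assert 65535 == 2 ** 16 - 1                 # C_SMALLINT_UNSIGNED_NULL
assert 4294967295 == 2 ** 32 - 1            # C_INT_UNSIGNED_NULL
assert 18446744073709551615 == 2 ** 64 - 1  # C_BIGINT_UNSIGNED_NULL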
@@ -5,6 +5,7 @@ import threading

# querySeqNum = 0


class TDengineCursor(object):
    """Database cursor which is used to manage the context of a fetch operation.

@@ -107,8 +108,8 @@ class TDengineCursor(object):
        # if threading.get_ident() != self._threadId:
        #    info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
        #    raise OperationalError(info)
        #    print(info)
        #    return None

        if not operation:
            return None

@@ -137,8 +138,8 @@ class TDengineCursor(object):
        if errno == 0:
            if CTaosInterface.fieldsCount(self._result) == 0:
                self._affected_rows += CTaosInterface.affectedRows(
                    self._result)
                return CTaosInterface.affectedRows(self._result)
            else:
                self._fields = CTaosInterface.useResult(
                    self._result)

@@ -168,11 +169,26 @@ class TDengineCursor(object):
        if (dataType.upper() == "TINYINT"):
            if (self._description[col][1] == FieldType.C_TINYINT):
                return True
+       if (dataType.upper() == "TINYINT UNSIGNED"):
+           if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
+               return True
+       if (dataType.upper() == "SMALLINT"):
+           if (self._description[col][1] == FieldType.C_SMALLINT):
+               return True
+       if (dataType.upper() == "SMALLINT UNSIGNED"):
+           if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
+               return True
        if (dataType.upper() == "INT"):
            if (self._description[col][1] == FieldType.C_INT):
                return True
+       if (dataType.upper() == "INT UNSIGNED"):
+           if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
+               return True
        if (dataType.upper() == "BIGINT"):
-           if (self._description[col][1] == FieldType.C_INT):
+           if (self._description[col][1] == FieldType.C_BIGINT):
+               return True
+       if (dataType.upper() == "BIGINT UNSIGNED"):
+           if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
                return True
        if (dataType.upper() == "FLOAT"):
            if (self._description[col][1] == FieldType.C_FLOAT):

@@ -201,10 +217,13 @@ class TDengineCursor(object):
        buffer = [[] for i in range(len(self._fields))]
        self._rowcount = 0
        while True:
            block, num_of_fields = CTaosInterface.fetchRow(
                self._result, self._fields)
            errno = CTaosInterface.libtaos.taos_errno(self._result)
            if errno != 0:
                raise ProgrammingError(
                    CTaosInterface.errStr(self._result), errno)
            if num_of_fields == 0:
                break
            self._rowcount += num_of_fields

@@ -219,15 +238,20 @@ class TDengineCursor(object):
        buffer = [[] for i in range(len(self._fields))]
        self._rowcount = 0
        while True:
            block, num_of_fields = CTaosInterface.fetchBlock(
                self._result, self._fields)
            errno = CTaosInterface.libtaos.taos_errno(self._result)
            if errno != 0:
                raise ProgrammingError(
                    CTaosInterface.errStr(self._result), errno)
            if num_of_fields == 0:
                break
            self._rowcount += num_of_fields
            for i in range(len(self._fields)):
                buffer[i].extend(block[i])
        return list(map(tuple, zip(*buffer)))

    def nextset(self):
        """
        """

@@ -259,8 +283,8 @@ class TDengineCursor(object):
        # if threading.get_ident() != self._threadId:
        #    info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
        #    raise OperationalError(info)
        #    print(info)
        #    return None

        self._description = []
        for ele in self._fields:

@@ -268,4 +292,3 @@ class TDengineCursor(object):
                (ele['name'], ele['type'], None, None, None, None, False))

        return self._result
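A short usage sketch of the cursor paths above, exercising one of the new unsigned column types through the standard DB-API calls (connection parameters, table and column names are illustrative, and the fetchall() entry point is assumed to wrap the fetchBlock() loop shown here):

import taos

conn = taos.connect(host="127.0.0.1", database="test")
c = conn.cursor()

# BIGINT UNSIGNED is one of the types the new FieldType branches recognize.
c.execute("create table if not exists t_unsigned (ts timestamp, cnt bigint unsigned)")
c.execute("insert into t_unsigned values (now, 1234567890)")

c.execute("select cnt from t_unsigned")
print(c.fetchall())  # e.g. [(1234567890,)]

conn.close()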
@@ -4,6 +4,7 @@
import time
import datetime


class DBAPITypeObject(object):
    def __init__(self, *values):
        self.values = values

@@ -16,23 +17,28 @@ class DBAPITypeObject(object):
        else:
            return -1


Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime


def DataFromTicks(ticks):
    return Date(*time.localtime(ticks)[:3])


def TimeFromTicks(ticks):
    return Time(*time.localtime(ticks)[3:6])


def TimestampFromTicks(ticks):
    return Timestamp(*time.localtime(ticks)[:6])


Binary = bytes

# STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
# NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# ROWID = DBAPITypeObject()
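The *FromTicks helpers above follow the usual PEP 249 pattern of building date/time objects from a Unix timestamp; an equivalent standalone illustration (the tick value is arbitrary):

import time
import datetime

ticks = 1600000000  # an arbitrary Unix timestamp

# Equivalent to DataFromTicks / TimeFromTicks / TimestampFromTicks above.
print(datetime.date(*time.localtime(ticks)[:3]))      # calendar date
print(datetime.time(*time.localtime(ticks)[3:6]))     # wall-clock time
print(datetime.datetime(*time.localtime(ticks)[:6]))  # full timestamp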
@@ -1,35 +1,41 @@
"""Python exceptions
"""


class Error(Exception):
    def __init__(self, msg=None, errno=None):
        self.msg = msg
        self._full_msg = self.msg
        self.errno = errno

    def __str__(self):
        return self._full_msg


class Warning(Exception):
    """Exception raised for important warnings like data truncations while inserting.
    """
    pass


class InterfaceError(Error):
    """Exception raised for errors that are related to the database interface rather than the database itself.
    """
    pass


class DatabaseError(Error):
    """Exception raised for errors that are related to the database.
    """
    pass


class DataError(DatabaseError):
    """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
    """
    pass


class OperationalError(DatabaseError):
    """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
    """

@@ -41,17 +47,20 @@ class IntegrityError(DatabaseError):
    """
    pass


class InternalError(DatabaseError):
    """Exception raised when the database encounters an internal error.
    """
    pass


class ProgrammingError(DatabaseError):
    """Exception raised for programming errors.
    """
    pass


class NotSupportedError(DatabaseError):
    """Exception raised in case a method or database API was used which is not supported by the database.
    """
    pass
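These classes form the usual PEP 249 hierarchy (the concrete errors derive from DatabaseError, which derives from Error), so callers can catch at whatever granularity they need; a hedged sketch, assuming execute() surfaces failed statements as ProgrammingError the way the fetch paths above do:

import taos
from taos.error import OperationalError, ProgrammingError

conn = taos.connect(host="127.0.0.1")
c = conn.cursor()
try:
    c.execute("select * from no_such_table")
except ProgrammingError as e:
    # errno/msg are filled in from taos_errno()/taos_errstr().
    print("query failed:", e.errno, e.msg)
except OperationalError as e:
    print("operational problem:", e)
finally:
    conn.close()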
@@ -1,32 +1,33 @@
from .cinterface import CTaosInterface
from .error import *


class TDengineSubscription(object):
    """TDengine subscription object
    """

    def __init__(self, sub):
        self._sub = sub

    def consume(self):
        """Consume rows of a subscription
        """
        if self._sub is None:
            raise OperationalError("Invalid use of consume")

        result, fields = CTaosInterface.consume(self._sub)
        buffer = [[] for i in range(len(fields))]
        while True:
            block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
            if num_of_fields == 0:
                break
            for i in range(len(fields)):
                buffer[i].extend(block[i])

        self.fields = fields
        return list(map(tuple, zip(*buffer)))

    def close(self, keepProgress=True):
        """Close the Subscription.
        """
        if self._sub is None:

@@ -38,15 +39,19 @@ class TDengineSubscription(object):

if __name__ == '__main__':
    from .connection import TDengineConnection
    conn = TDengineConnection(
        host="127.0.0.1",
        user="root",
        password="taosdata",
        database="test")

    # Generate a cursor object to run SQL commands
    sub = conn.subscribe(True, "test", "select * from meters;", 1000)

    for i in range(0, 10):
        data = sub.consume()
        for d in data:
            print(d)

    sub.close()
    conn.close()
@@ -3,7 +3,6 @@ from .connection import TDengineConnection
from .cursor import TDengineCursor

# Globals
-apilevel = '2.0.3'
threadsafety = 0
paramstyle = 'pyformat'

@@ -21,4 +20,4 @@ def connect(*args, **kwargs):

    @rtype: TDengineConnector
    """
    return TDengineConnection(*args, **kwargs)
@@ -3,7 +3,6 @@ from .connection import TDengineConnection
from .cursor import TDengineCursor

# Globals
-apilevel = '2.0.3'
threadsafety = 0
paramstyle = 'pyformat'

@@ -21,4 +20,4 @@ def connect(*args, **kwargs):

    @rtype: TDengineConnector
    """
    return TDengineConnection(*args, **kwargs)
@@ -68,7 +68,7 @@ typedef struct taosField {
#define DLL_EXPORT
#endif

-DLL_EXPORT void taos_init();
+DLL_EXPORT int  taos_init();
DLL_EXPORT void taos_cleanup(void);
DLL_EXPORT int  taos_options(TSDB_OPTION option, const void *arg, ...);
DLL_EXPORT TAOS *taos_connect(const char *ip, const char *user, const char *pass, const char *db, uint16_t port);
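With taos_init() now returning an int, callers can bail out when initialization fails (nonzero means failure, as the call sites below show); a Python-side equivalent through ctypes would look roughly like this (a sketch, not part of this commit; the library name differs per platform):

import ctypes

libtaos = ctypes.CDLL('libtaos.so')  # 'libtaos.dylib' on macOS
libtaos.taos_init.restype = ctypes.c_int

if libtaos.taos_init() != 0:
    raise RuntimeError("failed to init taos")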
@@ -122,8 +122,8 @@
#define TK_UNSIGNED 103
#define TK_TAGS     104
#define TK_USING    105
-#define TK_AS       106
-#define TK_COMMA    107
+#define TK_COMMA    106
+#define TK_AS       107
#define TK_NULL     108
#define TK_SELECT   109
#define TK_UNION    110

@@ -228,6 +228,7 @@
#define TK_VALUES   209


#define TK_SPACE    300
#define TK_COMMENT  301
#define TK_ILLEGAL  302
@@ -76,7 +76,11 @@ TAOS *shellInit(SShellArguments *args) {
    args->user = TSDB_DEFAULT_USER;
  }

-  taos_init();
+  if (taos_init()) {
+    printf("failed to init taos\n");
+    fflush(stdout);
+    return NULL;
+  }

  // Connect to the database.
  TAOS *con = NULL;
@@ -110,7 +110,10 @@ int main(int argc, char* argv[]) {
  }

  if (args.netTestRole && args.netTestRole[0] != 0) {
-    taos_init();
+    if (taos_init()) {
+      printf("Failed to init taos");
+      exit(EXIT_FAILURE);
+    }
    taosNetTest(args.netTestRole, args.host, args.port, args.pktLen);
    exit(0);
  }
@@ -1971,7 +1971,6 @@ static int createSuperTable(TAOS * taos, char* dbName, SSuperTable* superTbls,
static int createDatabases() {
  TAOS * taos = NULL;
  int    ret = 0;
-  taos_init();
  taos = taos_connect(g_Dbs.host, g_Dbs.user, g_Dbs.password, NULL, g_Dbs.port);
  if (taos == NULL) {
    fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL));

@@ -4496,7 +4495,6 @@ void *subQueryProcess(void *sarg) {

int queryTestProcess() {
  TAOS * taos = NULL;
-  taos_init();
  taos = taos_connect(g_queryInfo.host, g_queryInfo.user, g_queryInfo.password, NULL, g_queryInfo.port);
  if (taos == NULL) {
    fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL));

@@ -4772,7 +4770,6 @@ int subscribeTestProcess() {
  }

  TAOS * taos = NULL;
-  taos_init();
  taos = taos_connect(g_queryInfo.host, g_queryInfo.user, g_queryInfo.password, g_queryInfo.dbName, g_queryInfo.port);
  if (taos == NULL) {
    fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL));
@@ -103,7 +103,9 @@ int32_t monInitSystem() {
}

int32_t monStartSystem() {
-  taos_init();
+  if (taos_init()) {
+    return -1;
+  }
  tsMonitor.start = 1;
  monExecuteSQLFp = monExecuteSQL;
  monInfo("monitor module start");
@@ -76,6 +76,7 @@ typedef struct SQuerySQL {
typedef struct SCreatedTableInfo {
  SStrToken  name;        // table name token
  SStrToken  stableName;  // super table name token , for using clause
+ SArray    *pTagNames;   // create by using super table, tag name
  SArray    *pTagVals;    // create by using super table, tag value
  char      *fullname;    // table full name
  STagData   tagdata;     // true tag data, super table full name is in STagData

@@ -246,7 +247,7 @@ SCreateTableSQL *tSetCreateSqlElems(SArray *pCols, SArray *pTags, SQuerySQL *pSe
void tSqlExprNodeDestroy(tSQLExpr *pExpr);

SAlterTableInfo * tAlterTableSqlElems(SStrToken *pTableName, SArray *pCols, SArray *pVals, int32_t type, int16_t tableTable);
-SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists);
+SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagNames, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists);

void destroyAllSelectClause(SSubclauseInfo *pSql);
void doDestroyQuerySql(SQuerySQL *pSql);
@@ -356,9 +356,20 @@ create_stable_args(A) ::= ifnotexists(U) ids(V) cpxName(Z) LP columnlist(X) RP T
 create_from_stable(A) ::= ifnotexists(U) ids(V) cpxName(Z) USING ids(X) cpxName(F) TAGS LP tagitemlist(Y) RP. {
   X.n += F.n;
   V.n += Z.n;
-  A = createNewChildTableInfo(&X, Y, &V, &U);
+  A = createNewChildTableInfo(&X, NULL, Y, &V, &U);
 }
 
+create_from_stable(A) ::= ifnotexists(U) ids(V) cpxName(Z) USING ids(X) cpxName(F) LP tagNamelist(P) RP TAGS LP tagitemlist(Y) RP. {
+  X.n += F.n;
+  V.n += Z.n;
+  A = createNewChildTableInfo(&X, P, Y, &V, &U);
+}
+
+%type tagNamelist{SArray*}
+%destructor tagNamelist {taosArrayDestroy($$);}
+tagNamelist(A) ::= tagNamelist(X) COMMA ids(Y).  {taosArrayPush(X, &Y); A = X; }
+tagNamelist(A) ::= ids(X).                       {A = taosArrayInit(4, sizeof(SStrToken)); taosArrayPush(A, &X);}
+
 // create stream
 // create table table_name as select count(*) from super_table_name interval(time)
 create_table_args(A) ::= ifnotexists(U) ids(V) cpxName(Z) AS select(S). {
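The second `create_from_stable` production adds an optional tag-name list between the super-table name and `TAGS`. A hedged client-side sketch of the syntax this enables, with statements mirroring the sim test added later in this commit (connection setup and full error handling omitted):

```c
#include <stdio.h>
#include <taos.h>

static void exec_sql(TAOS *conn, const char *sql) {
  TAOS_RES *res = taos_query(conn, sql);
  if (taos_errno(res) != 0) {
    fprintf(stderr, "error [%s]: %s\n", sql, taos_errstr(res));
  }
  taos_free_result(res);
}

void tag_name_list_demo(TAOS *conn) {
  exec_sql(conn, "create stable st2 (ts timestamp, f1 int) tags (id int, t1 int, t2 nchar(4), t3 double)");
  // Name only a subset of the tags; the unnamed ones are left NULL.
  exec_sql(conn, "create table tb2 using st2 (t2, t3) tags ('12', 22.0)");
  // The same clause is accepted on auto-create inserts.
  exec_sql(conn, "insert into tb1 using st2 (id, t1) tags (1, 2) values (now, 1)");
}
```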
@@ -2574,12 +2574,16 @@ static void bottom_function(SQLFunctionCtx *pCtx) {
 
   STopBotInfo *pRes = getTopBotOutputInfo(pCtx);
 
+  if ((void *)pRes->res[0] != (void *)((char *)pRes + sizeof(STopBotInfo) + POINTER_BYTES * pCtx->param[0].i64)) {
+    buildTopBotStruct(pRes, pCtx);
+  }
+
   for (int32_t i = 0; i < pCtx->size; ++i) {
     char *data = GET_INPUT_DATA(pCtx, i);
     TSKEY ts = GET_TS_DATA(pCtx, i);
 
     if (pCtx->hasNull && isNull(data, pCtx->inputType)) {
       continue;
     }
 
     notNullElems++;
@@ -2608,6 +2612,11 @@ static void bottom_function_f(SQLFunctionCtx *pCtx, int32_t index) {
   }
 
   STopBotInfo *pRes = getTopBotOutputInfo(pCtx);
 
+  if ((void *)pRes->res[0] != (void *)((char *)pRes + sizeof(STopBotInfo) + POINTER_BYTES * pCtx->param[0].i64)) {
+    buildTopBotStruct(pRes, pCtx);
+  }
+
   SET_VAL(pCtx, 1, 1);
   do_bottom_function_add(pRes, (int32_t)pCtx->param[0].i64, pData, ts, pCtx->inputType, &pCtx->tagInfo, NULL, 0);
@@ -3785,7 +3785,7 @@ void setResultRowOutputBufInitCtx(SQueryRuntimeEnv *pRuntimeEnv, SResultRow *pRe
   if (functionId == TSDB_FUNC_TOP || functionId == TSDB_FUNC_BOTTOM || functionId == TSDB_FUNC_DIFF) {
     pCtx->ptsOutputBuf = pRuntimeEnv->pCtx[0].pOutput;
   }
 
   if (!pCtx->resultInfo->initialized) {
     aAggs[functionId].init(pCtx);
   }
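The new guard in `bottom_function` and `bottom_function_f` re-runs `buildTopBotStruct()` when the stored pointer no longer matches the address an in-place build would produce, which happens, for instance, when the interim result buffer has been copied elsewhere. A hedged, self-contained illustration of that idea with simplified stand-in types — not the engine's real layout:

```c
#include <stddef.h>
#include <stdint.h>

// A result blob that stores a pointer table immediately behind its header,
// followed by the payload those pointers refer to.
typedef struct Blob {
  int32_t num;    // number of slots
  char  **slot;   // expected to point right behind this header
} Blob;

static void rebuild_interior_pointers(Blob *b, size_t bytes_per_slot) {
  char *base = (char *)b + sizeof(Blob);
  b->slot = (char **)base;                          // pointer table comes first
  char *payload = base + sizeof(char *) * b->num;   // payload right after it
  for (int32_t i = 0; i < b->num; ++i) {
    b->slot[i] = payload + (size_t)i * bytes_per_slot;
  }
}

// Counterpart of the check on pRes->res[0] above: if the table does not sit where
// an in-place build would have put it, the blob was moved, so rebuild the pointers.
static void ensure_interior_pointers(Blob *b, size_t bytes_per_slot) {
  if ((void *)b->slot != (void *)((char *)b + sizeof(Blob))) {
    rebuild_interior_pointers(b, bytes_per_slot);
  }
}
```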
@@ -496,7 +496,8 @@ static void freeVariant(void *pItem) {
 }
 
 void freeCreateTableInfo(void* p) {
   SCreatedTableInfo* pInfo = (SCreatedTableInfo*) p;
+  taosArrayDestroy(pInfo->pTagNames);
   taosArrayDestroyEx(pInfo->pTagVals, freeVariant);
   tfree(pInfo->fullname);
   tfree(pInfo->tagdata.data);
@@ -574,11 +575,12 @@ SCreateTableSQL *tSetCreateSqlElems(SArray *pCols, SArray *pTags, SQuerySQL *pSe
   return pCreate;
 }
 
-SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists) {
+SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagNames, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists) {
   SCreatedTableInfo info;
   memset(&info, 0, sizeof(SCreatedTableInfo));
 
   info.name = *pToken;
+  info.pTagNames = pTagNames;
   info.pTagVals = pTagVals;
   info.stableName = *pTableName;
   info.igExist = (igExists->n > 0)? 1:0;
src/query/src/sql.c: 2026 changed lines (generated parser; file diff suppressed because it is too large)
@@ -50,7 +50,8 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
   STsdbMeta *pMeta = pRepo->tsdbMeta;
   STable *   super = NULL;
   STable *   table = NULL;
-  int        newSuper = 0;
+  bool       newSuper = false;
+  bool       superChanged = false;
   int        tid = pCfg->tableId.tid;
   STable *   pTable = NULL;
 
@@ -85,7 +86,7 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
   if (pCfg->type == TSDB_CHILD_TABLE) {
     super = tsdbGetTableByUid(pMeta, pCfg->superUid);
     if (super == NULL) {  // super table not exists, try to create it
-      newSuper = 1;
+      newSuper = true;
       super = tsdbCreateTableFromCfg(pCfg, true);
       if (super == NULL) goto _err;
     } else {
@@ -93,6 +94,17 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
         terrno = TSDB_CODE_TDB_IVD_CREATE_TABLE_INFO;
         goto _err;
       }
+
+      if (schemaVersion(pCfg->tagSchema) > schemaVersion(super->tagSchema)) {
+        // tag schema out of date, need to update super table tag version
+        STSchema *pOldSchema = super->tagSchema;
+        TSDB_WLOCK_TABLE(super);
+        super->tagSchema = tdDupSchema(pCfg->tagSchema);
+        TSDB_WUNLOCK_TABLE(super);
+        tdFreeSchema(pOldSchema);
+
+        superChanged = true;
+      }
     }
   }
 
@@ -117,7 +129,7 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
   // TODO: refactor duplicate codes
   int tlen = 0;
   void *pBuf = NULL;
-  if (newSuper) {
+  if (newSuper || superChanged) {
     tlen = tsdbGetTableEncodeSize(TSDB_UPDATE_META, super);
     pBuf = tsdbAllocBytes(pRepo, tlen);
     if (pBuf == NULL) goto _err;
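When a child table arrives with a newer tag schema than its super table, the super table's schema is now swapped under the table write lock and flagged so the change gets persisted (`newSuper || superChanged`). A hedged sketch of that copy-swap-free sequence with simplified stand-in types and a pthread rwlock in place of `TSDB_WLOCK_TABLE`:

```c
#include <pthread.h>
#include <stdlib.h>

typedef struct Schema { int version; /* columns ... */ } Schema;

typedef struct SuperTable {
  pthread_rwlock_t lock;
  Schema          *tagSchema;
} SuperTable;

static Schema *schema_dup(const Schema *src) {
  Schema *dst = malloc(sizeof(*dst));
  if (dst != NULL) *dst = *src;
  return dst;
}

// Returns 1 when the schema was replaced, so the caller knows to write an
// "update meta" record, mirroring the superChanged flag above.
static int maybe_upgrade_tag_schema(SuperTable *super, const Schema *incoming) {
  if (incoming->version <= super->tagSchema->version) return 0;

  Schema *replacement = schema_dup(incoming);
  if (replacement == NULL) return 0;

  Schema *old = super->tagSchema;
  pthread_rwlock_wrlock(&super->lock);   // readers never see a half-updated schema
  super->tagSchema = replacement;
  pthread_rwlock_unlock(&super->lock);
  free(old);                             // release the superseded schema last
  return 1;
}
```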
@@ -1,6 +1,8 @@
 CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
 PROJECT(TDengine)
 
+ADD_DEFINITIONS(-DWAL_CHECKSUM_WHOLE)
+
 INCLUDE_DIRECTORIES(inc)
 AUX_SOURCE_DIRECTORY(${CMAKE_CURRENT_SOURCE_DIR}/src SRC)
 
@@ -111,6 +111,28 @@ void walRemoveAllOldFiles(void *handle) {
   pthread_mutex_unlock(&pWal->mutex);
 }
 
+#if defined(WAL_CHECKSUM_WHOLE)
+
+static void walUpdateChecksum(SWalHead *pHead) {
+  pHead->sver = 1;
+  pHead->cksum = 0;
+  pHead->cksum = taosCalcChecksum(0, (uint8_t *)pHead, sizeof(*pHead) + pHead->len);
+}
+
+static int walValidateChecksum(SWalHead *pHead) {
+  if (pHead->sver == 0) { // for compatible with wal before sver 1
+    return taosCheckChecksumWhole((uint8_t *)pHead, sizeof(*pHead));
+  } else if (pHead->sver == 1) {
+    uint32_t cksum = pHead->cksum;
+    pHead->cksum = 0;
+    return taosCheckChecksum((uint8_t *)pHead, sizeof(*pHead) + pHead->len, cksum);
+  }
+
+  return 0;
+}
+
+#endif
+
 int32_t walWrite(void *handle, SWalHead *pHead) {
   if (handle == NULL) return -1;
 
@@ -123,7 +145,13 @@ int32_t walWrite(void *handle, SWalHead *pHead) {
   if (pHead->version <= pWal->version) return 0;
 
   pHead->signature = WAL_SIGNATURE;
+#if defined(WAL_CHECKSUM_WHOLE)
+  walUpdateChecksum(pHead);
+#else
+  pHead->sver = 0;
   taosCalcChecksumAppend(0, (uint8_t *)pHead, sizeof(SWalHead));
+#endif
+
   int32_t contLen = pHead->len + sizeof(SWalHead);
 
   pthread_mutex_lock(&pWal->mutex);
@@ -246,16 +274,40 @@ static int32_t walSkipCorruptedRecord(SWal *pWal, SWalHead *pHead, int64_t tfd,
       continue;
     }
 
+#if defined(WAL_CHECKSUM_WHOLE)
+    if (pHead->sver == 0 && walValidateChecksum(pHead)) {
+      wInfo("vgId:%d, wal head cksum check passed, offset:%" PRId64, pWal->vgId, pos);
+      *offset = pos;
+      return TSDB_CODE_SUCCESS;
+    }
+
+    if (pHead->sver == 1) {
+      if (tfRead(tfd, pHead->cont, pHead->len) < pHead->len) {
+        wError("vgId:%d, read to end of corrupted wal file, offset:%" PRId64, pWal->vgId, pos);
+        return TSDB_CODE_WAL_FILE_CORRUPTED;
+      }
+
+      if (walValidateChecksum(pHead)) {
+        wInfo("vgId:%d, wal whole cksum check passed, offset:%" PRId64, pWal->vgId, pos);
+        *offset = pos;
+        return TSDB_CODE_SUCCESS;
+      }
+    }
+
+#else
     if (taosCheckChecksumWhole((uint8_t *)pHead, sizeof(SWalHead))) {
      wInfo("vgId:%d, wal head cksum check passed, offset:%" PRId64, pWal->vgId, pos);
      *offset = pos;
      return TSDB_CODE_SUCCESS;
    }
 
+#endif
   }
 
   return TSDB_CODE_WAL_FILE_CORRUPTED;
 }
 
 static int32_t walRestoreWalFile(SWal *pWal, void *pVnode, FWalWrite writeFp, char *name, int64_t fileId) {
   int32_t size = WAL_MAX_SIZE;
   void *  buffer = tmalloc(size);
@@ -293,6 +345,51 @@ static int32_t walRestoreWalFile(SWal *pWal, void *pVnode, FWalWrite writeFp, ch
       break;
     }
 
+#if defined(WAL_CHECKSUM_WHOLE)
+    if (pHead->sver == 0 && !walValidateChecksum(pHead)) {
+      wError("vgId:%d, file:%s, wal head cksum is messed up, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
+             pHead->version, pHead->len, offset);
+      code = walSkipCorruptedRecord(pWal, pHead, tfd, &offset);
+      if (code != TSDB_CODE_SUCCESS) {
+        walFtruncate(pWal, tfd, offset);
+        break;
+      }
+    }
+
+    if (pHead->len < 0 || pHead->len > size - sizeof(SWalHead)) {
+      wError("vgId:%d, file:%s, wal head len out of range, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
+             pHead->version, pHead->len, offset);
+      code = walSkipCorruptedRecord(pWal, pHead, tfd, &offset);
+      if (code != TSDB_CODE_SUCCESS) {
+        walFtruncate(pWal, tfd, offset);
+        break;
+      }
+    }
+
+    ret = (int32_t)tfRead(tfd, pHead->cont, pHead->len);
+    if (ret < 0) {
+      wError("vgId:%d, file:%s, failed to read wal body since %s", pWal->vgId, name, strerror(errno));
+      code = TAOS_SYSTEM_ERROR(errno);
+      break;
+    }
+
+    if (ret < pHead->len) {
+      wError("vgId:%d, file:%s, failed to read wal body, ret:%d len:%d", pWal->vgId, name, ret, pHead->len);
+      offset += sizeof(SWalHead);
+      continue;
+    }
+
+    if (pHead->sver == 1 && !walValidateChecksum(pHead)) {
+      wError("vgId:%d, file:%s, wal whole cksum is messed up, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
+             pHead->version, pHead->len, offset);
+      code = walSkipCorruptedRecord(pWal, pHead, tfd, &offset);
+      if (code != TSDB_CODE_SUCCESS) {
+        walFtruncate(pWal, tfd, offset);
+        break;
+      }
+    }
+
+#else
     if (!taosCheckChecksumWhole((uint8_t *)pHead, sizeof(SWalHead))) {
       wError("vgId:%d, file:%s, wal head cksum is messed up, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
              pHead->version, pHead->len, offset);
@@ -326,6 +423,7 @@ static int32_t walRestoreWalFile(SWal *pWal, void *pVnode, FWalWrite writeFp, ch
       continue;
     }
 
+#endif
     offset = offset + sizeof(SWalHead) + pHead->len;
 
     wTrace("vgId:%d, restore wal, fileId:%" PRId64 " hver:%" PRIu64 " wver:%" PRIu64 " len:%d", pWal->vgId,
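`walValidateChecksum()` dispatches on the new `sver` field: version 0 records are verified over the fixed-size head only (backward compatible with existing logs), version 1 records over head plus body. A hedged, self-contained sketch of the same versioned scheme, using zlib's `crc32()` as a stand-in for the internal `taosCalcChecksum`/`taosCheckChecksum` helpers:

```c
#include <stddef.h>
#include <stdint.h>
#include <zlib.h>

typedef struct Record {
  uint32_t cksum;
  int8_t   sver;   // 0: checksum covers the head only, 1: covers head + body
  int32_t  len;    // body length in bytes
  char     body[];
} Record;

// Seal a record in the new (sver == 1) format: checksum over head and body.
static void record_seal(Record *r) {
  r->sver  = 1;
  r->cksum = 0;
  r->cksum = (uint32_t)crc32(0L, (const Bytef *)r, sizeof(*r) + (size_t)r->len);
}

// Verify either format; returns 1 when the checksum matches, 0 otherwise.
static int record_verify(Record *r) {
  size_t covered;
  if (r->sver == 0)      covered = sizeof(*r);                    // legacy records
  else if (r->sver == 1) covered = sizeof(*r) + (size_t)r->len;   // whole record
  else                   return 0;

  uint32_t stored = r->cksum;
  r->cksum = 0;
  int ok = ((uint32_t)crc32(0L, (const Bytef *)r, covered) == stored);
  r->cksum = stored;   // restore so the record stays intact for later use
  return ok;
}
```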
@@ -109,6 +109,13 @@ pipeline {
               java --class-path=../../../../src/connector/jdbc/target:$JAVA_HOME/jre/lib/ext -jar target/JDBCDemo-SNAPSHOT-jar-with-dependencies.jar -host 127.0.0.1
               '''
             }
+            catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
+              sh '''
+              cp -rf ${WKC}/tests/examples/nodejs ${JENKINS_HOME}/workspace/
+              cd ${JENKINS_HOME}/workspace/nodejs
+              node nodejsChecker.js host=localhost
+              '''
+            }
             catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
               sh '''
               cd ${JENKINS_HOME}/workspace/C#NET/src/CheckC#
@@ -62,7 +62,10 @@ int main(int argc, char *argv[]) {
   }
 
   // init TAOS
-  taos_init();
+  if (taos_init()) {
+    exit(1);
+  }
 
   TAOS *taos = taos_connect(argv[1], "root", "taosdata", NULL, 0);
   if (taos == NULL) {
     printf("failed to connect to server, reason:%s\n", "null taos"/*taos_errstr(taos)*/);
@@ -23,7 +23,10 @@ int main(int argc, char *argv[])
   }
 
   // init TAOS
-  taos_init();
+  if (taos_init()) {
+    printf("failed to init taos\n");
+    exit(1);
+  }
 
   taos = taos_connect(argv[1], "root", "taosdata", NULL, 0);
   if (taos == NULL) {
@@ -55,7 +55,10 @@ int main(int argc, char *argv[])
   }
 
   // init TAOS
-  taos_init();
+  if (taos_init()) {
+    printf("failed to init taos\n");
+    exit(1);
+  }
 
   strcpy(db_name, argv[2]);
   strcpy(tbl_name, argv[3]);
@@ -217,7 +217,10 @@ int main(int argc, char *argv[]) {
   }
 
   // init TAOS
-  taos_init();
+  if (taos_init()) {
+    printf("failed to init taos\n");
+    exit(1);
+  }
 
   TAOS* taos = taos_connect(host, user, passwd, "", 0);
   if (taos == NULL) {
@@ -39,6 +39,8 @@ function buildTDengine {
   cd $WORK_DIR/TDengine
 
   git remote update > /dev/null
+  git reset --hard HEAD
+  git checkout develop
   REMOTE_COMMIT=`git rev-parse --short remotes/origin/develop`
   LOCAL_COMMIT=`git rev-parse --short @`
 
@@ -54,15 +56,16 @@ function buildTDengine {
     cd debug
     rm -rf *
     cmake .. > /dev/null
-    make > /dev/null
-    make install
+    make && make install > /dev/null
   fi
 }
 
 function runQueryPerfTest {
   [ -f $PERFORMANCE_TEST_REPORT ] && rm $PERFORMANCE_TEST_REPORT
   nohup $WORK_DIR/TDengine/debug/build/bin/taosd -c /etc/taosperf/ > /dev/null 2>&1 &
-  echoInfo "Run Performance Test"
+  echoInfo "Wait TDengine to start"
+  sleep 60
+  echoInfo "Run Performance Test"
   cd $WORK_DIR/TDengine/tests/pytest
 
   python3 query/queryPerformance.py -c $LOCAL_COMMIT | tee -a $PERFORMANCE_TEST_REPORT
@@ -104,6 +107,7 @@ function sendReport {
 stopTaosd
 buildTDengine
 runQueryPerfTest
+stopTaosd
 
 echoInfo "Send Report"
 sendReport
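The `sleep 60` gives `taosd` a fixed window to come up before the performance queries run. A hedged alternative sketch for clients that would rather poll for readiness than sleep a fixed time (host, credentials and the retry budget are illustrative; assumes `taos_init()` already succeeded):

```c
#include <stdio.h>
#include <unistd.h>
#include <taos.h>

// Poll until taosd accepts a connection, or give up after max_seconds.
TAOS *wait_for_taosd(const char *host, int max_seconds) {
  for (int waited = 0; waited < max_seconds; ++waited) {
    TAOS *conn = taos_connect(host, "root", "taosdata", NULL, 0);
    if (conn != NULL) {
      return conn;               // server is up and reachable
    }
    sleep(1);                    // not ready yet; retry shortly
  }
  fprintf(stderr, "taosd did not become ready within %d seconds\n", max_seconds);
  return NULL;
}
```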
@@ -354,10 +354,11 @@ class ThreadCoordinator:
                 # end, and maybe signal them to stop
                 if isinstance(err, CrashGenError): # our own transition failure
                     Logging.info("State transition error")
+                    # TODO: saw an error here once, let's print out stack info for err?
                     traceback.print_stack()
                     transitionFailed = True
                     self._te = None # Not running any more
-                    self._execStats.registerFailure("State transition error")
+                    self._execStats.registerFailure("State transition error: {}".format(err))
                 else:
                     raise
                 # return transitionFailed # Why did we have this??!!
@@ -882,8 +883,12 @@ class StateMechine:
         self._stateWeights = [1, 2, 10, 40]
 
     def init(self, dbc: DbConn): # late initailization, don't save the dbConn
-        self._curState = self._findCurrentState(dbc) # starting state
-        Logging.debug("Found Starting State: {}".format(self._curState))
+        try:
+            self._curState = self._findCurrentState(dbc) # starting state
+        except taos.error.ProgrammingError as err:
+            Logging.error("Failed to initialized state machine, cannot find current state: {}".format(err))
+            traceback.print_stack()
+            raise # re-throw
 
     # TODO: seems no lnoger used, remove?
     def getCurrentState(self):
@@ -951,6 +956,8 @@ class StateMechine:
 
     # We transition the system to a new state by examining the current state itself
     def transition(self, tasks, dbc: DbConn):
+        global gSvcMgr
+
         if (len(tasks) == 0): # before 1st step, or otherwise empty
             Logging.debug("[STT] Starting State: {}".format(self._curState))
             return # do nothing
@@ -2370,7 +2377,7 @@ class MainExec:
             '-n',
             '--dynamic-db-table-names',
             action='store_true',
-            help='Use non-fixed names for dbs/tables, useful for multi-instance executions (default: false)')
+            help='Use non-fixed names for dbs/tables, for -b, useful for multi-instance executions (default: false)')
         parser.add_argument(
             '-o',
             '--num-dnodes',
@@ -15,6 +15,7 @@ from util.log import *
 from .misc import Logging, CrashGenError, Helper, Dice
 import os
 import datetime
+import traceback
 # from .service_manager import TdeInstance
 
 class DbConn:
@@ -349,6 +350,7 @@ class DbConnNative(DbConn):
 
     def execute(self, sql):
         if (not self.isOpen):
+            traceback.print_stack()
             raise CrashGenError(
                 "Cannot exec SQL unless db connection is open", CrashGenError.DB_CONNECTION_NOT_OPEN)
         Logging.debug("[SQL] Executing SQL: {}".format(sql))
@@ -361,6 +363,7 @@ class DbConnNative(DbConn):
 
     def query(self, sql):  # return rows affected
         if (not self.isOpen):
+            traceback.print_stack()
            raise CrashGenError(
                 "Cannot query database until connection is open, restarting?", CrashGenError.DB_CONNECTION_NOT_OPEN)
         Logging.debug("[SQL] Executing SQL: {}".format(sql))
@@ -66,7 +66,7 @@ python3 ./test.py -f tag_lite/int.py
 python3 ./test.py -f tag_lite/set.py
 python3 ./test.py -f tag_lite/smallint.py
 python3 ./test.py -f tag_lite/tinyint.py
+python3 ./test.py -f tag_lite/alter_tag.py
 #python3 ./test.py -f dbmgmt/database-name-boundary.py
 
 python3 ./test.py -f import_merge/importBlock1HO.py
@@ -0,0 +1,56 @@
+###################################################################
+# Copyright (c) 2016 by TAOS Technologies, Inc.
+# All rights reserved.
+#
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+from util.log import *
+from util.cases import *
+from util.sql import *
+
+
+class TDTestCase:
+    def init(self, conn, logSql):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor(), logSql)
+
+    def run(self):
+        tdSql.prepare()
+
+        ret = tdSql.execute(
+            'create table tb (ts timestamp, speed int unsigned)')
+
+        insertRows = 10
+        tdLog.info("insert %d rows" % (insertRows))
+        for i in range(0, insertRows):
+            ret = tdSql.execute(
+                'insert into tb values (now + %dm, %d)' %
+                (i, i))
+
+        tdLog.info("insert earlier data")
+        tdSql.execute('insert into tb values (now - 5m , 10)')
+        tdSql.execute('insert into tb values (now - 6m , 10)')
+        tdSql.execute('insert into tb values (now - 7m , 10)')
+        tdSql.execute('insert into tb values (now - 8m , 4294967294)')
+
+        tdSql.error('insert into tb values (now - 9m, -1)')
+        tdSql.error('insert into tb values (now - 9m, 4294967295)')
+
+        tdSql.query("select * from tb")
+        tdSql.checkRows(insertRows + 4)
+
+    def stop(self):
+        tdSql.close()
+        tdLog.success("%s successfully executed" % __file__)
+
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
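The new case exercises the `INT UNSIGNED` column type: values up to 4294967294 are accepted, while -1 and 4294967295 must be rejected. A hedged C counterpart of the same boundary checks (error handling reduced to printing the returned code; connection setup omitted):

```c
#include <stdio.h>
#include <taos.h>

static int run_sql(TAOS *conn, const char *sql) {
  TAOS_RES *res = taos_query(conn, sql);
  int code = taos_errno(res);          // 0 on success
  taos_free_result(res);
  return code;
}

void unsigned_boundary_check(TAOS *conn) {
  run_sql(conn, "create table tb (ts timestamp, speed int unsigned)");
  printf("max value: %d\n", run_sql(conn, "insert into tb values (now, 4294967294)"));  // expected: 0
  printf("overflow:  %d\n", run_sql(conn, "insert into tb values (now, 4294967295)"));  // expected: non-zero
  printf("negative:  %d\n", run_sql(conn, "insert into tb values (now, -1)"));          // expected: non-zero
}
```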
@@ -19,7 +19,7 @@ python3 ./test.py -f insert/randomNullCommit.py
 #python3 insert/retentionpolicy.py
 python3 ./test.py -f insert/alterTableAndInsert.py
 python3 ./test.py -f insert/insertIntoTwoTables.py
-#python3 ./test.py -f insert/before_1970.py
+python3 ./test.py -f insert/before_1970.py
 python3 bug2265.py
 
 #table
@@ -0,0 +1,52 @@
+###################################################################
+# Copyright (c) 2016 by TAOS Technologies, Inc.
+# All rights reserved.
+#
+# This file is proprietary and confidential to TAOS Technologies.
+# No part of this file may be reproduced, stored, transmitted,
+# disclosed or used in any form or by any means other than as
+# expressly provided by the written permission from Jianhui Tao
+#
+###################################################################
+
+# -*- coding: utf-8 -*-
+
+import sys
+import taos
+from util.log import tdLog
+from util.cases import tdCases
+from util.sql import tdSql
+from util.dnodes import tdDnodes
+
+class TDTestCase:
+    def init(self, conn, logSql):
+        tdLog.debug("start to execute %s" % __file__)
+        tdSql.init(conn.cursor(), logSql)
+
+        self.ts = 1538548685000
+
+    def run(self):
+        tdSql.prepare()
+
+        print("==============step1")
+        tdSql.execute(
+            "CREATE TABLE IF NOT EXISTS ampere (ts TIMESTAMP(8),ampere DOUBLE(8)) TAGS (device_name BINARY(50),build_id BINARY(50),project_id BINARY(50),alias BINARY(50))")
+        tdSql.execute("insert into d1001 using ampere tags('test', '2', '2', '2') VALUES (now, 123)")
+        tdSql.execute("ALTER TABLE ampere ADD TAG variable_id BINARY(50)")
+
+        print("==============step2")
+
+        tdSql.execute("insert into d1002 using ampere tags('test', '2', '2', '2', 'test') VALUES (now, 124)")
+
+        tdSql.query("select * from ampere")
+        tdSql.checkRows(2)
+        tdSql.checkData(0, 6, None)
+        tdSql.checkData(1, 6, 'test')
+
+    def stop(self):
+        tdSql.close()
+        tdLog.success("%s successfully executed" % __file__)
+
+
+tdCases.addWindows(__file__, TDTestCase())
+tdCases.addLinux(__file__, TDTestCase())
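The alter_tag case verifies that after `ALTER TABLE ... ADD TAG`, existing child tables report NULL for the new tag while newly created ones can set it. A hedged C sketch issuing the same statements through `taos_query` (column types simplified, result checking omitted):

```c
#include <stddef.h>
#include <taos.h>

void alter_tag_demo(TAOS *conn) {
  const char *stmts[] = {
    "create table if not exists ampere (ts timestamp, ampere double) "
    "tags (device_name binary(50), build_id binary(50), project_id binary(50), alias binary(50))",
    "insert into d1001 using ampere tags('test','2','2','2') values (now, 123)",
    "alter table ampere add tag variable_id binary(50)",   // existing rows now show NULL for variable_id
    "insert into d1002 using ampere tags('test','2','2','2','test') values (now, 124)",
  };
  for (size_t i = 0; i < sizeof(stmts) / sizeof(stmts[0]); ++i) {
    TAOS_RES *res = taos_query(conn, stmts[i]);
    taos_free_result(res);   // error handling left out of this sketch
  }
}
```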
@@ -0,0 +1,162 @@
+system sh/stop_dnodes.sh
+system sh/deploy.sh -n dnode1 -i 1
+system sh/cfg.sh -n dnode1 -c walLevel -v 0
+system sh/cfg.sh -n dnode1 -c maxtablesPerVnode -v 2
+system sh/exec.sh -n dnode1 -s start
+
+sleep 100
+sql connect
+print ======================== dnode1 start
+
+$db = testdb
+
+sql create database $db
+sql use $db
+
+sql create stable st2 (ts timestamp, f1 int) tags (id int, t1 int, t2 nchar(4), t3 double)
+
+sql insert into tb1 using st2 (id, t1) tags(1,2) values (now, 1)
+
+sql select id,t1,t2,t3 from tb1
+
+if $rows != 1 then
+  return -1
+endi
+
+if $data00 != 1 then
+  return -1
+endi
+if $data01 != 2 then
+  return -1
+endi
+if $data02 != NULL then
+  return -1
+endi
+if $data03 != NULL then
+  return -1
+endi
+
+sql create table tb2 using st2 (t2,t3) tags ("12",22.0)
+
+sql select id,t1,t2,t3 from tb2;
+
+if $rows != 1 then
+  return -1
+endi
+
+if $data00 != NULL then
+  return -1
+endi
+if $data01 != NULL then
+  return -1
+endi
+if $data02 != 12 then
+  return -1
+endi
+if $data03 != 22.000000000 then
+  return -1
+endi
+
+sql create table tb3 using st2 tags (1,2,"3",33.0);
+
+sql select id,t1,t2,t3 from tb3;
+
+if $rows != 1 then
+  return -1
+endi
+
+if $data00 != 1 then
+  return -1
+endi
+if $data01 != 2 then
+  return -1
+endi
+if $data02 != 3 then
+  return -1
+endi
+if $data03 != 33.000000000 then
+  return -1
+endi
+
+sql insert into tb4 using st2 tags(1,2,"33",44.0) values (now, 1);
+
+sql select id,t1,t2,t3 from tb4;
+
+if $rows != 1 then
+  return -1
+endi
+
+if $data00 != 1 then
+  return -1
+endi
+if $data01 != 2 then
+  return -1
+endi
+if $data02 != 33 then
+  return -1
+endi
+if $data03 != 44.000000000 then
+  return -1
+endi
+
+sql_error create table tb5 using st2() tags (3,3,"3",33.0);
+
+sql_error create table tb6 using st2 (id,t1) tags (3,3,"3",33.0);
+
+sql_error create table tb7 using st2 (id,t1) tags (3);
+
+sql_error create table tb8 using st2 (ide) tags (3);
+
+sql_error create table tb9 using st2 (id);
+
+sql_error create table tb10 using st2 (id t1) tags (1,1);
+
+sql_error create table tb10 using st2 (id,,t1) tags (1,1,1);
+
+sql_error create table tb11 using st2 (id,t1,) tags (1,1,1);
+
+sql create table tb12 using st2 (t1,id) tags (2,1);
+
+sql select id,t1,t2,t3 from tb12;
+if $rows != 1 then
+  return -1
+endi
+
+if $data00 != 1 then
+  return -1
+endi
+if $data01 != 2 then
+  return -1
+endi
+if $data02 != NULL then
+  return -1
+endi
+if $data03 != NULL then
+  return -1
+endi
+
+sql create table tb13 using st2 ("t1",'id') tags (2,1);
+
+sql select id,t1,t2,t3 from tb13;
+
+if $rows != 1 then
+  return -1
+endi
+
+if $data00 != 1 then
+  return -1
+endi
+if $data01 != 2 then
+  return -1
+endi
+if $data02 != NULL then
+  return -1
+endi
+if $data03 != NULL then
+  return -1
+endi
+
+system sh/exec.sh -n dnode1 -s stop -x SIGINT
@@ -72,4 +72,3 @@ cd ../../../debug; make
 ./test.sh -f unique/cluster/cache.sim
 ./test.sh -f unique/cluster/vgroup100.sim
 
-./test.sh -f unique/column/replica3.sim
@@ -35,4 +35,6 @@
 ./test.sh -f general/stable/refcount.sim
 ./test.sh -f general/stable/show.sim
 ./test.sh -f general/stable/values.sim
 ./test.sh -f general/stable/vnode3.sim
+
+./test.sh -f unique/column/replica3.sim
@@ -81,7 +81,9 @@ char *simParseHostName(char *varName) {
 }
 
 bool simSystemInit() {
-  taos_init();
+  if (taos_init()) {
+    return false;
+  }
   taosGetFqdn(simHostName);
   simInitsimCmdList();
   memset(simScriptList, 0, sizeof(SScript *) * MAX_MAIN_SCRIPT_NUM);