Merge branch 'develop' into docs/Update-Latest-Feature

Elias Soong 2021-02-19 18:03:53 +08:00
commit 4d774efcc3
84 changed files with 3306 additions and 1333 deletions

.gitignore

@@ -79,3 +79,15 @@ tests/comparisonTest/opentsdb/opentsdbtest/.settings/
 tests/examples/JDBC/JDBCDemo/.classpath
 tests/examples/JDBC/JDBCDemo/.project
 tests/examples/JDBC/JDBCDemo/.settings/
+
+# Emacs
+# -*- mode: gitignore; -*-
+*~
+\#*\#
+/.emacs.desktop
+/.emacs.desktop.lock
+*.elc
+auto-save-list
+tramp
+.\#*
+TAGS

.gitmodules

@@ -4,9 +4,9 @@
 [submodule "src/connector/grafanaplugin"]
 	path = src/connector/grafanaplugin
 	url = https://github.com/taosdata/grafanaplugin
-[submodule "src/connector/hivemq-tdengine-extension"]
-	path = src/connector/hivemq-tdengine-extension
-	url = https://github.com/huskar-t/hivemq-tdengine-extension.git
 [submodule "tests/examples/rust"]
 	path = tests/examples/rust
 	url = https://github.com/songtianyi/tdengine-rust-bindings.git
+[submodule "src/connector/hivemq-tdengine-extension"]
+	path = src/connector/hivemq-tdengine-extension
+	url = https://github.com/huskar-t/hivemq-tdengine-extension.git

Jenkinsfile

@@ -5,7 +5,7 @@ node {
     git url: 'https://github.com/taosdata/TDengine.git'
 }
-def kipstage=0
+def skipstage=0
 def abortPreviousBuilds() {
   def currentJobName = env.JOB_NAME
   def currentBuildNumber = env.BUILD_NUMBER.toInteger()
@@ -88,8 +88,9 @@ pipeline {
       git checkout -qf FETCH_HEAD
       '''
       script{
-        skipstage=sh(script:"git --no-pager diff --name-only FETCH_HEAD develop|grep -v -E '.*md|//src//connector|Jenkinsfile|test-all.sh' || echo 0 ",returnStdout:true)
+        env.skipstage=sh(script:"cd ${WORKSPACE}.tes && git --no-pager diff --name-only FETCH_HEAD develop|grep -v -E '.*md|//src//connector|Jenkinsfile|test-all.sh' || echo 0 ",returnStdout:true)
       }
+      println env.skipstage
       sh'''
      rm -rf ${WORKSPACE}.tes
      '''
@@ -101,7 +102,7 @@ pipeline {
     when {
       changeRequest()
       expression {
-        skipstage != 0
+        env.skipstage != 0
       }
     }
     parallel {


@@ -4,7 +4,7 @@ PROJECT(TDengine)
 IF (DEFINED VERNUMBER)
   SET(TD_VER_NUMBER ${VERNUMBER})
 ELSE ()
-  SET(TD_VER_NUMBER "2.0.14.0")
+  SET(TD_VER_NUMBER "2.0.16.0")
 ENDIF ()
 IF (DEFINED VERCOMPATIBLE)


@@ -362,24 +362,6 @@ The default timestamp precision in TDengine is millisecond, but it can be changed via the configuration parameter enableMic
 **Writing historical records** can be done with either the IMPORT or the INSERT command; the syntax and semantics of IMPORT are exactly the same as those of INSERT.
 
-## <a class="anchor" id="select"></a>Data Query
-
-### Query Syntax:
-
-```mysql
-SELECT select_expr [, select_expr ...]
-FROM {tb_name_list}
-[WHERE where_condition]
-[INTERVAL (interval_val [, interval_offset])]
-[FILL fill_val]
-[SLIDING fill_val]
-[GROUP BY col_list]
-[ORDER BY col_list { DESC | ASC }]
-[SLIMIT limit_val [, SOFFSET offset_val]]
-[LIMIT limit_val [, OFFSET offset_val]]
-[>> export_file];
-```
-
 Note: for INSERT-type SQL statements a streaming parse strategy is used, so the correct leading part of the statement is still executed before a later error is found. In the SQL below the INSERT statement is invalid, but the table d1001 is still created.
 ```mysql
@@ -406,6 +388,24 @@ taos> SHOW TABLES;
 Query OK, 1 row(s) in set (0.001091s)
 ```
 
+## <a class="anchor" id="select"></a>Data Query
+
+### Query Syntax:
+
+```mysql
+SELECT select_expr [, select_expr ...]
+FROM {tb_name_list}
+[WHERE where_condition]
+[INTERVAL (interval_val [, interval_offset])]
+[FILL fill_val]
+[SLIDING fill_val]
+[GROUP BY col_list]
+[ORDER BY col_list { DESC | ASC }]
+[SLIMIT limit_val [, SOFFSET offset_val]]
+[LIMIT limit_val [, OFFSET offset_val]]
+[>> export_file];
+```
+
 #### SELECT Clause
 
 A select clause can be a union query (UNION) or a subquery (SUBQUERY) of another query.
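The query grammar above can also be exercised from the Python connector updated elsewhere in this commit. A minimal, hypothetical sketch; the `power` database and `d1001` table are assumed demo objects, not part of this change:

```python
# Illustrative only: run a simple query through the taos Python connector.
import taos

conn = taos.connect(host="127.0.0.1", user="root",
                    password="taosdata", database="power")
cursor = conn.cursor()

# A plain projection with a WHERE filter and LIMIT, matching the syntax above.
cursor.execute("SELECT ts, current, voltage FROM d1001 "
               "WHERE voltage > 215 LIMIT 5")
for row in cursor.fetchall():
    print(row)

cursor.close()
conn.close()
```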


@@ -35,10 +35,11 @@ done
 echo "verNumber=${verNumber}"
-docker manifest create -a tdengine/tdengine:${verNumber} tdengine/tdengine-amd64:${verNumber} tdengine/tdengine-aarch64:${verNumber} tdengine/tdengine-aarch32:${verNumber}
+#docker manifest create -a tdengine/tdengine:${verNumber} tdengine/tdengine-amd64:${verNumber} tdengine/tdengine-aarch64:${verNumber} tdengine/tdengine-aarch32:${verNumber}
+docker manifest create -a tdengine/tdengine tdengine/tdengine-amd64:latest tdengine/tdengine-aarch64:latest tdengine/tdengine-aarch32:latest
 docker login -u tdengine -p ${passWord}  #replace the docker registry username and password
-docker manifest push tdengine/tdengine:${verNumber}
+docker manifest push tdengine/tdengine
 # how set latest version ???


@@ -9,6 +9,7 @@ Summary: tdengine from taosdata
 Group: Application/Database
 License: AGPL
 URL: www.taosdata.com
+AutoReqProv: no
 #BuildRoot: %_topdir/BUILDROOT
 BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root


@@ -1,6 +1,6 @@
 name: tdengine
 base: core18
-version: '2.0.14.0'
+version: '2.0.16.0'
 icon: snap/gui/t-dengine.svg
 summary: an open-source big data platform designed and optimized for IoT.
 description: |
@@ -72,7 +72,7 @@ parts:
     - usr/bin/taosd
     - usr/bin/taos
     - usr/bin/taosdemo
-    - usr/lib/libtaos.so.2.0.14.0
+    - usr/lib/libtaos.so.2.0.16.0
     - usr/lib/libtaos.so.1
     - usr/lib/libtaos.so


@@ -6375,16 +6375,14 @@ int32_t doCheckForCreateFromStable(SSqlObj* pSql, SSqlInfo* pInfo) {
   // get table meta from mnode
   code = tNameExtractFullName(&pStableMetaInfo->name, pCreateTableInfo->tagdata.name);
-  SArray* pList = pCreateTableInfo->pTagVals;
+  SArray* pValList = pCreateTableInfo->pTagVals;
   code = tscGetTableMeta(pSql, pStableMetaInfo);
   if (code != TSDB_CODE_SUCCESS) {
     return code;
   }
-  size_t size = taosArrayGetSize(pList);
-  if (tscGetNumOfTags(pStableMetaInfo->pTableMeta) != size) {
-    return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
-  }
+  size_t valSize = taosArrayGetSize(pValList);
 
   // too long tag values will return invalid sql, not be truncated automatically
   SSchema *pTagSchema = tscGetTableTagSchema(pStableMetaInfo->pTableMeta);
@@ -6395,36 +6393,111 @@ int32_t doCheckForCreateFromStable(SSqlObj* pSql, SSqlInfo* pInfo) {
     return TSDB_CODE_TSC_OUT_OF_MEMORY;
   }
 
+  SArray* pNameList = NULL;
+  size_t nameSize = 0;
+  int32_t schemaSize = tscGetNumOfTags(pStableMetaInfo->pTableMeta);
   int32_t ret = TSDB_CODE_SUCCESS;
-  for (int32_t i = 0; i < size; ++i) {
-    SSchema* pSchema = &pTagSchema[i];
-    tVariantListItem* pItem = taosArrayGet(pList, i);
-
-    char tagVal[TSDB_MAX_TAGS_LEN];
-    if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
-      if (pItem->pVar.nLen > pSchema->bytes) {
-        tdDestroyKVRowBuilder(&kvRowBuilder);
-        return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
-      }
-    }
-
-    ret = tVariantDump(&(pItem->pVar), tagVal, pSchema->type, true);
-
-    // check again after the convert since it may be converted from binary to nchar.
-    if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
-      int16_t len = varDataTLen(tagVal);
-      if (len > pSchema->bytes) {
-        tdDestroyKVRowBuilder(&kvRowBuilder);
-        return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
-      }
-    }
-
-    if (ret != TSDB_CODE_SUCCESS) {
-      tdDestroyKVRowBuilder(&kvRowBuilder);
-      return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg4);
-    }
-
-    tdAddColToKVRow(&kvRowBuilder, pSchema->colId, pSchema->type, tagVal);
-  }
+
+  if (pCreateTableInfo->pTagNames) {
+    pNameList = pCreateTableInfo->pTagNames;
+    nameSize = taosArrayGetSize(pNameList);
+
+    if (valSize != nameSize) {
+      return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
+    }
+
+    if (schemaSize < valSize) {
+      return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
+    }
+
+    bool findColumnIndex = false;
+
+    for (int32_t i = 0; i < nameSize; ++i) {
+      SStrToken* sToken = taosArrayGet(pNameList, i);
+      if (TK_STRING == sToken->type) {
+        tscDequoteAndTrimToken(sToken);
+      }
+
+      tVariantListItem* pItem = taosArrayGet(pValList, i);
+      findColumnIndex = false;
+
+      // todo speedup by using hash list
+      for (int32_t t = 0; t < schemaSize; ++t) {
+        if (strncmp(sToken->z, pTagSchema[t].name, sToken->n) == 0 && strlen(pTagSchema[t].name) == sToken->n) {
+          SSchema* pSchema = &pTagSchema[t];
+
+          char tagVal[TSDB_MAX_TAGS_LEN];
+          if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
+            if (pItem->pVar.nLen > pSchema->bytes) {
+              tdDestroyKVRowBuilder(&kvRowBuilder);
+              return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
+            }
+          }
+
+          ret = tVariantDump(&(pItem->pVar), tagVal, pSchema->type, true);
+
+          // check again after the convert since it may be converted from binary to nchar.
+          if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
+            int16_t len = varDataTLen(tagVal);
+            if (len > pSchema->bytes) {
+              tdDestroyKVRowBuilder(&kvRowBuilder);
+              return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
+            }
+          }
+
+          if (ret != TSDB_CODE_SUCCESS) {
+            tdDestroyKVRowBuilder(&kvRowBuilder);
+            return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg4);
+          }
+
+          tdAddColToKVRow(&kvRowBuilder, pSchema->colId, pSchema->type, tagVal);
+
+          findColumnIndex = true;
+          break;
+        }
+      }
+
+      if (!findColumnIndex) {
+        return tscInvalidSQLErrMsg(pCmd->payload, "invalid tag name", sToken->z);
+      }
+    }
+  } else {
+    if (schemaSize != valSize) {
+      return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg5);
+    }
+
+    for (int32_t i = 0; i < valSize; ++i) {
+      SSchema* pSchema = &pTagSchema[i];
+      tVariantListItem* pItem = taosArrayGet(pValList, i);
+
+      char tagVal[TSDB_MAX_TAGS_LEN];
+      if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
+        if (pItem->pVar.nLen > pSchema->bytes) {
+          tdDestroyKVRowBuilder(&kvRowBuilder);
+          return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
+        }
+      }
+
+      ret = tVariantDump(&(pItem->pVar), tagVal, pSchema->type, true);
+
+      // check again after the convert since it may be converted from binary to nchar.
+      if (pSchema->type == TSDB_DATA_TYPE_BINARY || pSchema->type == TSDB_DATA_TYPE_NCHAR) {
+        int16_t len = varDataTLen(tagVal);
+        if (len > pSchema->bytes) {
+          tdDestroyKVRowBuilder(&kvRowBuilder);
+          return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg3);
+        }
+      }
+
+      if (ret != TSDB_CODE_SUCCESS) {
+        tdDestroyKVRowBuilder(&kvRowBuilder);
+        return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg4);
+      }
+
+      tdAddColToKVRow(&kvRowBuilder, pSchema->colId, pSchema->type, tagVal);
+    }
+  }
 
   SKVRow row = tdGetKVRowFromBuilder(&kvRowBuilder);
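The new `pCreateTableInfo->pTagNames` branch matches each explicitly listed tag name against the super table's tag schema (rejecting unknown names with "invalid tag name"), while the `else` branch keeps the old positional behavior. A hedged sketch of what this enables, issued through the Python connector; the `power` database, the `meters` super table, and its tag names are assumptions, and the grammar change that populates `pTagNames` lives in the parser, which is not shown in this hunk:

```python
# Illustrative only: create a child table while naming its tags explicitly,
# relying on the tag-name matching added above.
import taos

conn = taos.connect(host="127.0.0.1", user="root",
                    password="taosdata", database="power")
c = conn.cursor()

# Assumed super table:
#   CREATE TABLE meters (ts TIMESTAMP, current FLOAT)
#   TAGS (location BINARY(64), groupid INT);
c.execute("CREATE TABLE IF NOT EXISTS d1002 USING meters "
          "(location, groupid) TAGS ('Beijing.Chaoyang', 2)")

c.close()
conn.close()
```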


@@ -52,7 +52,9 @@ static bool validPassword(const char* passwd) {
 static SSqlObj *taosConnectImpl(const char *ip, const char *user, const char *pass, const char *auth, const char *db,
                                 uint16_t port, void (*fp)(void *, TAOS_RES *, int), void *param, TAOS **taos) {
-  taos_init();
+  if (taos_init()) {
+    return NULL;
+  }
 
   if (!validUserName(user)) {
     terrno = TSDB_CODE_TSC_INVALID_USER_LENGTH;


@@ -47,6 +47,7 @@ void *tscRpcCache; // cache to keep rpc obj
 int32_t tscNumOfThreads = 1; // num of rpc threads
 static pthread_mutex_t rpcObjMutex; // mutex to protect open the rpc obj concurrently
 static pthread_once_t tscinit = PTHREAD_ONCE_INIT;
+static volatile int tscInitRes = 0;
 
 void tscCheckDiskUsage(void *UNUSED_PARAM(para), void *UNUSED_PARAM(param)) {
   taosGetDisk();
@@ -137,7 +138,11 @@ void taos_init_imp(void) {
   }
 
   taosReadGlobalCfg();
-  taosCheckGlobalCfg();
+  if (taosCheckGlobalCfg()) {
+    tscInitRes = -1;
+    return;
+  }
 
   taosInitNotes();
   rpcInit();
@@ -159,6 +164,7 @@ void taos_init_imp(void) {
   tscQhandle = taosInitScheduler(queueSize, tscNumOfThreads, "tsc");
   if (NULL == tscQhandle) {
     tscError("failed to init scheduler");
+    tscInitRes = -1;
     return;
   }
@@ -187,7 +193,7 @@ void taos_init_imp(void) {
   tscDebug("client is initialized successfully");
 }
 
-void taos_init() { pthread_once(&tscinit, taos_init_imp); }
+int taos_init() { pthread_once(&tscinit, taos_init_imp); return tscInitRes;}
 
 // this function may be called by user or system, or by both simultaneously.
 void taos_cleanup(void) {
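Because `taos_init()` now reports a status, a caller can detect a failed initialization (for example, the temporary-directory check added below) instead of continuing with a half-initialized client, which is exactly what the `taosConnectImpl` change above does. A rough ctypes sketch of checking the new return value, assuming a `libtaos.so` built from this branch is on the loader path:

```python
# Illustrative only: call the revised taos_init() and check its int result
# (0 on success, non-zero when taos_init_imp bailed out early).
import ctypes

libtaos = ctypes.CDLL("libtaos.so")
libtaos.taos_init.restype = ctypes.c_int

if libtaos.taos_init() != 0:
    raise RuntimeError("taos_init failed; check the client configuration")
```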


@@ -373,6 +373,23 @@ static void taosCheckDataDirCfg() {
   }
 }
 
+static int32_t taosCheckTmpDir(void) {
+  if (strlen(tsTempDir) <= 0){
+    uError("tempDir is not set");
+    return -1;
+  }
+
+  DIR *dir = opendir(tsTempDir);
+  if (dir == NULL) {
+    uError("can not open tempDir:%s, error:%s", tsTempDir, strerror(errno));
+    return -1;
+  }
+  closedir(dir);
+
+  return 0;
+}
+
 static void doInitGlobalConfig(void) {
   osInit();
   srand(taosSafeRand());
@@ -1488,6 +1505,11 @@ int32_t taosCheckGlobalCfg() {
   }
 
   taosCheckDataDirCfg();
+
+  if (taosCheckTmpDir()) {
+    return -1;
+  }
+
   taosGetSystemInfo();
   tsSetLocale();


@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
 
 setuptools.setup(
     name="taos",
-    version="2.0.5",
+    version="2.0.6",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",


@@ -3,12 +3,12 @@ from .connection import TDengineConnection
 from .cursor import TDengineCursor
 
 # Globals
 apilevel = '2.0.3'
 threadsafety = 0
 paramstyle = 'pyformat'
 
 __all__ = ['connection', 'cursor']
 
 def connect(*args, **kwargs):
     """ Function to return a TDengine connector object
@@ -21,4 +21,4 @@ def connect(*args, **kwargs):
     @rtype: TDengineConnector
     """
     return TDengineConnection(*args, **kwargs)


@ -4,11 +4,14 @@ from .error import *
import math import math
import datetime import datetime
def _convert_millisecond_to_datetime(milli): def _convert_millisecond_to_datetime(milli):
return datetime.datetime.fromtimestamp(milli/1000.0) return datetime.datetime.fromtimestamp(milli / 1000.0)
def _convert_microsecond_to_datetime(micro): def _convert_microsecond_to_datetime(micro):
return datetime.datetime.fromtimestamp(micro/1000000.0) return datetime.datetime.fromtimestamp(micro / 1000000.0)
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row """Function to convert C bool row to python row
@ -18,168 +21,309 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
_timestamp_converter = _convert_microsecond_to_datetime _timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0: if num_of_rows > 0:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)])) return list(map(_timestamp_converter, ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
else: else:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)])) return list(map(_timestamp_converter, ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row """Function to convert C bool row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_bool))[
:abs(num_of_rows)]]
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row """Function to convert C tinyint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
def _crow_tinyint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row """Function to convert C smallint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]] return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_smallint_unsigned_to_python(
data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row """Function to convert C int row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int))[
:abs(num_of_rows)]]
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row """Function to convert C bigint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
def _crow_bigint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_long))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_long))[
:abs(num_of_rows)]]
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row """Function to convert C float row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
else: else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row """Function to convert C double row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
else: else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row """Function to convert C binary row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]] return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else: else:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]] return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row """Function to convert C nchar row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
if num_of_rows >= 0: if num_of_rows >= 0:
tmpstr = ctypes.c_char_p(data) tmpstr = ctypes.c_char_p(data)
res.append( tmpstr.value.decode() ) res.append(tmpstr.value.decode())
else: else:
res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value ) res.append((ctypes.cast(data + nbytes * i,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False): def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row """Function to convert C binary row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
if num_of_rows > 0: if num_of_rows > 0:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop() rbyte = ctypes.cast(
tmpstr = ctypes.c_char_p(data+nbytes*i+2) data + nbytes * i,
res.append( tmpstr.value.decode()[0:rbyte] ) ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError: except ValueError:
res.append(None) res.append(None)
else: else:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop() rbyte = ctypes.cast(
tmpstr = ctypes.c_char_p(data+nbytes*i+2) data + nbytes * i,
res.append( tmpstr.value.decode()[0:rbyte] ) ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False): def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row """Function to convert C nchar row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
if num_of_rows >= 0: if num_of_rows >= 0:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
tmpstr = ctypes.c_char_p(data+nbytes*i+2) tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append( tmpstr.value.decode() ) res.append(tmpstr.value.decode())
except ValueError: except ValueError:
res.append(None) res.append(None)
else: else:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value ) res.append((ctypes.cast(data + nbytes * i + 2,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
_CONVERT_FUNC = { _CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python, FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT : _crow_tinyint_to_python, FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT : _crow_smallint_to_python, FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT : _crow_int_to_python, FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT : _crow_bigint_to_python, FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT : _crow_float_to_python, FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE : _crow_double_to_python, FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python, FieldType.C_BINARY: _crow_binary_to_python,
FieldType.C_TIMESTAMP : _crow_timestamp_to_python, FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR : _crow_nchar_to_python FieldType.C_NCHAR: _crow_nchar_to_python,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
} }
_CONVERT_FUNC_BLOCK = { _CONVERT_FUNC_BLOCK = {
FieldType.C_BOOL: _crow_bool_to_python, FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT : _crow_tinyint_to_python, FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT : _crow_smallint_to_python, FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT : _crow_int_to_python, FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT : _crow_bigint_to_python, FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT : _crow_float_to_python, FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE : _crow_double_to_python, FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python_block, FieldType.C_BINARY: _crow_binary_to_python_block,
FieldType.C_TIMESTAMP : _crow_timestamp_to_python, FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR : _crow_nchar_to_python_block FieldType.C_NCHAR: _crow_nchar_to_python_block,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
} }
# Corresponding TAOS_FIELD structure in C # Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure): class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65), _fields_ = [('name', ctypes.c_char * 65),
('type', ctypes.c_char), ('type', ctypes.c_char),
('bytes', ctypes.c_short)] ('bytes', ctypes.c_short)]
# C interface class # C interface class
class CTaosInterface(object): class CTaosInterface(object):
libtaos = ctypes.CDLL('libtaos.so') libtaos = ctypes.CDLL('libtaos.so')
@ -216,7 +360,7 @@ class CTaosInterface(object):
except AttributeError: except AttributeError:
raise AttributeError("config is expected as a str") raise AttributeError("config is expected as a str")
if config != None: if config is not None:
CTaosInterface.libtaos.taos_options(3, self._config) CTaosInterface.libtaos.taos_options(3, self._config)
CTaosInterface.libtaos.taos_init() CTaosInterface.libtaos.taos_init()
@ -227,7 +371,13 @@ class CTaosInterface(object):
""" """
return self._config return self._config
def connect(self, host=None, user="root", password="taosdata", db=None, port=0): def connect(
self,
host=None,
user="root",
password="taosdata",
db=None,
port=0):
''' '''
Function to connect to server Function to connect to server
@ -236,7 +386,7 @@ class CTaosInterface(object):
# host # host
try: try:
_host = ctypes.c_char_p(host.encode( _host = ctypes.c_char_p(host.encode(
"utf-8")) if host != None else ctypes.c_char_p(None) "utf-8")) if host is not None else ctypes.c_char_p(None)
except AttributeError: except AttributeError:
raise AttributeError("host is expected as a str") raise AttributeError("host is expected as a str")
@ -255,7 +405,7 @@ class CTaosInterface(object):
# db # db
try: try:
_db = ctypes.c_char_p( _db = ctypes.c_char_p(
db.encode("utf-8")) if db != None else ctypes.c_char_p(None) db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
except AttributeError: except AttributeError:
raise AttributeError("db is expected as a str") raise AttributeError("db is expected as a str")
@ -268,11 +418,11 @@ class CTaosInterface(object):
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect( connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
_host, _user, _password, _db, _port)) _host, _user, _password, _db, _port))
if connection.value == None: if connection.value is None:
print('connect to TDengine failed') print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed") raise ConnectionError("connect to TDengine failed")
# sys.exit(1) # sys.exit(1)
#else: # else:
# print('connect to TDengine success') # print('connect to TDengine success')
return connection return connection
@ -293,12 +443,13 @@ class CTaosInterface(object):
@rtype: 0 on success and -1 on failure @rtype: 0 on success and -1 on failure
''' '''
try: try:
return CTaosInterface.libtaos.taos_query(connection, ctypes.c_char_p(sql.encode('utf-8'))) return CTaosInterface.libtaos.taos_query(
connection, ctypes.c_char_p(sql.encode('utf-8')))
except AttributeError: except AttributeError:
raise AttributeError("sql is expected as a string") raise AttributeError("sql is expected as a string")
# finally: # finally:
# CTaosInterface.libtaos.close(connection) # CTaosInterface.libtaos.close(connection)
@staticmethod @staticmethod
def affectedRows(result): def affectedRows(result):
"""The affected rows after runing query """The affected rows after runing query
@ -308,7 +459,7 @@ class CTaosInterface(object):
@staticmethod @staticmethod
def subscribe(connection, restart, topic, sql, interval): def subscribe(connection, restart, topic, sql, interval):
"""Create a subscription """Create a subscription
@restart boolean, @restart boolean,
@sql string, sql statement for data query, must be a 'select' statement. @sql string, sql statement for data query, must be a 'select' statement.
@topic string, name of this subscription @topic string, name of this subscription
""" """
@ -360,38 +511,53 @@ class CTaosInterface(object):
result, ctypes.byref(pblock)) result, ctypes.byref(pblock))
if num_of_rows == 0: if num_of_rows == 0:
return None, 0 return None, 0
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO) isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields) blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result) fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]] fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)): for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i] data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK: if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
raise DatabaseError("Invalid data type returned from database") raise DatabaseError("Invalid data type returned from database")
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro) blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
return blocks, abs(num_of_rows) return blocks, abs(num_of_rows)
@staticmethod @staticmethod
def fetchRow(result, fields): def fetchRow(result, fields):
pblock = ctypes.c_void_p(0) pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result) pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock : if pblock:
num_of_rows = 1 num_of_rows = 1
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO) isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields) blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result) fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]] fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)): for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i] data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC: if fields[i]['type'] not in _CONVERT_FUNC:
raise DatabaseError("Invalid data type returned from database") raise DatabaseError(
"Invalid data type returned from database")
if data is None: if data is None:
blocks[i] = [None] blocks[i] = [None]
else: else:
blocks[i] = _CONVERT_FUNC[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro) blocks[i] = _CONVERT_FUNC[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
else: else:
return None, 0 return None, 0
return blocks, abs(num_of_rows) return blocks, abs(num_of_rows)
@staticmethod @staticmethod
def freeResult(result): def freeResult(result):
CTaosInterface.libtaos.taos_free_result(result) CTaosInterface.libtaos.taos_free_result(result)


@@ -2,9 +2,11 @@ from .cursor import TDengineCursor
 from .subscription import TDengineSubscription
 from .cinterface import CTaosInterface
 
+
 class TDengineConnection(object):
     """ TDengine connection object
     """
 
     def __init__(self, *args, **kwargs):
         self._conn = None
         self._host = None
@@ -29,7 +31,7 @@
         # password
         if 'password' in kwargs:
             self._password = kwargs['password']
 
         # database
         if 'database' in kwargs:
             self._database = kwargs['database']
@@ -43,7 +45,12 @@
         self._config = kwargs['config']
 
         self._chandle = CTaosInterface(self._config)
-        self._conn = self._chandle.connect(self._host, self._user, self._password, self._database, self._port)
+        self._conn = self._chandle.connect(
+            self._host,
+            self._user,
+            self._password,
+            self._database,
+            self._port)
 
     def close(self):
         """Close current connection.
@@ -55,7 +62,8 @@
         """
         if self._conn is None:
             return None
-        sub = CTaosInterface.subscribe(self._conn, restart, topic, sql, interval)
+        sub = CTaosInterface.subscribe(
+            self._conn, restart, topic, sql, interval)
         return TDengineSubscription(sub)
 
     def cursor(self):
@@ -80,7 +88,8 @@
         """
         pass
 
+
 if __name__ == "__main__":
     conn = TDengineConnection(host='192.168.1.107')
     conn.close()
     print("Hello world")


@@ -3,6 +3,7 @@
 from .dbapi import *
 
+
 class FieldType(object):
     """TDengine Field Types
     """
@@ -18,13 +19,21 @@ class FieldType(object):
     C_BINARY = 8
     C_TIMESTAMP = 9
     C_NCHAR = 10
+    C_TINYINT_UNSIGNED = 12
+    C_SMALLINT_UNSIGNED = 13
+    C_INT_UNSIGNED = 14
+    C_BIGINT_UNSIGNED = 15
     # NULL value definition
     # NOTE: These values should change according to C definition in tsdb.h
     C_BOOL_NULL = 0x02
     C_TINYINT_NULL = -128
+    C_TINYINT_UNSIGNED_NULL = 255
     C_SMALLINT_NULL = -32768
+    C_SMALLINT_UNSIGNED_NULL = 65535
     C_INT_NULL = -2147483648
+    C_INT_UNSIGNED_NULL = 4294967295
     C_BIGINT_NULL = -9223372036854775808
+    C_BIGINT_UNSIGNED_NULL = 18446744073709551615
     C_FLOAT_NULL = float('nan')
     C_DOUBLE_NULL = float('nan')
     C_BINARY_NULL = bytearray([int('0xff', 16)])
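Together with the `_crow_*_unsigned_to_python` converters registered in `_CONVERT_FUNC`/`_CONVERT_FUNC_BLOCK`, these constants let the connector map unsigned columns to Python values and their NULL markers to `None`. A hedged round-trip sketch, assuming a server build that accepts unsigned column types; the database and table names are illustrative:

```python
# Illustrative only: round-trip the newly supported unsigned types.
import taos

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
c = conn.cursor()
c.execute("CREATE DATABASE IF NOT EXISTS utest")
c.execute("CREATE TABLE IF NOT EXISTS utest.u1 "
          "(ts TIMESTAMP, a TINYINT UNSIGNED, b INT UNSIGNED)")
c.execute("INSERT INTO utest.u1 VALUES (now, 200, 4000000000)")
c.execute("SELECT a, b FROM utest.u1")
print(c.fetchall())  # unsigned NULLs would come back as None
c.close()
conn.close()
```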


@@ -128,8 +128,8 @@ class TDengineCursor(object):
         if errno == 0:
             if CTaosInterface.fieldsCount(self._result) == 0:
                 self._affected_rows += CTaosInterface.affectedRows(
-                    self._result )
-                return CTaosInterface.affectedRows(self._result )
+                    self._result)
+                return CTaosInterface.affectedRows(self._result)
             else:
                 self._fields = CTaosInterface.useResult(
                     self._result)
@@ -148,6 +148,7 @@
         """Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
         """
         pass
 
+
     def fetchmany(self):
         pass
@@ -158,11 +159,26 @@
         if (dataType.upper() == "TINYINT"):
             if (self._description[col][1] == FieldType.C_TINYINT):
                 return True
+        if (dataType.upper() == "TINYINT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
+                return True
+        if (dataType.upper() == "SMALLINT"):
+            if (self._description[col][1] == FieldType.C_SMALLINT):
+                return True
+        if (dataType.upper() == "SMALLINT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
+                return True
         if (dataType.upper() == "INT"):
             if (self._description[col][1] == FieldType.C_INT):
                 return True
+        if (dataType.upper() == "INT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
+                return True
         if (dataType.upper() == "BIGINT"):
-            if (self._description[col][1] == FieldType.C_INT):
+            if (self._description[col][1] == FieldType.C_BIGINT):
                 return True
+        if (dataType.upper() == "BIGINT UNSIGNED"):
+            if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
+                return True
         if (dataType.upper() == "FLOAT"):
             if (self._description[col][1] == FieldType.C_FLOAT):
@@ -191,16 +207,20 @@
         buffer = [[] for i in range(len(self._fields))]
         self._rowcount = 0
         while True:
-            block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields)
+            block, num_of_fields = CTaosInterface.fetchRow(
+                self._result, self._fields)
             errno = CTaosInterface.libtaos.taos_errno(self._result)
             if errno != 0:
-                raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+                raise ProgrammingError(
+                    CTaosInterface.errStr(
+                        self._result), errno)
             if num_of_fields == 0:
                 break
             self._rowcount += num_of_fields
             for i in range(len(self._fields)):
                 buffer[i].extend(block[i])
         return list(map(tuple, zip(*buffer)))
 
     def fetchall(self):
         if self._result is None or self._fields is None:
             raise OperationalError("Invalid use of fetchall")
@@ -208,16 +228,20 @@
         buffer = [[] for i in range(len(self._fields))]
         self._rowcount = 0
         while True:
-            block, num_of_fields = CTaosInterface.fetchBlock(self._result, self._fields)
+            block, num_of_fields = CTaosInterface.fetchBlock(
+                self._result, self._fields)
             errno = CTaosInterface.libtaos.taos_errno(self._result)
             if errno != 0:
-                raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
+                raise ProgrammingError(
+                    CTaosInterface.errStr(
+                        self._result), errno)
             if num_of_fields == 0:
                 break
             self._rowcount += num_of_fields
             for i in range(len(self._fields)):
                 buffer[i].extend(block[i])
         return list(map(tuple, zip(*buffer)))
 
     def nextset(self):
         """
         """


@@ -4,6 +4,7 @@
 import time
 import datetime
 
+
 class DBAPITypeObject(object):
     def __init__(self, *values):
         self.values = values
@@ -16,23 +17,28 @@
         else:
             return -1
 
+
 Date = datetime.date
 Time = datetime.time
 Timestamp = datetime.datetime
 
+
 def DataFromTicks(ticks):
     return Date(*time.localtime(ticks)[:3])
 
+
 def TimeFromTicks(ticks):
     return Time(*time.localtime(ticks)[3:6])
 
+
 def TimestampFromTicks(ticks):
     return Timestamp(*time.localtime(ticks)[:6])
 
+
 Binary = bytes
 # STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
 # BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
 # NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
 # DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
 # ROWID = DBAPITypeObject()


@@ -1,35 +1,41 @@
 """Python exceptions
 """
 
+
 class Error(Exception):
     def __init__(self, msg=None, errno=None):
         self.msg = msg
         self._full_msg = self.msg
         self.errno = errno
 
     def __str__(self):
         return self._full_msg
 
+
 class Warning(Exception):
     """Exception raised for important warnings like data truncations while inserting.
     """
     pass
 
+
 class InterfaceError(Error):
     """Exception raised for errors that are related to the database interface rather than the database itself.
     """
     pass
 
+
 class DatabaseError(Error):
     """Exception raised for errors that are related to the database.
     """
     pass
 
+
 class DataError(DatabaseError):
     """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
     """
     pass
 
+
 class OperationalError(DatabaseError):
     """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
     """
@@ -41,17 +47,20 @@ class IntegrityError(DatabaseError):
     """
     pass
 
+
 class InternalError(DatabaseError):
     """Exception raised when the database encounters an internal error.
     """
     pass
 
+
 class ProgrammingError(DatabaseError):
     """Exception raised for programming errors.
     """
     pass
 
+
 class NotSupportedError(DatabaseError):
     """Exception raised in case a method or database API was used which is not supported by the database,.
     """
     pass


@@ -1,52 +1,57 @@
 from .cinterface import CTaosInterface
 from .error import *
 
 
 class TDengineSubscription(object):
     """TDengine subscription object
     """
 
     def __init__(self, sub):
         self._sub = sub
 
     def consume(self):
         """Consume rows of a subscription
         """
         if self._sub is None:
             raise OperationalError("Invalid use of consume")
 
         result, fields = CTaosInterface.consume(self._sub)
         buffer = [[] for i in range(len(fields))]
         while True:
             block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
-            if num_of_fields == 0: break
+            if num_of_fields == 0:
+                break
             for i in range(len(fields)):
                 buffer[i].extend(block[i])
 
         self.fields = fields
         return list(map(tuple, zip(*buffer)))
 
-    def close(self, keepProgress = True):
+    def close(self, keepProgress=True):
         """Close the Subscription.
         """
         if self._sub is None:
             return False
 
         CTaosInterface.unsubscribe(self._sub, keepProgress)
         return True
 
 
 if __name__ == '__main__':
     from .connection import TDengineConnection
-    conn = TDengineConnection(host="127.0.0.1", user="root", password="taosdata", database="test")
+    conn = TDengineConnection(
+        host="127.0.0.1",
+        user="root",
+        password="taosdata",
+        database="test")
 
     # Generate a cursor object to run SQL commands
     sub = conn.subscribe(True, "test", "select * from meters;", 1000)
 
-    for i in range(0,10):
+    for i in range(0, 10):
         data = sub.consume()
         for d in data:
             print(d)
 
     sub.close()
     conn.close()


@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
 
 setuptools.setup(
     name="taos",
-    version="2.0.4",
+    version="2.0.5",
     author="Taosdata Inc.",
     author_email="support@taosdata.com",
     description="TDengine python client package",


@@ -3,12 +3,12 @@ from .connection import TDengineConnection
 from .cursor import TDengineCursor
 
 # Globals
 apilevel = '2.0.3'
 threadsafety = 0
 paramstyle = 'pyformat'
 
 __all__ = ['connection', 'cursor']
 
 def connect(*args, **kwargs):
     """ Function to return a TDengine connector object


@ -4,11 +4,14 @@ from .error import *
import math import math
import datetime import datetime
def _convert_millisecond_to_datetime(milli): def _convert_millisecond_to_datetime(milli):
return datetime.datetime.fromtimestamp(milli/1000.0) return datetime.datetime.fromtimestamp(milli / 1000.0)
def _convert_microsecond_to_datetime(micro): def _convert_microsecond_to_datetime(micro):
return datetime.datetime.fromtimestamp(micro/1000000.0) return datetime.datetime.fromtimestamp(micro / 1000000.0)
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row """Function to convert C bool row to python row
@ -18,168 +21,309 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
_timestamp_converter = _convert_microsecond_to_datetime _timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0: if num_of_rows > 0:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)])) return list(map(_timestamp_converter, ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
else: else:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)])) return list(map(_timestamp_converter, ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row """Function to convert C bool row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_bool))[
:abs(num_of_rows)]]
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row """Function to convert C tinyint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
def _crow_tinyint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row """Function to convert C smallint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]] return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_smallint_unsigned_to_python(
data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row """Function to convert C int row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int))[
:abs(num_of_rows)]]
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row """Function to convert C bigint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
def _crow_bigint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_long))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_long))[
:abs(num_of_rows)]]
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row """Function to convert C float row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
else: else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row """Function to convert C double row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
else: else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row """Function to convert C binary row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]] return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else: else:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]] return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row """Function to convert C nchar row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
if num_of_rows >= 0: if num_of_rows >= 0:
tmpstr = ctypes.c_char_p(data) tmpstr = ctypes.c_char_p(data)
res.append( tmpstr.value.decode() ) res.append(tmpstr.value.decode())
else: else:
res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value ) res.append((ctypes.cast(data + nbytes * i,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False): def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row """Function to convert C binary row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
if num_of_rows > 0: if num_of_rows > 0:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop() rbyte = ctypes.cast(
tmpstr = ctypes.c_char_p(data+nbytes*i+2) data + nbytes * i,
res.append( tmpstr.value.decode()[0:rbyte] ) ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError: except ValueError:
res.append(None) res.append(None)
else: else:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop() rbyte = ctypes.cast(
tmpstr = ctypes.c_char_p(data+nbytes*i+2) data + nbytes * i,
res.append( tmpstr.value.decode()[0:rbyte] ) ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False): def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row """Function to convert C nchar row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
if num_of_rows >= 0: if num_of_rows >= 0:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
tmpstr = ctypes.c_char_p(data+nbytes*i+2) tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append( tmpstr.value.decode() ) res.append(tmpstr.value.decode())
except ValueError: except ValueError:
res.append(None) res.append(None)
else: else:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value ) res.append((ctypes.cast(data + nbytes * i + 2,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
_CONVERT_FUNC = { _CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python, FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT : _crow_tinyint_to_python, FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT : _crow_smallint_to_python, FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT : _crow_int_to_python, FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT : _crow_bigint_to_python, FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT : _crow_float_to_python, FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE : _crow_double_to_python, FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python, FieldType.C_BINARY: _crow_binary_to_python,
FieldType.C_TIMESTAMP : _crow_timestamp_to_python, FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR : _crow_nchar_to_python FieldType.C_NCHAR: _crow_nchar_to_python,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
} }
_CONVERT_FUNC_BLOCK = { _CONVERT_FUNC_BLOCK = {
FieldType.C_BOOL: _crow_bool_to_python, FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT : _crow_tinyint_to_python, FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT : _crow_smallint_to_python, FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT : _crow_int_to_python, FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT : _crow_bigint_to_python, FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT : _crow_float_to_python, FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE : _crow_double_to_python, FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python_block, FieldType.C_BINARY: _crow_binary_to_python_block,
FieldType.C_TIMESTAMP : _crow_timestamp_to_python, FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR : _crow_nchar_to_python_block FieldType.C_NCHAR: _crow_nchar_to_python_block,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
} }
# Corresponding TAOS_FIELD structure in C # Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure): class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65), _fields_ = [('name', ctypes.c_char * 65),
('type', ctypes.c_char), ('type', ctypes.c_char),
('bytes', ctypes.c_short)] ('bytes', ctypes.c_short)]
# C interface class # C interface class
class CTaosInterface(object): class CTaosInterface(object):
libtaos = ctypes.CDLL('libtaos.so') libtaos = ctypes.CDLL('libtaos.so')
@ -216,7 +360,7 @@ class CTaosInterface(object):
except AttributeError: except AttributeError:
raise AttributeError("config is expected as a str") raise AttributeError("config is expected as a str")
if config != None: if config is not None:
CTaosInterface.libtaos.taos_options(3, self._config) CTaosInterface.libtaos.taos_options(3, self._config)
CTaosInterface.libtaos.taos_init() CTaosInterface.libtaos.taos_init()
@ -227,7 +371,13 @@ class CTaosInterface(object):
""" """
return self._config return self._config
def connect(self, host=None, user="root", password="taosdata", db=None, port=0): def connect(
self,
host=None,
user="root",
password="taosdata",
db=None,
port=0):
''' '''
Function to connect to server Function to connect to server
@ -236,7 +386,7 @@ class CTaosInterface(object):
# host # host
try: try:
_host = ctypes.c_char_p(host.encode( _host = ctypes.c_char_p(host.encode(
"utf-8")) if host != None else ctypes.c_char_p(None) "utf-8")) if host is not None else ctypes.c_char_p(None)
except AttributeError: except AttributeError:
raise AttributeError("host is expected as a str") raise AttributeError("host is expected as a str")
@ -255,7 +405,7 @@ class CTaosInterface(object):
# db # db
try: try:
_db = ctypes.c_char_p( _db = ctypes.c_char_p(
db.encode("utf-8")) if db != None else ctypes.c_char_p(None) db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
except AttributeError: except AttributeError:
raise AttributeError("db is expected as a str") raise AttributeError("db is expected as a str")
@ -268,11 +418,11 @@ class CTaosInterface(object):
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect( connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
_host, _user, _password, _db, _port)) _host, _user, _password, _db, _port))
if connection.value == None: if connection.value is None:
print('connect to TDengine failed') print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed") raise ConnectionError("connect to TDengine failed")
# sys.exit(1) # sys.exit(1)
#else: # else:
# print('connect to TDengine success') # print('connect to TDengine success')
return connection return connection
@ -293,7 +443,8 @@ class CTaosInterface(object):
@rtype: 0 on success and -1 on failure @rtype: 0 on success and -1 on failure
''' '''
try: try:
return CTaosInterface.libtaos.taos_query(connection, ctypes.c_char_p(sql.encode('utf-8'))) return CTaosInterface.libtaos.taos_query(
connection, ctypes.c_char_p(sql.encode('utf-8')))
except AttributeError: except AttributeError:
raise AttributeError("sql is expected as a string") raise AttributeError("sql is expected as a string")
# finally: # finally:
@ -308,7 +459,7 @@ class CTaosInterface(object):
@staticmethod @staticmethod
def subscribe(connection, restart, topic, sql, interval): def subscribe(connection, restart, topic, sql, interval):
"""Create a subscription """Create a subscription
@restart boolean, @restart boolean,
@sql string, sql statement for data query, must be a 'select' statement. @sql string, sql statement for data query, must be a 'select' statement.
@topic string, name of this subscription @topic string, name of this subscription
""" """
@ -360,35 +511,49 @@ class CTaosInterface(object):
result, ctypes.byref(pblock)) result, ctypes.byref(pblock))
if num_of_rows == 0: if num_of_rows == 0:
return None, 0 return None, 0
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO) isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields) blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result) fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]] fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)): for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i] data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK: if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
raise DatabaseError("Invalid data type returned from database") raise DatabaseError("Invalid data type returned from database")
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro) blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
return blocks, abs(num_of_rows) return blocks, abs(num_of_rows)
@staticmethod @staticmethod
def fetchRow(result, fields): def fetchRow(result, fields):
pblock = ctypes.c_void_p(0) pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result) pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock : if pblock:
num_of_rows = 1 num_of_rows = 1
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO) isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields) blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result) fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]] fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)): for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i] data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC: if fields[i]['type'] not in _CONVERT_FUNC:
raise DatabaseError("Invalid data type returned from database") raise DatabaseError(
"Invalid data type returned from database")
if data is None: if data is None:
blocks[i] = [None] blocks[i] = [None]
else: else:
blocks[i] = _CONVERT_FUNC[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro) blocks[i] = _CONVERT_FUNC[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
else: else:
return None, 0 return None, 0
return blocks, abs(num_of_rows) return blocks, abs(num_of_rows)
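
For readers skimming the new `_crow_*_unsigned_to_python` converters, the pattern is always the same: cast the raw column buffer to a typed pointer, slice out `num_of_rows` elements, and map the unsigned NULL sentinel to `None`. A standalone sketch of that pattern follows; it is illustrative only (it uses `ctypes.c_ubyte` for clarity, while the connector code above uses `c_byte`) and is not part of the connector.

```python
import ctypes

C_TINYINT_UNSIGNED_NULL = 255  # sentinel value, see the FieldType additions further below

raw = (ctypes.c_ubyte * 4)(7, 255, 42, 255)   # 255 marks a NULL cell in this sketch
data = ctypes.cast(raw, ctypes.c_void_p)      # shape of what the C layer hands over
num_of_rows = 4

values = [
    None if ele == C_TINYINT_UNSIGNED_NULL else ele
    for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_ubyte))[:num_of_rows]
]
print(values)  # [7, None, 42, None]
```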

View File

@ -2,9 +2,11 @@ from .cursor import TDengineCursor
from .subscription import TDengineSubscription from .subscription import TDengineSubscription
from .cinterface import CTaosInterface from .cinterface import CTaosInterface
class TDengineConnection(object): class TDengineConnection(object):
""" TDengine connection object """ TDengine connection object
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self._conn = None self._conn = None
self._host = None self._host = None
@ -29,7 +31,7 @@ class TDengineConnection(object):
# password # password
if 'password' in kwargs: if 'password' in kwargs:
self._password = kwargs['password'] self._password = kwargs['password']
# database # database
if 'database' in kwargs: if 'database' in kwargs:
self._database = kwargs['database'] self._database = kwargs['database']
@ -43,7 +45,12 @@ class TDengineConnection(object):
self._config = kwargs['config'] self._config = kwargs['config']
self._chandle = CTaosInterface(self._config) self._chandle = CTaosInterface(self._config)
self._conn = self._chandle.connect(self._host, self._user, self._password, self._database, self._port) self._conn = self._chandle.connect(
self._host,
self._user,
self._password,
self._database,
self._port)
def close(self): def close(self):
"""Close current connection. """Close current connection.
@ -55,7 +62,8 @@ class TDengineConnection(object):
""" """
if self._conn is None: if self._conn is None:
return None return None
sub = CTaosInterface.subscribe(self._conn, restart, topic, sql, interval) sub = CTaosInterface.subscribe(
self._conn, restart, topic, sql, interval)
return TDengineSubscription(sub) return TDengineSubscription(sub)
def cursor(self): def cursor(self):
@ -80,7 +88,8 @@ class TDengineConnection(object):
""" """
pass pass
if __name__ == "__main__": if __name__ == "__main__":
conn = TDengineConnection(host='192.168.1.107') conn = TDengineConnection(host='192.168.1.107')
conn.close() conn.close()
print("Hello world") print("Hello world")

View File

@ -3,6 +3,7 @@
from .dbapi import * from .dbapi import *
class FieldType(object): class FieldType(object):
"""TDengine Field Types """TDengine Field Types
""" """
@ -18,13 +19,21 @@ class FieldType(object):
C_BINARY = 8 C_BINARY = 8
C_TIMESTAMP = 9 C_TIMESTAMP = 9
C_NCHAR = 10 C_NCHAR = 10
C_TINYINT_UNSIGNED = 12
C_SMALLINT_UNSIGNED = 13
C_INT_UNSIGNED = 14
C_BIGINT_UNSIGNED = 15
# NULL value definition # NULL value definition
# NOTE: These values should change according to C definition in tsdb.h # NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL = 0x02 C_BOOL_NULL = 0x02
C_TINYINT_NULL = -128 C_TINYINT_NULL = -128
C_TINYINT_UNSIGNED_NULL = 255
C_SMALLINT_NULL = -32768 C_SMALLINT_NULL = -32768
C_SMALLINT_UNSIGNED_NULL = 65535
C_INT_NULL = -2147483648 C_INT_NULL = -2147483648
C_INT_UNSIGNED_NULL = 4294967295
C_BIGINT_NULL = -9223372036854775808 C_BIGINT_NULL = -9223372036854775808
C_BIGINT_UNSIGNED_NULL = 18446744073709551615
C_FLOAT_NULL = float('nan') C_FLOAT_NULL = float('nan')
C_DOUBLE_NULL = float('nan') C_DOUBLE_NULL = float('nan')
C_BINARY_NULL = bytearray([int('0xff', 16)]) C_BINARY_NULL = bytearray([int('0xff', 16)])
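
The unsigned NULL markers added above are simply the all-ones value of each integer width. A quick standalone check (values copied from the additions, nothing else assumed):

```python
# Each unsigned NULL sentinel added above equals 2**bits - 1 for its width.
UNSIGNED_NULLS = {
    8: 255,                    # C_TINYINT_UNSIGNED_NULL
    16: 65535,                 # C_SMALLINT_UNSIGNED_NULL
    32: 4294967295,            # C_INT_UNSIGNED_NULL
    64: 18446744073709551615,  # C_BIGINT_UNSIGNED_NULL
}
for bits, sentinel in UNSIGNED_NULLS.items():
    assert sentinel == 2 ** bits - 1
```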

View File

@ -5,6 +5,7 @@ import threading
# querySeqNum = 0 # querySeqNum = 0
class TDengineCursor(object): class TDengineCursor(object):
"""Database cursor which is used to manage the context of a fetch operation. """Database cursor which is used to manage the context of a fetch operation.
@ -107,8 +108,8 @@ class TDengineCursor(object):
# if threading.get_ident() != self._threadId: # if threading.get_ident() != self._threadId:
# info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident()) # info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
# raise OperationalError(info) # raise OperationalError(info)
# print(info) # print(info)
# return None # return None
if not operation: if not operation:
return None return None
@ -137,8 +138,8 @@ class TDengineCursor(object):
if errno == 0: if errno == 0:
if CTaosInterface.fieldsCount(self._result) == 0: if CTaosInterface.fieldsCount(self._result) == 0:
self._affected_rows += CTaosInterface.affectedRows( self._affected_rows += CTaosInterface.affectedRows(
self._result ) self._result)
return CTaosInterface.affectedRows(self._result ) return CTaosInterface.affectedRows(self._result)
else: else:
self._fields = CTaosInterface.useResult( self._fields = CTaosInterface.useResult(
self._result) self._result)
@ -168,11 +169,26 @@ class TDengineCursor(object):
if (dataType.upper() == "TINYINT"): if (dataType.upper() == "TINYINT"):
if (self._description[col][1] == FieldType.C_TINYINT): if (self._description[col][1] == FieldType.C_TINYINT):
return True return True
if (dataType.upper() == "TINYINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
return True
if (dataType.upper() == "SMALLINT"):
if (self._description[col][1] == FieldType.C_SMALLINT):
return True
if (dataType.upper() == "SMALLINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
return True
if (dataType.upper() == "INT"): if (dataType.upper() == "INT"):
if (self._description[col][1] == FieldType.C_INT): if (self._description[col][1] == FieldType.C_INT):
return True return True
if (dataType.upper() == "INT UNSIGNED"):
if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
return True
if (dataType.upper() == "BIGINT"): if (dataType.upper() == "BIGINT"):
if (self._description[col][1] == FieldType.C_INT): if (self._description[col][1] == FieldType.C_BIGINT):
return True
if (dataType.upper() == "BIGINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
return True return True
if (dataType.upper() == "FLOAT"): if (dataType.upper() == "FLOAT"):
if (self._description[col][1] == FieldType.C_FLOAT): if (self._description[col][1] == FieldType.C_FLOAT):
@ -201,10 +217,13 @@ class TDengineCursor(object):
buffer = [[] for i in range(len(self._fields))] buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0 self._rowcount = 0
while True: while True:
block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields) block, num_of_fields = CTaosInterface.fetchRow(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result) errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0: if errno != 0:
raise ProgrammingError(CTaosInterface.errStr(self._result), errno) raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0: if num_of_fields == 0:
break break
self._rowcount += num_of_fields self._rowcount += num_of_fields
@ -219,15 +238,20 @@ class TDengineCursor(object):
buffer = [[] for i in range(len(self._fields))] buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0 self._rowcount = 0
while True: while True:
block, num_of_fields = CTaosInterface.fetchBlock(self._result, self._fields) block, num_of_fields = CTaosInterface.fetchBlock(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result) errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0: if errno != 0:
raise ProgrammingError(CTaosInterface.errStr(self._result), errno) raise ProgrammingError(
if num_of_fields == 0: break CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields self._rowcount += num_of_fields
for i in range(len(self._fields)): for i in range(len(self._fields)):
buffer[i].extend(block[i]) buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer))) return list(map(tuple, zip(*buffer)))
def nextset(self): def nextset(self):
""" """
""" """
@ -259,8 +283,8 @@ class TDengineCursor(object):
# if threading.get_ident() != self._threadId: # if threading.get_ident() != self._threadId:
# info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident()) # info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
# raise OperationalError(info) # raise OperationalError(info)
# print(info) # print(info)
# return None # return None
self._description = [] self._description = []
for ele in self._fields: for ele in self._fields:
@ -268,4 +292,3 @@ class TDengineCursor(object):
(ele['name'], ele['type'], None, None, None, None, False)) (ele['name'], ele['type'], None, None, None, None, False))
return self._result return self._result
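
The reformatted `execute()` path above keeps its two return modes: for statements with no result set it returns the affected row count, otherwise the rows are retrieved with `fetchall()`/`fetchall_block()`. A hedged sketch of both modes (the database and table names are made up, and the unsigned column assumes a server build that supports unsigned types):

```python
import taos

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
c = conn.cursor()
c.execute("create database if not exists demo")
c.execute("create table if not exists demo.t1 (ts timestamp, cnt bigint unsigned)")
affected = c.execute("insert into demo.t1 values (now, 42)")
print("rows written:", affected)        # affectedRows branch (no result fields)
c.execute("select * from demo.t1")
print(c.fetchall())                     # result-set branch
conn.close()
```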

View File

@ -4,6 +4,7 @@
import time import time
import datetime import datetime
class DBAPITypeObject(object): class DBAPITypeObject(object):
def __init__(self, *values): def __init__(self, *values):
self.values = values self.values = values
@ -16,23 +17,28 @@ class DBAPITypeObject(object):
else: else:
return -1 return -1
Date = datetime.date Date = datetime.date
Time = datetime.time Time = datetime.time
Timestamp = datetime.datetime Timestamp = datetime.datetime
def DataFromTicks(ticks): def DataFromTicks(ticks):
return Date(*time.localtime(ticks)[:3]) return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks): def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6]) return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks): def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6]) return Timestamp(*time.localtime(ticks)[:6])
Binary = bytes Binary = bytes
# STRING = DBAPITypeObject(*constants.FieldType.get_string_types()) # STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types()) # BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
# NUMBER = BAPITypeObject(*constants.FieldType.get_number_types()) # NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types()) # DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# ROWID = DBAPITypeObject() # ROWID = DBAPITypeObject()
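
The DB-API helpers above wrap `time.localtime()`; note that the module spells the date helper `DataFromTicks` rather than PEP 249's `DateFromTicks`. A small usage sketch, assuming the module path `taos.dbapi` (inferred from the `from .dbapi import *` line earlier) and that the client shared library is installed, since importing the package loads it:

```python
import time
from taos.dbapi import DataFromTicks, TimeFromTicks, TimestampFromTicks

ticks = time.time()
print(DataFromTicks(ticks))       # datetime.date for the current day
print(TimeFromTicks(ticks))       # datetime.time, second precision
print(TimestampFromTicks(ticks))  # datetime.datetime, second precision
```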

View File

@ -1,35 +1,41 @@
"""Python exceptions """Python exceptions
""" """
class Error(Exception): class Error(Exception):
def __init__(self, msg=None, errno=None): def __init__(self, msg=None, errno=None):
self.msg = msg self.msg = msg
self._full_msg = self.msg self._full_msg = self.msg
self.errno = errno self.errno = errno
def __str__(self): def __str__(self):
return self._full_msg return self._full_msg
class Warning(Exception): class Warning(Exception):
"""Exception raised for important warnings like data truncations while inserting. """Exception raised for important warnings like data truncations while inserting.
""" """
pass pass
class InterfaceError(Error): class InterfaceError(Error):
"""Exception raised for errors that are related to the database interface rather than the database itself. """Exception raised for errors that are related to the database interface rather than the database itself.
""" """
pass pass
class DatabaseError(Error): class DatabaseError(Error):
"""Exception raised for errors that are related to the database. """Exception raised for errors that are related to the database.
""" """
pass pass
class DataError(DatabaseError): class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range. """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
""" """
pass pass
class OperationalError(DatabaseError): class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
""" """
@ -41,17 +47,20 @@ class IntegrityError(DatabaseError):
""" """
pass pass
class InternalError(DatabaseError): class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal error. """Exception raised when the database encounters an internal error.
""" """
pass pass
class ProgrammingError(DatabaseError): class ProgrammingError(DatabaseError):
"""Exception raised for programming errors. """Exception raised for programming errors.
""" """
pass pass
class NotSupportedError(DatabaseError): class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used which is not supported by the database,. """Exception raised in case a method or database API was used which is not supported by the database,.
""" """
pass pass
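
Since all the specific exceptions above derive from `DatabaseError` (itself an `Error`), callers can catch them at whatever granularity they need; `fetchall()`/`fetchall_block()` raise `ProgrammingError` when `taos_errno` reports a failure. A hedged sketch, assuming a reachable server and the module path `taos.error`:

```python
import taos
from taos.error import OperationalError, ProgrammingError

try:
    conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
    c = conn.cursor()
    c.execute("select * from no_such_db.no_such_table")
    c.fetchall()
except (OperationalError, ProgrammingError) as err:
    print("query failed:", err.errno, err)   # errno and message come from Error.__init__
```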

View File

@ -1,32 +1,33 @@
from .cinterface import CTaosInterface from .cinterface import CTaosInterface
from .error import * from .error import *
class TDengineSubscription(object): class TDengineSubscription(object):
"""TDengine subscription object """TDengine subscription object
""" """
def __init__(self, sub): def __init__(self, sub):
self._sub = sub self._sub = sub
def consume(self): def consume(self):
"""Consume rows of a subscription """Consume rows of a subscription
""" """
if self._sub is None: if self._sub is None:
raise OperationalError("Invalid use of consume") raise OperationalError("Invalid use of consume")
result, fields = CTaosInterface.consume(self._sub) result, fields = CTaosInterface.consume(self._sub)
buffer = [[] for i in range(len(fields))] buffer = [[] for i in range(len(fields))]
while True: while True:
block, num_of_fields = CTaosInterface.fetchBlock(result, fields) block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
if num_of_fields == 0: break if num_of_fields == 0:
break
for i in range(len(fields)): for i in range(len(fields)):
buffer[i].extend(block[i]) buffer[i].extend(block[i])
self.fields = fields self.fields = fields
return list(map(tuple, zip(*buffer))) return list(map(tuple, zip(*buffer)))
def close(self, keepProgress=True):
def close(self, keepProgress = True):
"""Close the Subscription. """Close the Subscription.
""" """
if self._sub is None: if self._sub is None:
@ -38,15 +39,19 @@ class TDengineSubscription(object):
if __name__ == '__main__': if __name__ == '__main__':
from .connection import TDengineConnection from .connection import TDengineConnection
conn = TDengineConnection(host="127.0.0.1", user="root", password="taosdata", database="test") conn = TDengineConnection(
host="127.0.0.1",
user="root",
password="taosdata",
database="test")
# Generate a cursor object to run SQL commands # Generate a cursor object to run SQL commands
sub = conn.subscribe(True, "test", "select * from meters;", 1000) sub = conn.subscribe(True, "test", "select * from meters;", 1000)
for i in range(0,10): for i in range(0, 10):
data = sub.consume() data = sub.consume()
for d in data: for d in data:
print(d) print(d)
sub.close() sub.close()
conn.close() conn.close()
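
The `__main__` block above already shows the basic flow; the detail worth spelling out is the new keyword-default form `close(keepProgress=True)`: passing False is meant to drop the subscription's saved progress instead of keeping it for the next run. A sketch under the same assumptions as that block (running server, database `test`, super table `meters`; the topic name here is made up):

```python
import time
import taos

conn = taos.connect(host="127.0.0.1", user="root",
                    password="taosdata", database="test")
sub = conn.subscribe(True, "py_sub_demo", "select * from meters;", 1000)
for _ in range(3):
    rows = sub.consume()
    print(len(rows), "new rows")
    time.sleep(1)
sub.close(keepProgress=False)   # discard saved progress rather than keeping it
conn.close()
```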

View File

@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
setuptools.setup( setuptools.setup(
name="taos", name="taos",
version="2.0.4", version="2.0.5",
author="Taosdata Inc.", author="Taosdata Inc.",
author_email="support@taosdata.com", author_email="support@taosdata.com",
description="TDengine python client package", description="TDengine python client package",

View File

@ -3,12 +3,12 @@ from .connection import TDengineConnection
from .cursor import TDengineCursor from .cursor import TDengineCursor
# Globals # Globals
apilevel = '2.0.4'
threadsafety = 0 threadsafety = 0
paramstyle = 'pyformat' paramstyle = 'pyformat'
__all__ = ['connection', 'cursor'] __all__ = ['connection', 'cursor']
def connect(*args, **kwargs): def connect(*args, **kwargs):
""" Function to return a TDengine connector object """ Function to return a TDengine connector object

View File

@ -4,11 +4,14 @@ from .error import *
import math import math
import datetime import datetime
def _convert_millisecond_to_datetime(milli): def _convert_millisecond_to_datetime(milli):
return datetime.datetime.fromtimestamp(milli/1000.0) return datetime.datetime.fromtimestamp(milli / 1000.0)
def _convert_microsecond_to_datetime(micro): def _convert_microsecond_to_datetime(micro):
return datetime.datetime.fromtimestamp(micro/1000000.0) return datetime.datetime.fromtimestamp(micro / 1000000.0)
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row """Function to convert C bool row to python row
@ -18,168 +21,309 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
_timestamp_converter = _convert_microsecond_to_datetime _timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0: if num_of_rows > 0:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)])) return list(map(_timestamp_converter, ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
else: else:
return list(map(_timestamp_converter, ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)])) return list(map(_timestamp_converter, ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]))
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row """Function to convert C bool row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_bool))[
:abs(num_of_rows)]]
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row """Function to convert C tinyint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
def _crow_tinyint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row """Function to convert C smallint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]] return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)] ] return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_smallint_unsigned_to_python(
data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row """Function to convert C int row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int))[
:abs(num_of_rows)]]
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row """Function to convert C bigint row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
else: else:
return [ None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)] ] return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_long))[:abs(num_of_rows)]]
def _crow_bigint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_long))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_long))[
:abs(num_of_rows)]]
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row """Function to convert C float row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
else: else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row """Function to convert C double row to python row
""" """
if num_of_rows > 0: if num_of_rows > 0:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
else: else:
return [ None if math.isnan(ele) else ele for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)] ] return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row """Function to convert C binary row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
if num_of_rows > 0: if num_of_rows > 0:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]] return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else: else:
return [ None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]] return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False): def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row """Function to convert C nchar row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
if num_of_rows >= 0: if num_of_rows >= 0:
tmpstr = ctypes.c_char_p(data) tmpstr = ctypes.c_char_p(data)
res.append( tmpstr.value.decode() ) res.append(tmpstr.value.decode())
else: else:
res.append( (ctypes.cast(data+nbytes*i, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value ) res.append((ctypes.cast(data + nbytes * i,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False): def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row """Function to convert C binary row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
if num_of_rows > 0: if num_of_rows > 0:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop() rbyte = ctypes.cast(
tmpstr = ctypes.c_char_p(data+nbytes*i+2) data + nbytes * i,
res.append( tmpstr.value.decode()[0:rbyte] ) ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError: except ValueError:
res.append(None) res.append(None)
else: else:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
rbyte=ctypes.cast(data+nbytes*i,ctypes.POINTER(ctypes.c_short))[:1].pop() rbyte = ctypes.cast(
tmpstr = ctypes.c_char_p(data+nbytes*i+2) data + nbytes * i,
res.append( tmpstr.value.decode()[0:rbyte] ) ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False): def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row """Function to convert C nchar row to python row
""" """
assert(nbytes is not None) assert(nbytes is not None)
res=[] res = []
if num_of_rows >= 0: if num_of_rows >= 0:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
tmpstr = ctypes.c_char_p(data+nbytes*i+2) tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append( tmpstr.value.decode() ) res.append(tmpstr.value.decode())
except ValueError: except ValueError:
res.append(None) res.append(None)
else: else:
for i in range(abs(num_of_rows)): for i in range(abs(num_of_rows)):
try: try:
res.append( (ctypes.cast(data+nbytes*i+2, ctypes.POINTER(ctypes.c_wchar * (nbytes//4))))[0].value ) res.append((ctypes.cast(data + nbytes * i + 2,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError: except ValueError:
res.append(None) res.append(None)
return res return res
_CONVERT_FUNC = { _CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python, FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT : _crow_tinyint_to_python, FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT : _crow_smallint_to_python, FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT : _crow_int_to_python, FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT : _crow_bigint_to_python, FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT : _crow_float_to_python, FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE : _crow_double_to_python, FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python, FieldType.C_BINARY: _crow_binary_to_python,
FieldType.C_TIMESTAMP : _crow_timestamp_to_python, FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR : _crow_nchar_to_python FieldType.C_NCHAR: _crow_nchar_to_python,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
} }
_CONVERT_FUNC_BLOCK = { _CONVERT_FUNC_BLOCK = {
FieldType.C_BOOL: _crow_bool_to_python, FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT : _crow_tinyint_to_python, FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT : _crow_smallint_to_python, FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT : _crow_int_to_python, FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT : _crow_bigint_to_python, FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT : _crow_float_to_python, FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE : _crow_double_to_python, FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python_block, FieldType.C_BINARY: _crow_binary_to_python_block,
FieldType.C_TIMESTAMP : _crow_timestamp_to_python, FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR : _crow_nchar_to_python_block FieldType.C_NCHAR: _crow_nchar_to_python_block,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
} }
# Corresponding TAOS_FIELD structure in C # Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure): class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65), _fields_ = [('name', ctypes.c_char * 65),
('type', ctypes.c_char), ('type', ctypes.c_char),
('bytes', ctypes.c_short)] ('bytes', ctypes.c_short)]
# C interface class # C interface class
class CTaosInterface(object): class CTaosInterface(object):
libtaos = ctypes.CDLL('libtaos.dylib') libtaos = ctypes.CDLL('libtaos.dylib')
@ -216,7 +360,7 @@ class CTaosInterface(object):
except AttributeError: except AttributeError:
raise AttributeError("config is expected as a str") raise AttributeError("config is expected as a str")
if config != None: if config is not None:
CTaosInterface.libtaos.taos_options(3, self._config) CTaosInterface.libtaos.taos_options(3, self._config)
CTaosInterface.libtaos.taos_init() CTaosInterface.libtaos.taos_init()
@ -227,7 +371,13 @@ class CTaosInterface(object):
""" """
return self._config return self._config
def connect(self, host=None, user="root", password="taosdata", db=None, port=0): def connect(
self,
host=None,
user="root",
password="taosdata",
db=None,
port=0):
''' '''
Function to connect to server Function to connect to server
@ -236,7 +386,7 @@ class CTaosInterface(object):
# host # host
try: try:
_host = ctypes.c_char_p(host.encode( _host = ctypes.c_char_p(host.encode(
"utf-8")) if host != None else ctypes.c_char_p(None) "utf-8")) if host is not None else ctypes.c_char_p(None)
except AttributeError: except AttributeError:
raise AttributeError("host is expected as a str") raise AttributeError("host is expected as a str")
@ -255,7 +405,7 @@ class CTaosInterface(object):
# db # db
try: try:
_db = ctypes.c_char_p( _db = ctypes.c_char_p(
db.encode("utf-8")) if db != None else ctypes.c_char_p(None) db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
except AttributeError: except AttributeError:
raise AttributeError("db is expected as a str") raise AttributeError("db is expected as a str")
@ -268,11 +418,11 @@ class CTaosInterface(object):
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect( connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
_host, _user, _password, _db, _port)) _host, _user, _password, _db, _port))
if connection.value == None: if connection.value is None:
print('connect to TDengine failed') print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed") raise ConnectionError("connect to TDengine failed")
# sys.exit(1) # sys.exit(1)
#else: # else:
# print('connect to TDengine success') # print('connect to TDengine success')
return connection return connection
@ -293,7 +443,8 @@ class CTaosInterface(object):
@rtype: 0 on success and -1 on failure @rtype: 0 on success and -1 on failure
''' '''
try: try:
return CTaosInterface.libtaos.taos_query(connection, ctypes.c_char_p(sql.encode('utf-8'))) return CTaosInterface.libtaos.taos_query(
connection, ctypes.c_char_p(sql.encode('utf-8')))
except AttributeError: except AttributeError:
raise AttributeError("sql is expected as a string") raise AttributeError("sql is expected as a string")
# finally: # finally:
@ -308,7 +459,7 @@ class CTaosInterface(object):
@staticmethod @staticmethod
def subscribe(connection, restart, topic, sql, interval): def subscribe(connection, restart, topic, sql, interval):
"""Create a subscription """Create a subscription
@restart boolean, @restart boolean,
@sql string, sql statement for data query, must be a 'select' statement. @sql string, sql statement for data query, must be a 'select' statement.
@topic string, name of this subscription @topic string, name of this subscription
""" """
@ -360,35 +511,49 @@ class CTaosInterface(object):
result, ctypes.byref(pblock)) result, ctypes.byref(pblock))
if num_of_rows == 0: if num_of_rows == 0:
return None, 0 return None, 0
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO) isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields) blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result) fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]] fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)): for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i] data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK: if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
raise DatabaseError("Invalid data type returned from database") raise DatabaseError("Invalid data type returned from database")
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro) blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
return blocks, abs(num_of_rows) return blocks, abs(num_of_rows)
@staticmethod @staticmethod
def fetchRow(result, fields): def fetchRow(result, fields):
pblock = ctypes.c_void_p(0) pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result) pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock : if pblock:
num_of_rows = 1 num_of_rows = 1
isMicro = (CTaosInterface.libtaos.taos_result_precision(result) == FieldType.C_TIMESTAMP_MICRO) isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields) blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result) fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]] fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)): for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i] data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC: if fields[i]['type'] not in _CONVERT_FUNC:
raise DatabaseError("Invalid data type returned from database") raise DatabaseError(
"Invalid data type returned from database")
if data is None: if data is None:
blocks[i] = [None] blocks[i] = [None]
else: else:
blocks[i] = _CONVERT_FUNC[fields[i]['type']](data, num_of_rows, fieldLen[i], isMicro) blocks[i] = _CONVERT_FUNC[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
else: else:
return None, 0 return None, 0
return blocks, abs(num_of_rows) return blocks, abs(num_of_rows)


@ -2,9 +2,11 @@ from .cursor import TDengineCursor
from .subscription import TDengineSubscription from .subscription import TDengineSubscription
from .cinterface import CTaosInterface from .cinterface import CTaosInterface
class TDengineConnection(object): class TDengineConnection(object):
""" TDengine connection object """ TDengine connection object
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self._conn = None self._conn = None
self._host = None self._host = None
@ -29,7 +31,7 @@ class TDengineConnection(object):
# password # password
if 'password' in kwargs: if 'password' in kwargs:
self._password = kwargs['password'] self._password = kwargs['password']
# database # database
if 'database' in kwargs: if 'database' in kwargs:
self._database = kwargs['database'] self._database = kwargs['database']
@ -43,7 +45,12 @@ class TDengineConnection(object):
self._config = kwargs['config'] self._config = kwargs['config']
self._chandle = CTaosInterface(self._config) self._chandle = CTaosInterface(self._config)
self._conn = self._chandle.connect(self._host, self._user, self._password, self._database, self._port) self._conn = self._chandle.connect(
self._host,
self._user,
self._password,
self._database,
self._port)
def close(self): def close(self):
"""Close current connection. """Close current connection.
@ -55,7 +62,8 @@ class TDengineConnection(object):
""" """
if self._conn is None: if self._conn is None:
return None return None
sub = CTaosInterface.subscribe(self._conn, restart, topic, sql, interval) sub = CTaosInterface.subscribe(
self._conn, restart, topic, sql, interval)
return TDengineSubscription(sub) return TDengineSubscription(sub)
def cursor(self): def cursor(self):
@ -80,7 +88,8 @@ class TDengineConnection(object):
""" """
pass pass
if __name__ == "__main__": if __name__ == "__main__":
conn = TDengineConnection(host='192.168.1.107') conn = TDengineConnection(host='192.168.1.107')
conn.close() conn.close()
print("Hello world") print("Hello world")


@ -3,6 +3,7 @@
from .dbapi import * from .dbapi import *
class FieldType(object): class FieldType(object):
"""TDengine Field Types """TDengine Field Types
""" """
@ -18,13 +19,21 @@ class FieldType(object):
C_BINARY = 8 C_BINARY = 8
C_TIMESTAMP = 9 C_TIMESTAMP = 9
C_NCHAR = 10 C_NCHAR = 10
C_TINYINT_UNSIGNED = 12
C_SMALLINT_UNSIGNED = 13
C_INT_UNSIGNED = 14
C_BIGINT_UNSIGNED = 15
# NULL value definition # NULL value definition
# NOTE: These values should change according to C definition in tsdb.h # NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL = 0x02 C_BOOL_NULL = 0x02
C_TINYINT_NULL = -128 C_TINYINT_NULL = -128
C_TINYINT_UNSIGNED_NULL = 255
C_SMALLINT_NULL = -32768 C_SMALLINT_NULL = -32768
C_SMALLINT_UNSIGNED_NULL = 65535
C_INT_NULL = -2147483648 C_INT_NULL = -2147483648
C_INT_UNSIGNED_NULL = 4294967295
C_BIGINT_NULL = -9223372036854775808 C_BIGINT_NULL = -9223372036854775808
C_BIGINT_UNSIGNED_NULL = 18446744073709551615
C_FLOAT_NULL = float('nan') C_FLOAT_NULL = float('nan')
C_DOUBLE_NULL = float('nan') C_DOUBLE_NULL = float('nan')
C_BINARY_NULL = bytearray([int('0xff', 16)]) C_BINARY_NULL = bytearray([int('0xff', 16)])
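The block above adds unsigned column types and their NULL sentinel values to FieldType. As a quick illustration (a standalone sketch, not part of the connector API; the helper name and dict are made up here), this is how such sentinels map to Python None:

```python
# Illustrative sketch only: the sentinel values mirror the FieldType constants
# added above; the helper itself is hypothetical and not part of the connector.
UNSIGNED_NULLS = {
    "TINYINT UNSIGNED": 255,
    "SMALLINT UNSIGNED": 65535,
    "INT UNSIGNED": 4294967295,
    "BIGINT UNSIGNED": 18446744073709551615,
}

def to_python_value(raw, type_name):
    """Return None when raw equals the NULL sentinel of its unsigned type."""
    return None if raw == UNSIGNED_NULLS.get(type_name) else raw

assert to_python_value(4294967295, "INT UNSIGNED") is None
assert to_python_value(42, "INT UNSIGNED") == 42
```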


@ -5,6 +5,7 @@ import threading
# querySeqNum = 0 # querySeqNum = 0
class TDengineCursor(object): class TDengineCursor(object):
"""Database cursor which is used to manage the context of a fetch operation. """Database cursor which is used to manage the context of a fetch operation.
@ -107,8 +108,8 @@ class TDengineCursor(object):
# if threading.get_ident() != self._threadId: # if threading.get_ident() != self._threadId:
# info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident()) # info ="Cursor execute:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
# raise OperationalError(info) # raise OperationalError(info)
# print(info) # print(info)
# return None # return None
if not operation: if not operation:
return None return None
@ -137,8 +138,8 @@ class TDengineCursor(object):
if errno == 0: if errno == 0:
if CTaosInterface.fieldsCount(self._result) == 0: if CTaosInterface.fieldsCount(self._result) == 0:
self._affected_rows += CTaosInterface.affectedRows( self._affected_rows += CTaosInterface.affectedRows(
self._result ) self._result)
return CTaosInterface.affectedRows(self._result ) return CTaosInterface.affectedRows(self._result)
else: else:
self._fields = CTaosInterface.useResult( self._fields = CTaosInterface.useResult(
self._result) self._result)
@ -168,11 +169,26 @@ class TDengineCursor(object):
if (dataType.upper() == "TINYINT"): if (dataType.upper() == "TINYINT"):
if (self._description[col][1] == FieldType.C_TINYINT): if (self._description[col][1] == FieldType.C_TINYINT):
return True return True
if (dataType.upper() == "TINYINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
return True
if (dataType.upper() == "SMALLINT"):
if (self._description[col][1] == FieldType.C_SMALLINT):
return True
if (dataType.upper() == "SMALLINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
return True
if (dataType.upper() == "INT"): if (dataType.upper() == "INT"):
if (self._description[col][1] == FieldType.C_INT): if (self._description[col][1] == FieldType.C_INT):
return True return True
if (dataType.upper() == "INT UNSIGNED"):
if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
return True
if (dataType.upper() == "BIGINT"): if (dataType.upper() == "BIGINT"):
if (self._description[col][1] == FieldType.C_INT): if (self._description[col][1] == FieldType.C_BIGINT):
return True
if (dataType.upper() == "BIGINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
return True return True
if (dataType.upper() == "FLOAT"): if (dataType.upper() == "FLOAT"):
if (self._description[col][1] == FieldType.C_FLOAT): if (self._description[col][1] == FieldType.C_FLOAT):
@ -201,10 +217,13 @@ class TDengineCursor(object):
buffer = [[] for i in range(len(self._fields))] buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0 self._rowcount = 0
while True: while True:
block, num_of_fields = CTaosInterface.fetchRow(self._result, self._fields) block, num_of_fields = CTaosInterface.fetchRow(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result) errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0: if errno != 0:
raise ProgrammingError(CTaosInterface.errStr(self._result), errno) raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0: if num_of_fields == 0:
break break
self._rowcount += num_of_fields self._rowcount += num_of_fields
@ -219,15 +238,20 @@ class TDengineCursor(object):
buffer = [[] for i in range(len(self._fields))] buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0 self._rowcount = 0
while True: while True:
block, num_of_fields = CTaosInterface.fetchBlock(self._result, self._fields) block, num_of_fields = CTaosInterface.fetchBlock(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result) errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0: if errno != 0:
raise ProgrammingError(CTaosInterface.errStr(self._result), errno) raise ProgrammingError(
if num_of_fields == 0: break CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields self._rowcount += num_of_fields
for i in range(len(self._fields)): for i in range(len(self._fields)):
buffer[i].extend(block[i]) buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer))) return list(map(tuple, zip(*buffer)))
def nextset(self): def nextset(self):
""" """
""" """
@ -259,8 +283,8 @@ class TDengineCursor(object):
# if threading.get_ident() != self._threadId: # if threading.get_ident() != self._threadId:
# info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident()) # info = "Cursor handleresult:Thread ID not match,creater:"+str(self._threadId)+" caller:"+str(threading.get_ident())
# raise OperationalError(info) # raise OperationalError(info)
# print(info) # print(info)
# return None # return None
self._description = [] self._description = []
for ele in self._fields: for ele in self._fields:
@ -268,4 +292,3 @@ class TDengineCursor(object):
(ele['name'], ele['type'], None, None, None, None, False)) (ele['name'], ele['type'], None, None, None, None, False))
return self._result return self._result
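For context, a minimal end-to-end sketch of the cursor paths touched above (execute, fetchall). It assumes a TDengine server is reachable locally with default credentials; database and table names are placeholders:

```python
import taos  # TDengine Python connector

# Assumes taosd is running locally with the default root/taosdata account.
conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
cursor = conn.cursor()
cursor.execute("create database if not exists demo")
cursor.execute("use demo")
cursor.execute("create table if not exists t1 (ts timestamp, v int)")
cursor.execute("insert into t1 values (now, 1)")
cursor.execute("select * from t1")
for row in cursor.fetchall():  # internally drives fetchRow/fetchBlock
    print(row)
conn.close()
```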


@ -4,6 +4,7 @@
import time import time
import datetime import datetime
class DBAPITypeObject(object): class DBAPITypeObject(object):
def __init__(self, *values): def __init__(self, *values):
self.values = values self.values = values
@ -16,23 +17,28 @@ class DBAPITypeObject(object):
else: else:
return -1 return -1
Date = datetime.date Date = datetime.date
Time = datetime.time Time = datetime.time
Timestamp = datetime.datetime Timestamp = datetime.datetime
def DataFromTicks(ticks): def DataFromTicks(ticks):
return Date(*time.localtime(ticks)[:3]) return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks): def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6]) return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks): def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6]) return Timestamp(*time.localtime(ticks)[:6])
Binary = bytes Binary = bytes
# STRING = DBAPITypeObject(*constants.FieldType.get_string_types()) # STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types()) # BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
# NUMBER = BAPITypeObject(*constants.FieldType.get_number_types()) # NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types()) # DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# ROWID = DBAPITypeObject() # ROWID = DBAPITypeObject()


@ -1,35 +1,41 @@
"""Python exceptions """Python exceptions
""" """
class Error(Exception): class Error(Exception):
def __init__(self, msg=None, errno=None): def __init__(self, msg=None, errno=None):
self.msg = msg self.msg = msg
self._full_msg = self.msg self._full_msg = self.msg
self.errno = errno self.errno = errno
def __str__(self): def __str__(self):
return self._full_msg return self._full_msg
class Warning(Exception): class Warning(Exception):
"""Exception raised for important warnings like data truncations while inserting. """Exception raised for important warnings like data truncations while inserting.
""" """
pass pass
class InterfaceError(Error): class InterfaceError(Error):
"""Exception raised for errors that are related to the database interface rather than the database itself. """Exception raised for errors that are related to the database interface rather than the database itself.
""" """
pass pass
class DatabaseError(Error): class DatabaseError(Error):
"""Exception raised for errors that are related to the database. """Exception raised for errors that are related to the database.
""" """
pass pass
class DataError(DatabaseError): class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range. """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
""" """
pass pass
class OperationalError(DatabaseError): class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
""" """
@ -41,17 +47,20 @@ class IntegrityError(DatabaseError):
""" """
pass pass
class InternalError(DatabaseError): class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal error. """Exception raised when the database encounters an internal error.
""" """
pass pass
class ProgrammingError(DatabaseError): class ProgrammingError(DatabaseError):
"""Exception raised for programming errors. """Exception raised for programming errors.
""" """
pass pass
class NotSupportedError(DatabaseError): class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used which is not supported by the database,. """Exception raised in case a method or database API was used which is not supported by the database,.
""" """
pass pass


@ -1,32 +1,33 @@
from .cinterface import CTaosInterface from .cinterface import CTaosInterface
from .error import * from .error import *
class TDengineSubscription(object): class TDengineSubscription(object):
"""TDengine subscription object """TDengine subscription object
""" """
def __init__(self, sub): def __init__(self, sub):
self._sub = sub self._sub = sub
def consume(self): def consume(self):
"""Consume rows of a subscription """Consume rows of a subscription
""" """
if self._sub is None: if self._sub is None:
raise OperationalError("Invalid use of consume") raise OperationalError("Invalid use of consume")
result, fields = CTaosInterface.consume(self._sub) result, fields = CTaosInterface.consume(self._sub)
buffer = [[] for i in range(len(fields))] buffer = [[] for i in range(len(fields))]
while True: while True:
block, num_of_fields = CTaosInterface.fetchBlock(result, fields) block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
if num_of_fields == 0: break if num_of_fields == 0:
break
for i in range(len(fields)): for i in range(len(fields)):
buffer[i].extend(block[i]) buffer[i].extend(block[i])
self.fields = fields self.fields = fields
return list(map(tuple, zip(*buffer))) return list(map(tuple, zip(*buffer)))
def close(self, keepProgress=True):
def close(self, keepProgress = True):
"""Close the Subscription. """Close the Subscription.
""" """
if self._sub is None: if self._sub is None:
@ -38,15 +39,19 @@ class TDengineSubscription(object):
if __name__ == '__main__': if __name__ == '__main__':
from .connection import TDengineConnection from .connection import TDengineConnection
conn = TDengineConnection(host="127.0.0.1", user="root", password="taosdata", database="test") conn = TDengineConnection(
host="127.0.0.1",
user="root",
password="taosdata",
database="test")
# Generate a cursor object to run SQL commands # Generate a cursor object to run SQL commands
sub = conn.subscribe(True, "test", "select * from meters;", 1000) sub = conn.subscribe(True, "test", "select * from meters;", 1000)
for i in range(0,10): for i in range(0, 10):
data = sub.consume() data = sub.consume()
for d in data: for d in data:
print(d) print(d)
sub.close() sub.close()
conn.close() conn.close()


@ -3,7 +3,6 @@ from .connection import TDengineConnection
from .cursor import TDengineCursor from .cursor import TDengineCursor
# Globals # Globals
apilevel = '2.0.3'
threadsafety = 0 threadsafety = 0
paramstyle = 'pyformat' paramstyle = 'pyformat'
@ -21,4 +20,4 @@ def connect(*args, **kwargs):
@rtype: TDengineConnector @rtype: TDengineConnector
""" """
return TDengineConnection(*args, **kwargs) return TDengineConnection(*args, **kwargs)


@ -3,7 +3,6 @@ from .connection import TDengineConnection
from .cursor import TDengineCursor from .cursor import TDengineCursor
# Globals # Globals
apilevel = '2.0.3'
threadsafety = 0 threadsafety = 0
paramstyle = 'pyformat' paramstyle = 'pyformat'
@ -21,4 +20,4 @@ def connect(*args, **kwargs):
@rtype: TDengineConnector @rtype: TDengineConnector
""" """
return TDengineConnection(*args, **kwargs) return TDengineConnection(*args, **kwargs)


@ -68,7 +68,7 @@ typedef struct taosField {
#define DLL_EXPORT #define DLL_EXPORT
#endif #endif
DLL_EXPORT void taos_init(); DLL_EXPORT int taos_init();
DLL_EXPORT void taos_cleanup(void); DLL_EXPORT void taos_cleanup(void);
DLL_EXPORT int taos_options(TSDB_OPTION option, const void *arg, ...); DLL_EXPORT int taos_options(TSDB_OPTION option, const void *arg, ...);
DLL_EXPORT TAOS *taos_connect(const char *ip, const char *user, const char *pass, const char *db, uint16_t port); DLL_EXPORT TAOS *taos_connect(const char *ip, const char *user, const char *pass, const char *db, uint16_t port);


@ -122,8 +122,8 @@
#define TK_UNSIGNED 103 #define TK_UNSIGNED 103
#define TK_TAGS 104 #define TK_TAGS 104
#define TK_USING 105 #define TK_USING 105
#define TK_AS 106 #define TK_COMMA 106
#define TK_COMMA 107 #define TK_AS 107
#define TK_NULL 108 #define TK_NULL 108
#define TK_SELECT 109 #define TK_SELECT 109
#define TK_UNION 110 #define TK_UNION 110
@ -228,6 +228,7 @@
#define TK_VALUES 209 #define TK_VALUES 209
#define TK_SPACE 300 #define TK_SPACE 300
#define TK_COMMENT 301 #define TK_COMMENT 301
#define TK_ILLEGAL 302 #define TK_ILLEGAL 302


@ -76,7 +76,11 @@ TAOS *shellInit(SShellArguments *args) {
args->user = TSDB_DEFAULT_USER; args->user = TSDB_DEFAULT_USER;
} }
taos_init(); if (taos_init()) {
printf("failed to init taos\n");
fflush(stdout);
return NULL;
}
// Connect to the database. // Connect to the database.
TAOS *con = NULL; TAOS *con = NULL;


@ -110,7 +110,10 @@ int main(int argc, char* argv[]) {
} }
if (args.netTestRole && args.netTestRole[0] != 0) { if (args.netTestRole && args.netTestRole[0] != 0) {
taos_init(); if (taos_init()) {
printf("Failed to init taos");
exit(EXIT_FAILURE);
}
taosNetTest(args.netTestRole, args.host, args.port, args.pktLen); taosNetTest(args.netTestRole, args.host, args.port, args.pktLen);
exit(0); exit(0);
} }


@ -711,7 +711,11 @@ int main(int argc, char *argv[]) {
fprintf(fp, "###################################################################\n\n"); fprintf(fp, "###################################################################\n\n");
fprintf(fp, "| WRecords | Records/Second | Requests/Second | WLatency(ms) |\n"); fprintf(fp, "| WRecords | Records/Second | Requests/Second | WLatency(ms) |\n");
taos_init(); if (taos_init()) {
fprintf(stderr, "Failed to init taos\n");
return 1;
}
TAOS *taos = taos_connect(ip_addr, user, pass, NULL, port); TAOS *taos = taos_connect(ip_addr, user, pass, NULL, port);
if (taos == NULL) { if (taos == NULL) {
fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL)); fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL));


@ -1971,7 +1971,11 @@ static int createSuperTable(TAOS * taos, char* dbName, SSuperTable* superTbls,
static int createDatabases() { static int createDatabases() {
TAOS * taos = NULL; TAOS * taos = NULL;
int ret = 0; int ret = 0;
taos_init(); if (taos_init()) {
fprintf(stderr, "Failed to init taos\n");
exit(-1);
}
taos = taos_connect(g_Dbs.host, g_Dbs.user, g_Dbs.password, NULL, g_Dbs.port); taos = taos_connect(g_Dbs.host, g_Dbs.user, g_Dbs.password, NULL, g_Dbs.port);
if (taos == NULL) { if (taos == NULL) {
fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL)); fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL));
@ -4496,7 +4500,11 @@ void *subQueryProcess(void *sarg) {
int queryTestProcess() { int queryTestProcess() {
TAOS * taos = NULL; TAOS * taos = NULL;
taos_init(); if (taos_init()) {
fprintf(stderr, "Failed to init taos\n");
exit(-1);
}
taos = taos_connect(g_queryInfo.host, g_queryInfo.user, g_queryInfo.password, NULL, g_queryInfo.port); taos = taos_connect(g_queryInfo.host, g_queryInfo.user, g_queryInfo.password, NULL, g_queryInfo.port);
if (taos == NULL) { if (taos == NULL) {
fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL)); fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL));
@ -4772,7 +4780,11 @@ int subscribeTestProcess() {
} }
TAOS * taos = NULL; TAOS * taos = NULL;
taos_init(); if (taos_init()) {
fprintf(stderr, "Failed to init taos\n");
exit(-1);
}
taos = taos_connect(g_queryInfo.host, g_queryInfo.user, g_queryInfo.password, g_queryInfo.dbName, g_queryInfo.port); taos = taos_connect(g_queryInfo.host, g_queryInfo.user, g_queryInfo.password, g_queryInfo.dbName, g_queryInfo.port);
if (taos == NULL) { if (taos == NULL) {
fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL)); fprintf(stderr, "Failed to connect to TDengine, reason:%s\n", taos_errstr(NULL));


@ -103,7 +103,9 @@ int32_t monInitSystem() {
} }
int32_t monStartSystem() { int32_t monStartSystem() {
taos_init(); if (taos_init()) {
return -1;
}
tsMonitor.start = 1; tsMonitor.start = 1;
monExecuteSQLFp = monExecuteSQL; monExecuteSQLFp = monExecuteSQL;
monInfo("monitor module start"); monInfo("monitor module start");


@ -76,6 +76,7 @@ typedef struct SQuerySQL {
typedef struct SCreatedTableInfo { typedef struct SCreatedTableInfo {
SStrToken name; // table name token SStrToken name; // table name token
SStrToken stableName; // super table name token , for using clause SStrToken stableName; // super table name token , for using clause
SArray *pTagNames; // create by using super table, tag name
SArray *pTagVals; // create by using super table, tag value SArray *pTagVals; // create by using super table, tag value
char *fullname; // table full name char *fullname; // table full name
STagData tagdata; // true tag data, super table full name is in STagData STagData tagdata; // true tag data, super table full name is in STagData
@ -246,7 +247,7 @@ SCreateTableSQL *tSetCreateSqlElems(SArray *pCols, SArray *pTags, SQuerySQL *pSe
void tSqlExprNodeDestroy(tSQLExpr *pExpr); void tSqlExprNodeDestroy(tSQLExpr *pExpr);
SAlterTableInfo * tAlterTableSqlElems(SStrToken *pTableName, SArray *pCols, SArray *pVals, int32_t type, int16_t tableTable); SAlterTableInfo * tAlterTableSqlElems(SStrToken *pTableName, SArray *pCols, SArray *pVals, int32_t type, int16_t tableTable);
SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists); SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagNames, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists);
void destroyAllSelectClause(SSubclauseInfo *pSql); void destroyAllSelectClause(SSubclauseInfo *pSql);
void doDestroyQuerySql(SQuerySQL *pSql); void doDestroyQuerySql(SQuerySQL *pSql);


@ -356,9 +356,20 @@ create_stable_args(A) ::= ifnotexists(U) ids(V) cpxName(Z) LP columnlist(X) RP T
create_from_stable(A) ::= ifnotexists(U) ids(V) cpxName(Z) USING ids(X) cpxName(F) TAGS LP tagitemlist(Y) RP. { create_from_stable(A) ::= ifnotexists(U) ids(V) cpxName(Z) USING ids(X) cpxName(F) TAGS LP tagitemlist(Y) RP. {
X.n += F.n; X.n += F.n;
V.n += Z.n; V.n += Z.n;
A = createNewChildTableInfo(&X, Y, &V, &U); A = createNewChildTableInfo(&X, NULL, Y, &V, &U);
} }
create_from_stable(A) ::= ifnotexists(U) ids(V) cpxName(Z) USING ids(X) cpxName(F) LP tagNamelist(P) RP TAGS LP tagitemlist(Y) RP. {
X.n += F.n;
V.n += Z.n;
A = createNewChildTableInfo(&X, P, Y, &V, &U);
}
%type tagNamelist{SArray*}
%destructor tagNamelist {taosArrayDestroy($$);}
tagNamelist(A) ::= tagNamelist(X) COMMA ids(Y). {taosArrayPush(X, &Y); A = X; }
tagNamelist(A) ::= ids(X). {A = taosArrayInit(4, sizeof(SStrToken)); taosArrayPush(A, &X);}
// create stream // create stream
// create table table_name as select count(*) from super_table_name interval(time) // create table table_name as select count(*) from super_table_name interval(time)
create_table_args(A) ::= ifnotexists(U) ids(V) cpxName(Z) AS select(S). { create_table_args(A) ::= ifnotexists(U) ids(V) cpxName(Z) AS select(S). {
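The new tagNamelist rule allows a child table to be created from a super table while explicitly naming the tag columns being supplied. A hedged usage sketch through the Python connector (server address, database and table names are placeholders):

```python
import taos

# Placeholders: a reachable server and an existing database "demo" are assumed.
conn = taos.connect(host="127.0.0.1", user="root", password="taosdata", database="demo")
cursor = conn.cursor()
cursor.execute("create table if not exists meters (ts timestamp, current float) "
               "tags (location binary(32), groupid int)")
# New grammar added above: the tag columns being set are listed after the super table name.
cursor.execute("create table if not exists d1001 using meters (location) tags ('Beijing')")
conn.close()
```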


@ -2574,12 +2574,16 @@ static void bottom_function(SQLFunctionCtx *pCtx) {
STopBotInfo *pRes = getTopBotOutputInfo(pCtx); STopBotInfo *pRes = getTopBotOutputInfo(pCtx);
if ((void *)pRes->res[0] != (void *)((char *)pRes + sizeof(STopBotInfo) + POINTER_BYTES * pCtx->param[0].i64)) {
buildTopBotStruct(pRes, pCtx);
}
for (int32_t i = 0; i < pCtx->size; ++i) { for (int32_t i = 0; i < pCtx->size; ++i) {
char *data = GET_INPUT_DATA(pCtx, i); char *data = GET_INPUT_DATA(pCtx, i);
TSKEY ts = GET_TS_DATA(pCtx, i); TSKEY ts = GET_TS_DATA(pCtx, i);
if (pCtx->hasNull && isNull(data, pCtx->inputType)) { if (pCtx->hasNull && isNull(data, pCtx->inputType)) {
continue; continue;
} }
notNullElems++; notNullElems++;
@ -2608,6 +2612,11 @@ static void bottom_function_f(SQLFunctionCtx *pCtx, int32_t index) {
} }
STopBotInfo *pRes = getTopBotOutputInfo(pCtx); STopBotInfo *pRes = getTopBotOutputInfo(pCtx);
if ((void *)pRes->res[0] != (void *)((char *)pRes + sizeof(STopBotInfo) + POINTER_BYTES * pCtx->param[0].i64)) {
buildTopBotStruct(pRes, pCtx);
}
SET_VAL(pCtx, 1, 1); SET_VAL(pCtx, 1, 1);
do_bottom_function_add(pRes, (int32_t)pCtx->param[0].i64, pData, ts, pCtx->inputType, &pCtx->tagInfo, NULL, 0); do_bottom_function_add(pRes, (int32_t)pCtx->param[0].i64, pData, ts, pCtx->inputType, &pCtx->tagInfo, NULL, 0);


@ -3785,7 +3785,7 @@ void setResultRowOutputBufInitCtx(SQueryRuntimeEnv *pRuntimeEnv, SResultRow *pRe
if (functionId == TSDB_FUNC_TOP || functionId == TSDB_FUNC_BOTTOM || functionId == TSDB_FUNC_DIFF) { if (functionId == TSDB_FUNC_TOP || functionId == TSDB_FUNC_BOTTOM || functionId == TSDB_FUNC_DIFF) {
pCtx->ptsOutputBuf = pRuntimeEnv->pCtx[0].pOutput; pCtx->ptsOutputBuf = pRuntimeEnv->pCtx[0].pOutput;
} }
if (!pCtx->resultInfo->initialized) { if (!pCtx->resultInfo->initialized) {
aAggs[functionId].init(pCtx); aAggs[functionId].init(pCtx);
} }


@ -496,7 +496,8 @@ static void freeVariant(void *pItem) {
} }
void freeCreateTableInfo(void* p) { void freeCreateTableInfo(void* p) {
SCreatedTableInfo* pInfo = (SCreatedTableInfo*) p; SCreatedTableInfo* pInfo = (SCreatedTableInfo*) p;
taosArrayDestroy(pInfo->pTagNames);
taosArrayDestroyEx(pInfo->pTagVals, freeVariant); taosArrayDestroyEx(pInfo->pTagVals, freeVariant);
tfree(pInfo->fullname); tfree(pInfo->fullname);
tfree(pInfo->tagdata.data); tfree(pInfo->tagdata.data);
@ -574,11 +575,12 @@ SCreateTableSQL *tSetCreateSqlElems(SArray *pCols, SArray *pTags, SQuerySQL *pSe
return pCreate; return pCreate;
} }
SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists) { SCreatedTableInfo createNewChildTableInfo(SStrToken *pTableName, SArray *pTagNames, SArray *pTagVals, SStrToken *pToken, SStrToken* igExists) {
SCreatedTableInfo info; SCreatedTableInfo info;
memset(&info, 0, sizeof(SCreatedTableInfo)); memset(&info, 0, sizeof(SCreatedTableInfo));
info.name = *pToken; info.name = *pToken;
info.pTagNames = pTagNames;
info.pTagVals = pTagVals; info.pTagVals = pTagVals;
info.stableName = *pTableName; info.stableName = *pTableName;
info.igExist = (igExists->n > 0)? 1:0; info.igExist = (igExists->n > 0)? 1:0;

(File diff suppressed because it is too large.)


@ -50,7 +50,8 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
STsdbMeta *pMeta = pRepo->tsdbMeta; STsdbMeta *pMeta = pRepo->tsdbMeta;
STable * super = NULL; STable * super = NULL;
STable * table = NULL; STable * table = NULL;
int newSuper = 0; bool newSuper = false;
bool superChanged = false;
int tid = pCfg->tableId.tid; int tid = pCfg->tableId.tid;
STable * pTable = NULL; STable * pTable = NULL;
@ -85,7 +86,7 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
if (pCfg->type == TSDB_CHILD_TABLE) { if (pCfg->type == TSDB_CHILD_TABLE) {
super = tsdbGetTableByUid(pMeta, pCfg->superUid); super = tsdbGetTableByUid(pMeta, pCfg->superUid);
if (super == NULL) { // super table not exists, try to create it if (super == NULL) { // super table not exists, try to create it
newSuper = 1; newSuper = true;
super = tsdbCreateTableFromCfg(pCfg, true); super = tsdbCreateTableFromCfg(pCfg, true);
if (super == NULL) goto _err; if (super == NULL) goto _err;
} else { } else {
@ -93,6 +94,17 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
terrno = TSDB_CODE_TDB_IVD_CREATE_TABLE_INFO; terrno = TSDB_CODE_TDB_IVD_CREATE_TABLE_INFO;
goto _err; goto _err;
} }
if (schemaVersion(pCfg->tagSchema) > schemaVersion(super->tagSchema)) {
// tag schema out of date, need to update super table tag version
STSchema *pOldSchema = super->tagSchema;
TSDB_WLOCK_TABLE(super);
super->tagSchema = tdDupSchema(pCfg->tagSchema);
TSDB_WUNLOCK_TABLE(super);
tdFreeSchema(pOldSchema);
superChanged = true;
}
} }
} }
@ -117,7 +129,7 @@ int tsdbCreateTable(STsdbRepo *repo, STableCfg *pCfg) {
// TODO: refactor duplicate codes // TODO: refactor duplicate codes
int tlen = 0; int tlen = 0;
void *pBuf = NULL; void *pBuf = NULL;
if (newSuper) { if (newSuper || superChanged) {
tlen = tsdbGetTableEncodeSize(TSDB_UPDATE_META, super); tlen = tsdbGetTableEncodeSize(TSDB_UPDATE_META, super);
pBuf = tsdbAllocBytes(pRepo, tlen); pBuf = tsdbAllocBytes(pRepo, tlen);
if (pBuf == NULL) goto _err; if (pBuf == NULL) goto _err;
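One scenario that appears to exercise the new superChanged path is adding a tag to an existing super table and then creating another child table, so the incoming table config carries a newer tag schema version. A hedged SQL-level sketch via the Python connector (names are placeholders; the schema-version bookkeeping itself is internal to the server):

```python
import taos

# Placeholders: a reachable server and an existing database "demo" are assumed.
conn = taos.connect(host="127.0.0.1", user="root", password="taosdata", database="demo")
cursor = conn.cursor()
cursor.execute("create table if not exists sensors (ts timestamp, val double) tags (site int)")
cursor.execute("alter table sensors add tag room int")  # tag schema version increases
# A child table created afterwards is defined against the newer tag schema.
cursor.execute("create table if not exists s1001 using sensors tags (1, 101)")
conn.close()
```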


@ -1,6 +1,8 @@
CMAKE_MINIMUM_REQUIRED(VERSION 2.8) CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
PROJECT(TDengine) PROJECT(TDengine)
ADD_DEFINITIONS(-DWAL_CHECKSUM_WHOLE)
INCLUDE_DIRECTORIES(inc) INCLUDE_DIRECTORIES(inc)
AUX_SOURCE_DIRECTORY(${CMAKE_CURRENT_SOURCE_DIR}/src SRC) AUX_SOURCE_DIRECTORY(${CMAKE_CURRENT_SOURCE_DIR}/src SRC)


@ -111,6 +111,28 @@ void walRemoveAllOldFiles(void *handle) {
pthread_mutex_unlock(&pWal->mutex); pthread_mutex_unlock(&pWal->mutex);
} }
#if defined(WAL_CHECKSUM_WHOLE)
static void walUpdateChecksum(SWalHead *pHead) {
pHead->sver = 1;
pHead->cksum = 0;
pHead->cksum = taosCalcChecksum(0, (uint8_t *)pHead, sizeof(*pHead) + pHead->len);
}
static int walValidateChecksum(SWalHead *pHead) {
if (pHead->sver == 0) { // for compatible with wal before sver 1
return taosCheckChecksumWhole((uint8_t *)pHead, sizeof(*pHead));
} else if (pHead->sver == 1) {
uint32_t cksum = pHead->cksum;
pHead->cksum = 0;
return taosCheckChecksum((uint8_t *)pHead, sizeof(*pHead) + pHead->len, cksum);
}
return 0;
}
#endif
int32_t walWrite(void *handle, SWalHead *pHead) { int32_t walWrite(void *handle, SWalHead *pHead) {
if (handle == NULL) return -1; if (handle == NULL) return -1;
@ -123,7 +145,13 @@ int32_t walWrite(void *handle, SWalHead *pHead) {
if (pHead->version <= pWal->version) return 0; if (pHead->version <= pWal->version) return 0;
pHead->signature = WAL_SIGNATURE; pHead->signature = WAL_SIGNATURE;
#if defined(WAL_CHECKSUM_WHOLE)
walUpdateChecksum(pHead);
#else
pHead->sver = 0;
taosCalcChecksumAppend(0, (uint8_t *)pHead, sizeof(SWalHead)); taosCalcChecksumAppend(0, (uint8_t *)pHead, sizeof(SWalHead));
#endif
int32_t contLen = pHead->len + sizeof(SWalHead); int32_t contLen = pHead->len + sizeof(SWalHead);
pthread_mutex_lock(&pWal->mutex); pthread_mutex_lock(&pWal->mutex);
@ -246,16 +274,40 @@ static int32_t walSkipCorruptedRecord(SWal *pWal, SWalHead *pHead, int64_t tfd,
continue; continue;
} }
#if defined(WAL_CHECKSUM_WHOLE)
if (pHead->sver == 0 && walValidateChecksum(pHead)) {
wInfo("vgId:%d, wal head cksum check passed, offset:%" PRId64, pWal->vgId, pos);
*offset = pos;
return TSDB_CODE_SUCCESS;
}
if (pHead->sver == 1) {
if (tfRead(tfd, pHead->cont, pHead->len) < pHead->len) {
wError("vgId:%d, read to end of corrupted wal file, offset:%" PRId64, pWal->vgId, pos);
return TSDB_CODE_WAL_FILE_CORRUPTED;
}
if (walValidateChecksum(pHead)) {
wInfo("vgId:%d, wal whole cksum check passed, offset:%" PRId64, pWal->vgId, pos);
*offset = pos;
return TSDB_CODE_SUCCESS;
}
}
#else
if (taosCheckChecksumWhole((uint8_t *)pHead, sizeof(SWalHead))) { if (taosCheckChecksumWhole((uint8_t *)pHead, sizeof(SWalHead))) {
wInfo("vgId:%d, wal head cksum check passed, offset:%" PRId64, pWal->vgId, pos); wInfo("vgId:%d, wal head cksum check passed, offset:%" PRId64, pWal->vgId, pos);
*offset = pos; *offset = pos;
return TSDB_CODE_SUCCESS; return TSDB_CODE_SUCCESS;
} }
#endif
} }
return TSDB_CODE_WAL_FILE_CORRUPTED; return TSDB_CODE_WAL_FILE_CORRUPTED;
} }
static int32_t walRestoreWalFile(SWal *pWal, void *pVnode, FWalWrite writeFp, char *name, int64_t fileId) { static int32_t walRestoreWalFile(SWal *pWal, void *pVnode, FWalWrite writeFp, char *name, int64_t fileId) {
int32_t size = WAL_MAX_SIZE; int32_t size = WAL_MAX_SIZE;
void * buffer = tmalloc(size); void * buffer = tmalloc(size);
@ -293,6 +345,51 @@ static int32_t walRestoreWalFile(SWal *pWal, void *pVnode, FWalWrite writeFp, ch
break; break;
} }
#if defined(WAL_CHECKSUM_WHOLE)
if (pHead->sver == 0 && !walValidateChecksum(pHead)) {
wError("vgId:%d, file:%s, wal head cksum is messed up, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
pHead->version, pHead->len, offset);
code = walSkipCorruptedRecord(pWal, pHead, tfd, &offset);
if (code != TSDB_CODE_SUCCESS) {
walFtruncate(pWal, tfd, offset);
break;
}
}
if (pHead->len < 0 || pHead->len > size - sizeof(SWalHead)) {
wError("vgId:%d, file:%s, wal head len out of range, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
pHead->version, pHead->len, offset);
code = walSkipCorruptedRecord(pWal, pHead, tfd, &offset);
if (code != TSDB_CODE_SUCCESS) {
walFtruncate(pWal, tfd, offset);
break;
}
}
ret = (int32_t)tfRead(tfd, pHead->cont, pHead->len);
if (ret < 0) {
wError("vgId:%d, file:%s, failed to read wal body since %s", pWal->vgId, name, strerror(errno));
code = TAOS_SYSTEM_ERROR(errno);
break;
}
if (ret < pHead->len) {
wError("vgId:%d, file:%s, failed to read wal body, ret:%d len:%d", pWal->vgId, name, ret, pHead->len);
offset += sizeof(SWalHead);
continue;
}
if (pHead->sver == 1 && !walValidateChecksum(pHead)) {
wError("vgId:%d, file:%s, wal whole cksum is messed up, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
pHead->version, pHead->len, offset);
code = walSkipCorruptedRecord(pWal, pHead, tfd, &offset);
if (code != TSDB_CODE_SUCCESS) {
walFtruncate(pWal, tfd, offset);
break;
}
}
#else
if (!taosCheckChecksumWhole((uint8_t *)pHead, sizeof(SWalHead))) { if (!taosCheckChecksumWhole((uint8_t *)pHead, sizeof(SWalHead))) {
wError("vgId:%d, file:%s, wal head cksum is messed up, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name, wError("vgId:%d, file:%s, wal head cksum is messed up, hver:%" PRIu64 " len:%d offset:%" PRId64, pWal->vgId, name,
pHead->version, pHead->len, offset); pHead->version, pHead->len, offset);
@ -326,6 +423,7 @@ static int32_t walRestoreWalFile(SWal *pWal, void *pVnode, FWalWrite writeFp, ch
continue; continue;
} }
#endif
offset = offset + sizeof(SWalHead) + pHead->len; offset = offset + sizeof(SWalHead) + pHead->len;
wTrace("vgId:%d, restore wal, fileId:%" PRId64 " hver:%" PRIu64 " wver:%" PRIu64 " len:%d", pWal->vgId, wTrace("vgId:%d, restore wal, fileId:%" PRId64 " hver:%" PRIu64 " wver:%" PRIu64 " len:%d", pWal->vgId,
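For intuition: records written with sver 1 are checksummed over the head plus the body with the cksum field zeroed first, while sver 0 records keep the old head-only check for backward compatibility. A conceptual Python sketch of that idea (zlib.crc32 merely stands in for taosCalcChecksum; the record layout and field sizes are simplified):

```python
import struct
import zlib

HEAD = "<II"  # toy head: (sver, cksum) only; the real SWalHead has more fields

def encode_record(body: bytes, sver: int = 1) -> bytes:
    # Checksum is computed with the cksum field zeroed, as in walUpdateChecksum.
    cksum = zlib.crc32(struct.pack(HEAD, sver, 0) + body)
    return struct.pack(HEAD, sver, cksum) + body

def validate_record(record: bytes) -> bool:
    sver, cksum = struct.unpack_from(HEAD, record)
    head_len = struct.calcsize(HEAD)
    zeroed_head = struct.pack(HEAD, sver, 0)
    if sver == 0:  # legacy scheme: checksum covers the head only
        return zlib.crc32(zeroed_head) == cksum
    return zlib.crc32(zeroed_head + record[head_len:]) == cksum  # sver 1: head + body

rec = encode_record(b"write-ahead payload")
assert validate_record(rec)
assert not validate_record(rec[:-1] + b"X")  # body corruption is now detected
```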

tests/Jenkinsfile (vendored)

@ -109,6 +109,13 @@ pipeline {
java --class-path=../../../../src/connector/jdbc/target:$JAVA_HOME/jre/lib/ext -jar target/JDBCDemo-SNAPSHOT-jar-with-dependencies.jar -host 127.0.0.1 java --class-path=../../../../src/connector/jdbc/target:$JAVA_HOME/jre/lib/ext -jar target/JDBCDemo-SNAPSHOT-jar-with-dependencies.jar -host 127.0.0.1
''' '''
} }
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh '''
cp -rf ${WKC}/tests/examples/nodejs ${JENKINS_HOME}/workspace/
cd ${JENKINS_HOME}/workspace/nodejs
node nodejsChecker.js host=localhost
'''
}
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {
sh ''' sh '''
cd ${JENKINS_HOME}/workspace/C#NET/src/CheckC# cd ${JENKINS_HOME}/workspace/C#NET/src/CheckC#


@ -62,7 +62,10 @@ int main(int argc, char *argv[]) {
} }
// init TAOS // init TAOS
taos_init(); if (taos_init()) {
exit(1);
}
TAOS *taos = taos_connect(argv[1], "root", "taosdata", NULL, 0); TAOS *taos = taos_connect(argv[1], "root", "taosdata", NULL, 0);
if (taos == NULL) { if (taos == NULL) {
printf("failed to connect to server, reason:%s\n", "null taos"/*taos_errstr(taos)*/); printf("failed to connect to server, reason:%s\n", "null taos"/*taos_errstr(taos)*/);


@ -23,7 +23,10 @@ int main(int argc, char *argv[])
} }
// init TAOS // init TAOS
taos_init(); if (taos_init()) {
printf("failed to init taos\n");
exit(1);
}
taos = taos_connect(argv[1], "root", "taosdata", NULL, 0); taos = taos_connect(argv[1], "root", "taosdata", NULL, 0);
if (taos == NULL) { if (taos == NULL) {


@ -55,7 +55,10 @@ int main(int argc, char *argv[])
} }
// init TAOS // init TAOS
taos_init(); if (taos_init()) {
printf("failed to init taos\n");
exit(1);
}
strcpy(db_name, argv[2]); strcpy(db_name, argv[2]);
strcpy(tbl_name, argv[3]); strcpy(tbl_name, argv[3]);


@ -217,7 +217,10 @@ int main(int argc, char *argv[]) {
} }
// init TAOS // init TAOS
taos_init(); if (taos_init()) {
printf("failed to init taos\n");
exit(1);
}
TAOS* taos = taos_connect(host, user, passwd, "", 0); TAOS* taos = taos_connect(host, user, passwd, "", 0);
if (taos == NULL) { if (taos == NULL) {

tests/examples/rust (submodule)

@ -0,0 +1 @@
Subproject commit 1c8924dc668e6aa848214c2fc54e3ace3f5bf8df


@ -39,6 +39,8 @@ function buildTDengine {
cd $WORK_DIR/TDengine cd $WORK_DIR/TDengine
git remote update > /dev/null git remote update > /dev/null
git reset --hard HEAD
git checkout develop
REMOTE_COMMIT=`git rev-parse --short remotes/origin/develop` REMOTE_COMMIT=`git rev-parse --short remotes/origin/develop`
LOCAL_COMMIT=`git rev-parse --short @` LOCAL_COMMIT=`git rev-parse --short @`
@ -54,15 +56,16 @@ function buildTDengine {
cd debug cd debug
rm -rf * rm -rf *
cmake .. > /dev/null cmake .. > /dev/null
make > /dev/null make && make install > /dev/null
make install
fi fi
} }
function runQueryPerfTest { function runQueryPerfTest {
[ -f $PERFORMANCE_TEST_REPORT ] && rm $PERFORMANCE_TEST_REPORT [ -f $PERFORMANCE_TEST_REPORT ] && rm $PERFORMANCE_TEST_REPORT
nohup $WORK_DIR/TDengine/debug/build/bin/taosd -c /etc/taosperf/ > /dev/null 2>&1 & nohup $WORK_DIR/TDengine/debug/build/bin/taosd -c /etc/taosperf/ > /dev/null 2>&1 &
echoInfo "Run Performance Test" echoInfo "Wait TDengine to start"
sleep 60
echoInfo "Run Performance Test"
cd $WORK_DIR/TDengine/tests/pytest cd $WORK_DIR/TDengine/tests/pytest
python3 query/queryPerformance.py -c $LOCAL_COMMIT | tee -a $PERFORMANCE_TEST_REPORT python3 query/queryPerformance.py -c $LOCAL_COMMIT | tee -a $PERFORMANCE_TEST_REPORT
@ -104,6 +107,7 @@ function sendReport {
stopTaosd stopTaosd
buildTDengine buildTDengine
runQueryPerfTest runQueryPerfTest
stopTaosd
echoInfo "Send Report" echoInfo "Send Report"
sendReport sendReport


@ -2,15 +2,20 @@ FROM ubuntu:latest AS builder
ARG PACKAGE=TDengine-server-1.6.5.10-Linux-x64.tar.gz ARG PACKAGE=TDengine-server-1.6.5.10-Linux-x64.tar.gz
ARG EXTRACTDIR=TDengine-enterprise-server ARG EXTRACTDIR=TDengine-enterprise-server
ARG TARBITRATORPKG=TDengine-tarbitrator-1.6.5.10-Linux-x64.tar.gz
ARG EXTRACTDIR2=TDengine-enterprise-arbitrator
ARG CONTENT=taos.tar.gz ARG CONTENT=taos.tar.gz
WORKDIR /root WORKDIR /root
COPY ${PACKAGE} . COPY ${PACKAGE} .
COPY ${TARBITRATORPKG} .
RUN tar -zxf ${PACKAGE} RUN tar -zxf ${PACKAGE}
RUN tar -zxf ${TARBITRATORPKG}
RUN mv ${EXTRACTDIR}/driver ./lib RUN mv ${EXTRACTDIR}/driver ./lib
RUN tar -zxf ${EXTRACTDIR}/${CONTENT} RUN tar -zxf ${EXTRACTDIR}/${CONTENT}
RUN mv ${EXTRACTDIR2}/bin/* /root/bin
FROM ubuntu:latest FROM ubuntu:latest
@ -19,8 +24,10 @@ WORKDIR /root
RUN apt-get update RUN apt-get update
RUN apt-get install -y vim tmux net-tools RUN apt-get install -y vim tmux net-tools
RUN echo 'alias ll="ls -l --color=auto"' >> /root/.bashrc RUN echo 'alias ll="ls -l --color=auto"' >> /root/.bashrc
RUN ulimit -c unlimited
COPY --from=builder /root/bin/taosd /usr/bin COPY --from=builder /root/bin/taosd /usr/bin
COPY --from=builder /root/bin/tarbitrator /usr/bin
COPY --from=builder /root/bin/taos /usr/bin COPY --from=builder /root/bin/taos /usr/bin
COPY --from=builder /root/cfg/taos.cfg /etc/taos/ COPY --from=builder /root/cfg/taos.cfg /etc/taos/
COPY --from=builder /root/lib/libtaos.so.* /usr/lib/libtaos.so.1 COPY --from=builder /root/lib/libtaos.so.* /usr/lib/libtaos.so.1
@ -29,8 +36,8 @@ ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib"
ENV LC_CTYPE=en_US.UTF-8 ENV LC_CTYPE=en_US.UTF-8
ENV LANG=en_US.UTF-8 ENV LANG=en_US.UTF-8
EXPOSE 6030-6041/tcp 6060/tcp 6030-6039/udp EXPOSE 6030-6042/tcp 6060/tcp 6030-6039/udp
# VOLUME [ "/var/lib/taos", "/var/log/taos", "/etc/taos" ] # VOLUME [ "/var/lib/taos", "/var/log/taos", "/etc/taos" ]
CMD [ "bash" ] CMD [ "bash" ]


@ -12,15 +12,89 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import os import os
import taos
import random import random
import argparse
class ClusterTestcase: class BuildDockerCluser:
def __init__(self, hostName, user, password, configDir, numOfNodes, clusterVersion):
self.hostName = hostName
self.user = user
self.password = password
self.configDir = configDir
self.numOfNodes = numOfNodes
self.clusterVersion = clusterVersion
def getConnection(self):
self.conn = taos.connect(
host = self.hostName,
user = self.user,
password = self.password,
config = self.configDir)
def createDondes(self):
self.cursor = self.conn.cursor()
for i in range(2, self.numOfNodes + 1):
self.cursor.execute("create dnode tdnode%d" % i)
def startArbitrator(self):
print("start arbitrator")
os.system("docker exec -d $(docker ps|grep tdnode1|awk '{print $1}') tarbitrator")
def run(self): def run(self):
os.system("./buildClusterEnv.sh -n 3 -v 2.0.14.1") if self.numOfNodes < 2 or self.numOfNodes > 5:
os.system("yes|taosdemo -h 172.27.0.7 -n 100 -t 100 -x") print("the number of nodes must be between 2 and 5")
os.system("python3 ../../concurrent_inquiry.py -H 172.27.0.7 -T 4 -t 4 -l 10") exit(0)
os.system("./buildClusterEnv.sh -n %d -v %s" % (self.numOfNodes, self.clusterVersion))
self.getConnection()
self.createDondes()
self.startArbitrator()
clusterTest = ClusterTestcase() parser = argparse.ArgumentParser()
clusterTest.run() parser.add_argument(
'-H',
'--host',
action='store',
default='tdnode1',
type=str,
help='host name to be connected (default: tdnode1)')
parser.add_argument(
'-u',
'--user',
action='store',
default='root',
type=str,
help='user (default: root)')
parser.add_argument(
'-p',
'--password',
action='store',
default='taosdata',
type=str,
help='password (default: taosdata)')
parser.add_argument(
'-c',
'--config-dir',
action='store',
default='/etc/taos',
type=str,
help='configuration directory (default: /etc/taos)')
parser.add_argument(
'-n',
'--num-of-nodes',
action='store',
default=2,
type=int,
help='number of nodes in the cluster (default: 2, min: 2, max: 5)')
parser.add_argument(
'-v',
'--version',
action='store',
default='2.0.14.1',
type=str,
help='the version of the cluster to be build, Default is 2.0.14.1')
args = parser.parse_args()
cluster = BuildDockerCluser(args.host, args.user, args.password, args.config_dir, args.num_of_nodes, args.version)
cluster.run()


@ -27,18 +27,28 @@ do
esac esac
done done
function addTaoscfg {
for i in {1..5}
do
touch /data/node$i/cfg/taos.cfg
echo 'firstEp tdnode1:6030' > /data/node$i/cfg/taos.cfg
echo 'fqdn tdnode'$i >> /data/node$i/cfg/taos.cfg
echo 'arbitrator tdnode1:6042' >> /data/node$i/cfg/taos.cfg
done
}
function createDIR { function createDIR {
for i in {1.. $2} for i in {1..5}
do do
mkdir -p /data/node$i/data mkdir -p /data/node$i/data
mkdir -p /data/node$i/log mkdir -p /data/node$i/log
mkdir -p /data/node$i/cfg mkdir -p /data/node$i/cfg
mkdir -p /data/node$i/core
done done
} }
function cleanEnv { function cleanEnv {
for i in {1..3} for i in {1..5}
do do
echo /data/node$i/data/* echo /data/node$i/data/*
rm -rf /data/node$i/data/* rm -rf /data/node$i/data/*
@ -54,49 +64,60 @@ function prepareBuild {
rm -rf $CURR_DIR/../../../../release/* rm -rf $CURR_DIR/../../../../release/*
fi fi
cd $CURR_DIR/../../../../packaging if [ ! -e $DOCKER_DIR/TDengine-server-$VERSION-Linux-x64.tar.gz ] || [ ! -e $DOCKER_DIR/TDengine-arbitrator-$VERSION-Linux-x64.tar.gz ]; then
./release.sh -v edge -n $VERSION >> /dev/null cd $CURR_DIR/../../../../packaging
echo "generating TDengine packages"
./release.sh -v edge -n $VERSION >> /dev/null
if [ ! -f $CURR_DIR/../../../../release/TDengine-server-$VERSION-Linux-x64.tar.gz ]; then if [ ! -e $CURR_DIR/../../../../release/TDengine-server-$VERSION-Linux-x64.tar.gz ]; then
echo "no TDengine install package found" echo "no TDengine install package found"
exit 1 exit 1
fi
if [ ! -e $CURR_DIR/../../../../release/TDengine-arbitrator-$VERSION-Linux-x64.tar.gz ]; then
echo "no arbitrator install package found"
exit 1
fi
cd $CURR_DIR/../../../../release
mv TDengine-server-$VERSION-Linux-x64.tar.gz $DOCKER_DIR
mv TDengine-arbitrator-$VERSION-Linux-x64.tar.gz $DOCKER_DIR
fi fi
cd $CURR_DIR/../../../../release
mv TDengine-server-$VERSION-Linux-x64.tar.gz $DOCKER_DIR
rm -rf $DOCKER_DIR/*.yml rm -rf $DOCKER_DIR/*.yml
cd $CURR_DIR cd $CURR_DIR
cp docker-compose.yml $DOCKER_DIR cp *.yml $DOCKER_DIR
cp Dockerfile $DOCKER_DIR cp Dockerfile $DOCKER_DIR
if [ $NUM_OF_NODES -eq 4 ]; then
cp ../node4.yml $DOCKER_DIR
fi
if [ $NUM_OF_NODES -eq 5 ]; then
cp ../node5.yml $DOCKER_DIR
fi
} }
function clusterUp { function clusterUp {
echo "docker compose start"
cd $DOCKER_DIR cd $DOCKER_DIR
if [ $NUM_OF_NODES -eq 2 ]; then
echo "create 2 dnodes"
PACKAGE=TDengine-server-$VERSION-Linux-x64.tar.gz DIR=TDengine-server-$VERSION DIR2=TDengine-arbitrator-$VERSION VERSION=$VERSION docker-compose up -d
fi
if [ $NUM_OF_NODES -eq 3 ]; then if [ $NUM_OF_NODES -eq 3 ]; then
PACKAGE=TDengine-server-$VERSION-Linux-x64.tar.gz DIR=TDengine-server-$VERSION VERSION=$VERSION docker-compose up -d PACKAGE=TDengine-server-$VERSION-Linux-x64.tar.gz DIR=TDengine-server-$VERSION DIR2=TDengine-arbitrator-$VERSION VERSION=$VERSION docker-compose -f docker-compose.yml -f node3.yml up -d
fi fi
if [ $NUM_OF_NODES -eq 4 ]; then if [ $NUM_OF_NODES -eq 4 ]; then
PACKAGE=TDengine-server-$VERSION-Linux-x64.tar.gz DIR=TDengine-server-$VERSION VERSION=$VERSION docker-compose -f docker-compose.yml -f node4.yml up -d PACKAGE=TDengine-server-$VERSION-Linux-x64.tar.gz DIR=TDengine-server-$VERSION DIR2=TDengine-arbitrator-$VERSION VERSION=$VERSION docker-compose -f docker-compose.yml -f node3.yml -f node4.yml up -d
fi fi
if [ $NUM_OF_NODES -eq 5 ]; then if [ $NUM_OF_NODES -eq 5 ]; then
PACKAGE=TDengine-server-$VERSION-Linux-x64.tar.gz DIR=TDengine-server-$VERSION VERSION=$VERSION docker-compose -f docker-compose.yml -f node4.yml -f node5.yml up -d PACKAGE=TDengine-server-$VERSION-Linux-x64.tar.gz DIR=TDengine-server-$VERSION DIR2=TDengine-arbitrator-$VERSION VERSION=$VERSION docker-compose -f docker-compose.yml -f node3.yml -f node4.yml -f node5.yml up -d
fi fi
echo "docker compose finish"
} }
cleanEnv createDIR
cleanEnv
addTaoscfg
prepareBuild prepareBuild
clusterUp clusterUp


@ -1,14 +1,16 @@
version: '3.7' version: '3.7'
services: services:
td2.0-node1: td2.0-node1:
build: build:
context: . context: .
args: args:
- PACKAGE=${PACKAGE} - PACKAGE=${PACKAGE}
- TARBITRATORPKG=${TARBITRATORPKG}
- EXTRACTDIR=${DIR} - EXTRACTDIR=${DIR}
- EXTRACTDIR2=${DIR2}
image: 'tdengine:${VERSION}' image: 'tdengine:${VERSION}'
container_name: 'td2.0-node1' container_name: 'tdnode1'
cap_add: cap_add:
- ALL - ALL
stdin_open: true stdin_open: true
@ -18,7 +20,15 @@ services:
command: > command: >
sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime &&
echo $TZ > /etc/timezone && echo $TZ > /etc/timezone &&
mkdir /coredump &&
echo 'kernel.core_pattern=/coredump/core_%e_%p' >> /etc/sysctl.conf &&
sysctl -p &&
exec my-main-application" exec my-main-application"
extra_hosts:
- "tdnode2:172.27.0.8"
- "tdnode3:172.27.0.9"
- "tdnode4:172.27.0.10"
- "tdnode5:172.27.0.11"
volumes: volumes:
# bind data directory # bind data directory
- type: bind - type: bind
@ -32,14 +42,18 @@ services:
- type: bind - type: bind
source: /data/node1/cfg source: /data/node1/cfg
target: /etc/taos target: /etc/taos
# bind core dump path
- type: bind
source: /data/node1/core
target: /coredump
- type: bind - type: bind
source: /data source: /data
target: /root target: /root
hostname: node1 hostname: tdnode1
networks: networks:
taos_update_net: taos_update_net:
ipv4_address: 172.27.0.7 ipv4_address: 172.27.0.7
command: taosd command: taosd
td2.0-node2: td2.0-node2:
build: build:
@ -48,7 +62,7 @@ services:
- PACKAGE=${PACKAGE} - PACKAGE=${PACKAGE}
- EXTRACTDIR=${DIR} - EXTRACTDIR=${DIR}
image: 'tdengine:${VERSION}' image: 'tdengine:${VERSION}'
container_name: 'td2.0-node2' container_name: 'tdnode2'
cap_add: cap_add:
- ALL - ALL
stdin_open: true stdin_open: true
@ -58,7 +72,15 @@ services:
command: > command: >
sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime &&
echo $TZ > /etc/timezone && echo $TZ > /etc/timezone &&
mkdir /coredump &&
echo 'kernel.core_pattern=/coredump/core_%e_%p' >> /etc/sysctl.conf &&
sysctl -p &&
exec my-main-application" exec my-main-application"
extra_hosts:
- "tdnode1:172.27.0.7"
- "tdnode3:172.27.0.9"
- "tdnode4:172.27.0.10"
- "tdnode5:172.27.0.11"
volumes: volumes:
# bind data directory # bind data directory
- type: bind - type: bind
@ -72,52 +94,19 @@ services:
- type: bind - type: bind
source: /data/node2/cfg source: /data/node2/cfg
target: /etc/taos target: /etc/taos
# bind core dump path
- type: bind
source: /data/node2/core
target: /coredump
- type: bind - type: bind
source: /data source: /data
target: /root target: /root
hostname: tdnode2
networks: networks:
taos_update_net: taos_update_net:
ipv4_address: 172.27.0.8 ipv4_address: 172.27.0.8
command: taosd command: taosd
td2.0-node3:
build:
context: .
args:
- PACKAGE=${PACKAGE}
- EXTRACTDIR=${DIR}
image: 'tdengine:${VERSION}'
container_name: 'td2.0-node3'
cap_add:
- ALL
stdin_open: true
tty: true
environment:
TZ: "Asia/Shanghai"
command: >
sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime &&
echo $TZ > /etc/timezone &&
exec my-main-application"
volumes:
# bind data directory
- type: bind
source: /data/node3/data
target: /var/lib/taos
# bind log directory
- type: bind
source: /data/node3/log
target: /var/log/taos
# bind configuration
- type: bind
source: /data/node3/cfg
target: /etc/taos
- type: bind
source: /data
target: /root
networks:
taos_update_net:
ipv4_address: 172.27.0.9
command: taosd
networks: networks:
taos_update_net: taos_update_net:


@ -0,0 +1,55 @@
{
"filetype": "insert",
"cfgdir": "/etc/taos",
"host": "127.0.0.1",
"port": 6030,
"user": "root",
"password": "taosdata",
"thread_count": 4,
"thread_count_create_tbl": 1,
"result_file": "./insert_res.txt",
"confirm_parameter_prompt": "no",
"databases": [{
"dbinfo": {
"name": "db",
"drop": "no",
"replica": 1,
"days": 2,
"cache": 16,
"blocks": 8,
"precision": "ms",
"keep": 365,
"minRows": 100,
"maxRows": 4096,
"comp":2,
"walLevel":1,
"quorum":1,
"fsync":3000,
"update": 0
},
"super_tables": [{
"name": "stb",
"child_table_exists":"no",
"childtable_count": 1,
"childtable_prefix": "stb_",
"auto_create_table": "no",
"data_source": "rand",
"insert_mode": "taosc",
"insert_rate": 0,
"insert_rows": 100000,
"multi_thread_write_one_tbl": "no",
"number_of_tbl_in_one_sql": 1,
"rows_per_tbl": 100,
"max_sql_len": 1024000,
"disorder_ratio": 0,
"disorder_range": 1000,
"timestamp_step": 10,
"start_timestamp": "2020-10-01 00:00:00.000",
"sample_format": "csv",
"sample_file": "./sample.csv",
"tags_file": "",
"columns": [{"type": "INT"}, {"type": "DOUBLE", "count":10}, {"type": "BINARY", "len": 16, "count":3}, {"type": "BINARY", "len": 32, "count":6}],
"tags": [{"type": "TINYINT", "count":2}, {"type": "BINARY", "len": 16, "count":5}]
}]
}]
}
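The JSON above describes a taosdemo insert workload. A hedged invocation sketch follows; the file name insert.json and the ability of this build's benchmark tool to take a JSON workload via -f are assumptions, mirroring the -f usage of the wrapper script further below:

```python
import subprocess

# Assumes the benchmark tool is on PATH and the config above is saved as insert.json.
subprocess.run(["taosdemo", "-f", "insert.json"], check=True)
```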


@ -0,0 +1,54 @@
version: '3.7'
services:
td2.0-node3:
build:
context: .
args:
- PACKAGE=${PACKAGE}
- EXTRACTDIR=${DIR}
image: 'tdengine:${VERSION}'
container_name: 'tdnode3'
cap_add:
- ALL
stdin_open: true
tty: true
environment:
TZ: "Asia/Shanghai"
command: >
sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime &&
echo $TZ > /etc/timezone &&
mkdir /coredump &&
echo 'kernel.core_pattern=/coredump/core_%e_%p' >> /etc/sysctl.conf &&
sysctl -p &&
exec my-main-application"
extra_hosts:
- "tdnode1:172.27.0.7"
- "tdnode2:172.27.0.8"
- "tdnode4:172.27.0.10"
- "tdnode5:172.27.0.11"
volumes:
# bind data directory
- type: bind
source: /data/node3/data
target: /var/lib/taos
# bind log directory
- type: bind
source: /data/node3/log
target: /var/log/taos
# bind configuration
- type: bind
source: /data/node3/cfg
target: /etc/taos
# bind core dump path
- type: bind
source: /data/node3/core
target: /coredump
- type: bind
source: /data
target: /root
hostname: tdnode3
networks:
taos_update_net:
ipv4_address: 172.27.0.9
command: taosd


@ -7,8 +7,8 @@ services:
args: args:
- PACKAGE=${PACKAGE} - PACKAGE=${PACKAGE}
- EXTRACTDIR=${DIR} - EXTRACTDIR=${DIR}
image: 'tdengine:2.0.13.1' image: 'tdengine:${VERSION}'
container_name: 'td2.0-node4' container_name: 'tdnode4'
cap_add: cap_add:
- ALL - ALL
stdin_open: true stdin_open: true
@ -18,7 +18,15 @@ services:
command: > command: >
sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime &&
echo $TZ > /etc/timezone && echo $TZ > /etc/timezone &&
mkdir /coredump &&
echo 'kernel.core_pattern=/coredump/core_%e_%p' >> /etc/sysctl.conf &&
sysctl -p &&
exec my-main-application" exec my-main-application"
extra_hosts:
- "tdnode2:172.27.0.8"
- "tdnode3:172.27.0.9"
- "tdnode4:172.27.0.10"
- "tdnode5:172.27.0.11"
volumes: volumes:
# bind data directory # bind data directory
- type: bind - type: bind
@ -32,9 +40,14 @@ services:
- type: bind - type: bind
source: /data/node4/cfg source: /data/node4/cfg
target: /etc/taos target: /etc/taos
# bind core dump path
- type: bind
source: /data/node4/core
target: /coredump
- type: bind - type: bind
source: /data source: /data
target: /root target: /root
hostname: tdnode4
networks: networks:
taos_update_net: taos_update_net:
ipv4_address: 172.27.0.10 ipv4_address: 172.27.0.10


@ -7,8 +7,8 @@ services:
args: args:
- PACKAGE=${PACKAGE} - PACKAGE=${PACKAGE}
- EXTRACTDIR=${DIR} - EXTRACTDIR=${DIR}
image: 'tdengine:2.0.13.1' image: 'tdengine:${VERSION}'
container_name: 'td2.0-node5' container_name: 'tdnode5'
cap_add: cap_add:
- ALL - ALL
stdin_open: true stdin_open: true
@ -18,7 +18,15 @@ services:
command: > command: >
sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && sh -c "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime &&
echo $TZ > /etc/timezone && echo $TZ > /etc/timezone &&
mkdir /coredump &&
echo 'kernel.core_pattern=/coredump/core_%e_%p' >> /etc/sysctl.conf &&
sysctl -p &&
exec my-main-application" exec my-main-application"
extra_hosts:
- "tdnode2:172.27.0.8"
- "tdnode3:172.27.0.9"
- "tdnode4:172.27.0.10"
- "tdnode5:172.27.0.11"
volumes: volumes:
# bind data directory # bind data directory
- type: bind - type: bind
@ -32,9 +40,14 @@ services:
- type: bind - type: bind
source: /data/node5/cfg source: /data/node5/cfg
target: /etc/taos target: /etc/taos
# bind core dump path
- type: bind
source: /data/node5/core
target: /coredump
- type: bind - type: bind
source: /data source: /data
target: /root target: /root
hostname: tdnode5
networks: networks:
taos_update_net: taos_update_net:
ipv4_address: 172.27.0.11 ipv4_address: 172.27.0.11

View File

@ -0,0 +1,142 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import os
import random
import argparse
class taosdemoxWrapper:
def __init__(self, host, metadata, database, tables, threads, configDir, replica,
columnType, columnsPerTable, rowsPerTable, disorderRatio, disorderRange, charTypeLen):
self.host = host
self.metadata = metadata
self.database = database
self.tables = tables
self.threads = threads
self.configDir = configDir
self.replica = replica
self.columnType = columnType
self.columnsPerTable = columnsPerTable
self.rowsPerTable = rowsPerTable
self.disorderRatio = disorderRatio
self.disorderRange = disorderRange
self.charTypeLen = charTypeLen
def run(self):
if self.metadata is None:
os.system("taosdemox -h %s -d %s -t %d -T %d -c %s -a %d -b %s -l %d -n %d -O %d -R %d -w %d -x -y"
% (self.host, self.database, self.tables, self.threads, self.configDir, self.replica, self.columnType,
self.columnsPerTable, self.rowsPerTable, self.disorderRatio, self.disorderRange, self.charTypeLen))
else:
os.system("taosdemox -f %s" % self.metadata)
parser = argparse.ArgumentParser()
parser.add_argument(
'-H',
'--host-name',
action='store',
default='tdnode1',
type=str,
help='host name to connect to (default: tdnode1)')
parser.add_argument(
'-f',
'--metadata',
action='store',
default=None,
type=str,
help='Meta data file that drives the run; when -f is given, all other options are ignored (default: None)')
parser.add_argument(
'-d',
'--db-name',
action='store',
default='test',
type=str,
help='Database name to be created (default: test)')
parser.add_argument(
'-t',
'--num-of-tables',
action='store',
default=10,
type=int,
help='Number of tables (default: 10)')
parser.add_argument(
'-T',
'--num-of-threads',
action='store',
default=10,
type=int,
help='Number of rest threads (default: 10)')
parser.add_argument(
'-c',
'--config-dir',
action='store',
default='/etc/taos/',
type=str,
help='Configuration directory. (Default is /etc/taos/)')
parser.add_argument(
'-a',
'--replica',
action='store',
default=1,
type=int,
help='Set the replica parameters of the database (default: 1, min: 1, max: 3)')
parser.add_argument(
'-b',
'--column-type',
action='store',
default='int',
type=str,
help='Data type of the columns, chosen from TINYINT,SMALLINT,INT,BIGINT,FLOAT,DOUBLE,BINARY,NCHAR,BOOL,TIMESTAMP (default: int)')
parser.add_argument(
'-l',
'--num-of-cols',
action='store',
default=10,
type=int,
help='The number of columns per record (default: 10)')
parser.add_argument(
'-n',
'--num-of-rows',
action='store',
default=1000,
type=int,
help='Number of rows per table (default: 1000)')
parser.add_argument(
'-O',
'--disorder-ratio',
action='store',
default=0,
type=int,
help='Ratio of out-of-order rows (0: in order, > 0: disorder ratio; default: 0)')
parser.add_argument(
'-R',
'--disorder-range',
action='store',
default=0,
type=int,
help='Time range of out-of-order data in ms (default: 0)')
parser.add_argument(
'-w',
'--char-type-length',
action='store',
default=16,
type=int,
help='Length of BINARY/NCHAR column values (default: 16)')
args = parser.parse_args()
taosdemox = taosdemoxWrapper(args.host_name, args.metadata, args.db_name, args.num_of_tables,
args.num_of_threads, args.config_dir, args.replica, args.column_type, args.num_of_cols,
args.num_of_rows, args.disorder_ratio, args.disorder_range, args.char_type_length)
taosdemox.run()
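Besides the argparse entry point, the wrapper class above can be used directly. A minimal sketch using only the constructor parameters defined in taosdemoxWrapper (all values are illustrative; taosdemox must be on the PATH for run() to shell out successfully):

```python
demo = taosdemoxWrapper(
    host="tdnode1",
    metadata=None,        # no meta file, so the command-line branch of run() is taken
    database="test",
    tables=10,
    threads=10,
    configDir="/etc/taos/",
    replica=1,
    columnType="int",
    columnsPerTable=10,
    rowsPerTable=1000,
    disorderRatio=0,
    disorderRange=0,
    charTypeLen=16)
demo.run()  # builds and executes the taosdemox command line
```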

View File

@@ -354,10 +354,11 @@ class ThreadCoordinator:
 # end, and maybe signal them to stop
 if isinstance(err, CrashGenError): # our own transition failure
 Logging.info("State transition error")
+# TODO: saw an error here once, let's print out stack info for err?
 traceback.print_stack()
 transitionFailed = True
 self._te = None # Not running any more
-self._execStats.registerFailure("State transition error")
+self._execStats.registerFailure("State transition error: {}".format(err))
 else:
 raise
 # return transitionFailed # Why did we have this??!!
@@ -882,8 +883,12 @@ class StateMechine:
 self._stateWeights = [1, 2, 10, 40]
 def init(self, dbc: DbConn): # late initailization, don't save the dbConn
-self._curState = self._findCurrentState(dbc) # starting state
-Logging.debug("Found Starting State: {}".format(self._curState))
+try:
+    self._curState = self._findCurrentState(dbc) # starting state
+except taos.error.ProgrammingError as err:
+    Logging.error("Failed to initialized state machine, cannot find current state: {}".format(err))
+    traceback.print_stack()
+    raise # re-throw
 # TODO: seems no lnoger used, remove?
 def getCurrentState(self):
@@ -951,6 +956,8 @@ class StateMechine:
 # We transition the system to a new state by examining the current state itself
 def transition(self, tasks, dbc: DbConn):
+global gSvcMgr
 if (len(tasks) == 0): # before 1st step, or otherwise empty
 Logging.debug("[STT] Starting State: {}".format(self._curState))
 return # do nothing
@@ -2370,7 +2377,7 @@ class MainExec:
 '-n',
 '--dynamic-db-table-names',
 action='store_true',
-help='Use non-fixed names for dbs/tables, useful for multi-instance executions (default: false)')
+help='Use non-fixed names for dbs/tables, for -b, useful for multi-instance executions (default: false)')
 parser.add_argument(
 '-o',
 '--num-dnodes',
View File

@@ -15,6 +15,7 @@ from util.log import *
 from .misc import Logging, CrashGenError, Helper, Dice
 import os
 import datetime
+import traceback
 # from .service_manager import TdeInstance
 class DbConn:
@@ -349,6 +350,7 @@ class DbConnNative(DbConn):
 def execute(self, sql):
 if (not self.isOpen):
+    traceback.print_stack()
 raise CrashGenError(
 "Cannot exec SQL unless db connection is open", CrashGenError.DB_CONNECTION_NOT_OPEN)
 Logging.debug("[SQL] Executing SQL: {}".format(sql))
@@ -361,6 +363,7 @@ class DbConnNative(DbConn):
 def query(self, sql): # return rows affected
 if (not self.isOpen):
+    traceback.print_stack()
 raise CrashGenError(
 "Cannot query database until connection is open, restarting?", CrashGenError.DB_CONNECTION_NOT_OPEN)
 Logging.debug("[SQL] Executing SQL: {}".format(sql))
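The two additions above follow one pattern: when the connection guard trips, print the current call stack before raising the typed error, so the log shows which task hit a closed connection. A standalone sketch of that pattern (the exception class and error code below are stand-ins, not the real crash_gen types):

```python
import traceback

DB_CONNECTION_NOT_OPEN = 0x991  # hypothetical error code, for illustration only

class GuardError(Exception):
    """Stand-in for CrashGenError: a message plus a numeric error code."""
    def __init__(self, msg, errno=None):
        super().__init__(msg)
        self.errno = errno

class Conn:
    def __init__(self):
        self.isOpen = False

    def execute(self, sql):
        if not self.isOpen:
            traceback.print_stack()   # record who tried to use the closed connection
            raise GuardError("Cannot exec SQL unless db connection is open",
                             DB_CONNECTION_NOT_OPEN)
        # real execution would go here
```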

View File

@@ -66,7 +66,7 @@ python3 ./test.py -f tag_lite/int.py
 python3 ./test.py -f tag_lite/set.py
 python3 ./test.py -f tag_lite/smallint.py
 python3 ./test.py -f tag_lite/tinyint.py
+python3 ./test.py -f tag_lite/alter_tag.py
 #python3 ./test.py -f dbmgmt/database-name-boundary.py
 python3 ./test.py -f import_merge/importBlock1HO.py

View File

@ -0,0 +1,56 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
from util.log import *
from util.cases import *
from util.sql import *
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
def run(self):
tdSql.prepare()
ret = tdSql.execute(
'create table tb (ts timestamp, speed int unsigned)')
insertRows = 10
tdLog.info("insert %d rows" % (insertRows))
for i in range(0, insertRows):
ret = tdSql.execute(
'insert into tb values (now + %dm, %d)' %
(i, i))
tdLog.info("insert earlier data")
tdSql.execute('insert into tb values (now - 5m , 10)')
tdSql.execute('insert into tb values (now - 6m , 10)')
tdSql.execute('insert into tb values (now - 7m , 10)')
tdSql.execute('insert into tb values (now - 8m , 4294967294)')
tdSql.error('insert into tb values (now - 9m, -1)')
tdSql.error('insert into tb values (now - 9m, 4294967295)')
tdSql.query("select * from tb")
tdSql.checkRows(insertRows + 4)
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())
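The case above pins down the accepted range of an INT UNSIGNED column: values from 0 to 4294967294 are stored, while -1 and 4294967295 are rejected. The same check can be reproduced through the Python connector; a sketch is below (connection parameters and database name are placeholders):

```python
import taos

# Placeholder connection parameters; adjust to the actual deployment.
conn = taos.connect(host="localhost", user="root", password="taosdata", config="/etc/taos")
cur = conn.cursor()

cur.execute("create database if not exists utest")
cur.execute("use utest")
cur.execute("create table if not exists tb (ts timestamp, speed int unsigned)")

cur.execute("insert into tb values (now, 0)")                # lower bound, accepted
cur.execute("insert into tb values (now + 1m, 4294967294)")  # upper bound, accepted

for bad in ("-1", "4294967295"):                             # out of range, rejected
    try:
        cur.execute("insert into tb values (now + 2m, %s)" % bad)
    except Exception as err:
        print("rejected as expected:", err)
```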

View File

@@ -16,10 +16,10 @@ python3 ./test.py -f insert/nchar.py
 python3 ./test.py -f insert/nchar-unicode.py
 python3 ./test.py -f insert/multi.py
 python3 ./test.py -f insert/randomNullCommit.py
-#python3 insert/retentionpolicy.py
+python3 insert/retentionpolicy.py
 python3 ./test.py -f insert/alterTableAndInsert.py
 python3 ./test.py -f insert/insertIntoTwoTables.py
-#python3 ./test.py -f insert/before_1970.py
+python3 ./test.py -f insert/before_1970.py
 python3 bug2265.py
 #table

View File

@@ -23,4 +23,11 @@ python3 ./test.py -f functions/function_sum.py -r 1
 python3 ./test.py -f functions/function_top.py -r 1
 python3 ./test.py -f functions/function_twa.py -r 1
 python3 ./test.py -f functions/function_twa_test2.py
-python3 ./test.py -f functions/function_stddev_td2555.pyhao
+python3 ./test.py -f functions/function_stddev_td2555.py
+python3 ./test.py -f insert/metadataUpdate.py
+python3 ./test.py -f tools/taosdemoTest2.py
+python3 ./test.py -f query/last_cache.py
+python3 ./test.py -f query/last_row_cache.py
+python3 ./test.py -f account/account_create.py
+python3 ./test.py -f alter/alter_table.py
+python3 ./test.py -f query/queryGroupbySort.py

View File

@ -0,0 +1,52 @@
###################################################################
# Copyright (c) 2016 by TAOS Technologies, Inc.
# All rights reserved.
#
# This file is proprietary and confidential to TAOS Technologies.
# No part of this file may be reproduced, stored, transmitted,
# disclosed or used in any form or by any means other than as
# expressly provided by the written permission from Jianhui Tao
#
###################################################################
# -*- coding: utf-8 -*-
import sys
import taos
from util.log import tdLog
from util.cases import tdCases
from util.sql import tdSql
from util.dnodes import tdDnodes
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug("start to execute %s" % __file__)
tdSql.init(conn.cursor(), logSql)
self.ts = 1538548685000
def run(self):
tdSql.prepare()
print("==============step1")
tdSql.execute(
"CREATE TABLE IF NOT EXISTS ampere (ts TIMESTAMP(8),ampere DOUBLE(8)) TAGS (device_name BINARY(50),build_id BINARY(50),project_id BINARY(50),alias BINARY(50))")
tdSql.execute("insert into d1001 using ampere tags('test', '2', '2', '2') VALUES (now, 123)")
tdSql.execute("ALTER TABLE ampere ADD TAG variable_id BINARY(50)")
print("==============step2")
tdSql.execute("insert into d1002 using ampere tags('test', '2', '2', '2', 'test') VALUES (now, 124)")
tdSql.query("select * from ampere")
tdSql.checkRows(2)
tdSql.checkData(0, 6, None)
tdSql.checkData(1, 6, 'test')
def stop(self):
tdSql.close()
tdLog.success("%s successfully executed" % __file__)
tdCases.addWindows(__file__, TDTestCase())
tdCases.addLinux(__file__, TDTestCase())

View File

@ -0,0 +1,162 @@
system sh/stop_dnodes.sh
system sh/deploy.sh -n dnode1 -i 1
system sh/cfg.sh -n dnode1 -c walLevel -v 0
system sh/cfg.sh -n dnode1 -c maxtablesPerVnode -v 2
system sh/exec.sh -n dnode1 -s start
sleep 100
sql connect
print ======================== dnode1 start
$db = testdb
sql create database $db
sql use $db
sql create stable st2 (ts timestamp, f1 int) tags (id int, t1 int, t2 nchar(4), t3 double)
sql insert into tb1 using st2 (id, t1) tags(1,2) values (now, 1)
sql select id,t1,t2,t3 from tb1
if $rows != 1 then
return -1
endi
if $data00 != 1 then
return -1
endi
if $data01 != 2 then
return -1
endi
if $data02 != NULL then
return -1
endi
if $data03 != NULL then
return -1
endi
sql create table tb2 using st2 (t2,t3) tags ("12",22.0)
sql select id,t1,t2,t3 from tb2;
if $rows != 1 then
return -1
endi
if $data00 != NULL then
return -1
endi
if $data01 != NULL then
return -1
endi
if $data02 != 12 then
return -1
endi
if $data03 != 22.000000000 then
return -1
endi
sql create table tb3 using st2 tags (1,2,"3",33.0);
sql select id,t1,t2,t3 from tb3;
if $rows != 1 then
return -1
endi
if $data00 != 1 then
return -1
endi
if $data01 != 2 then
return -1
endi
if $data02 != 3 then
return -1
endi
if $data03 != 33.000000000 then
return -1
endi
sql insert into tb4 using st2 tags(1,2,"33",44.0) values (now, 1);
sql select id,t1,t2,t3 from tb4;
if $rows != 1 then
return -1
endi
if $data00 != 1 then
return -1
endi
if $data01 != 2 then
return -1
endi
if $data02 != 33 then
return -1
endi
if $data03 != 44.000000000 then
return -1
endi
sql_error create table tb5 using st2() tags (3,3,"3",33.0);
sql_error create table tb6 using st2 (id,t1) tags (3,3,"3",33.0);
sql_error create table tb7 using st2 (id,t1) tags (3);
sql_error create table tb8 using st2 (ide) tags (3);
sql_error create table tb9 using st2 (id);
sql_error create table tb10 using st2 (id t1) tags (1,1);
sql_error create table tb10 using st2 (id,,t1) tags (1,1,1);
sql_error create table tb11 using st2 (id,t1,) tags (1,1,1);
sql create table tb12 using st2 (t1,id) tags (2,1);
sql select id,t1,t2,t3 from tb12;
if $rows != 1 then
return -1
endi
if $data00 != 1 then
return -1
endi
if $data01 != 2 then
return -1
endi
if $data02 != NULL then
return -1
endi
if $data03 != NULL then
return -1
endi
sql create table tb13 using st2 ("t1",'id') tags (2,1);
sql select id,t1,t2,t3 from tb13;
if $rows != 1 then
return -1
endi
if $data00 != 1 then
return -1
endi
if $data01 != 2 then
return -1
endi
if $data02 != NULL then
return -1
endi
if $data03 != NULL then
return -1
endi
system sh/exec.sh -n dnode1 -s stop -x SIGINT
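The script above exercises creating subtables with only a subset of the super table's tags; any tag not named in the tag column list comes back as NULL. The same statements can be issued through the Python connector, for example (connection parameters and database name are placeholders):

```python
import taos

conn = taos.connect(host="localhost", config="/etc/taos")  # placeholder parameters
cur = conn.cursor()

cur.execute("create database if not exists tagdb")
cur.execute("use tagdb")
cur.execute("create stable st2 (ts timestamp, f1 int) tags (id int, t1 int, t2 nchar(4), t3 double)")

# Name only the tags to set; the remaining tags (t2, t3 here) stay NULL.
cur.execute("insert into tb1 using st2 (id, t1) tags (1, 2) values (now, 1)")
cur.execute("create table tb2 using st2 (t2, t3) tags ('12', 22.0)")

cur.execute("select id, t1, t2, t3 from tb1")
print(cur.fetchall())  # expect one row: id=1, t1=2, t2=None, t3=None
```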

View File

@@ -72,4 +72,3 @@ cd ../../../debug; make
 ./test.sh -f unique/cluster/cache.sim
 ./test.sh -f unique/cluster/vgroup100.sim
-./test.sh -f unique/column/replica3.sim

View File

@@ -35,4 +35,12 @@
 ./test.sh -f general/stable/refcount.sim
 ./test.sh -f general/stable/show.sim
 ./test.sh -f general/stable/values.sim
 ./test.sh -f general/stable/vnode3.sim
+./test.sh -f unique/column/replica3.sim
+./test.sh -f issue/TD-2713.sim
+./test.sh -f general/parser/select_distinct_tag.sim
+./test.sh -f unique/mnode/mgmt30.sim
+./test.sh -f issue/TD-2677.sim
+./test.sh -f issue/TD-2680.sim
+./test.sh -f unique/dnode/lossdata.sim
View File

@@ -81,7 +81,9 @@ char *simParseHostName(char *varName) {
 }
 bool simSystemInit() {
-  taos_init();
+  if (taos_init()) {
+    return false;
+  }
   taosGetFqdn(simHostName);
   simInitsimCmdList();
   memset(simScriptList, 0, sizeof(SScript *) * MAX_MAIN_SCRIPT_NUM);