Merge branch 'master' into cq

commit 7617a973f8
@@ -94,7 +94,7 @@ def pre_test(){
 make > /dev/null
 make install > /dev/null
 cd ${WKC}/tests
-pip3 install ${WKC}/src/connector/python/linux/python3/
+pip3 install ${WKC}/src/connector/python/
 '''
 return 1
 }
@@ -399,27 +399,22 @@ For Python connector usage, see the [video tutorial](https://www.taosdata.com/blog/2020/
 
 #### Linux
 
-Users can find the connector installation packages for python2 and python3 under the src/connector/python folder of the source code (or under /connector/python in the tar.gz package). They can be installed with the pip command:
+Users can find the connector installation package under the src/connector/python folder of the source code (or under /connector/python in the tar.gz package). It can be installed with the pip command:
 
-`pip install src/connector/python/linux/python2/`
+`pip install src/connector/python/`
 
 or
 
-`pip3 install src/connector/python/linux/python3/`
+`pip3 install src/connector/python/`
 
 #### Windows
 With the Windows TDengine client installed, copy the file "C:\TDengine\driver\taos.dll" to the "C:\windows\system32" directory, then open a Windows <em>cmd</em> command-line window
 ```cmd
-cd C:\TDengine\connector\python\windows
-python -m pip install python2\
-```
-or
-```cmd
-cd C:\TDengine\connector\python\windows
-python -m pip install python3\
+cd C:\TDengine\connector\python
+python -m pip install .
 ```
 
-* If the machine does not have the pip command, users can copy the taos folder under src/connector/python/python3 or src/connector/python/python2 into the application directory and use it from there.
+* If the machine does not have the pip command, users can copy the taos folder under src/connector/python into the application directory and use it from there.
 For the Windows client, after installing the TDengine Windows client, simply copy C:\TDengine\driver\taos.dll to the C:\windows\system32 directory.
 
 ### Usage
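Either command installs the `taos` package into the active Python environment. A minimal usage sketch, assuming a TDengine server reachable on localhost with the default root/taosdata account (the host and credentials below are illustrative only):

```python
# Minimal connectivity check for the installed connector.
# Assumes a local TDengine server and the default root/taosdata account.
import taos

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
cursor = conn.cursor()
cursor.execute("show databases")
for row in cursor.fetchall():
    print(row)
cursor.close()
conn.close()
```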
@@ -156,20 +156,11 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
 fi
 cp -r ${connector_dir}/python ${install_dir}/connector
 
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python2/taos/cinterface.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python3/taos/cinterface.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python2/taos/cinterface.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python3/taos/cinterface.py
+sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/taos/cinterface.py
 
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python2/taos/subscription.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python3/taos/subscription.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python2/taos/subscription.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python3/taos/subscription.py
+sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/taos/subscription.py
 
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python2/taos/connection.py
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python3/taos/connection.py
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python2/taos/connection.py
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python3/taos/connection.py
+sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/taos/connection.py
 fi
 # Copy release note
 # cp ${script_dir}/release_note ${install_dir}
@@ -179,20 +179,11 @@ if [[ "$pagMode" != "lite" ]] && [[ "$cpuType" != "aarch32" ]]; then
 fi
 cp -r ${connector_dir}/python ${install_dir}/connector/
 
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python2/taos/cinterface.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python3/taos/cinterface.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python2/taos/cinterface.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python3/taos/cinterface.py
+sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/taos/cinterface.py
 
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python2/taos/subscription.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python3/taos/subscription.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python2/taos/subscription.py
-sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python3/taos/subscription.py
+sed -i '/password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/taos/subscription.py
 
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python2/taos/connection.py
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/linux/python3/taos/connection.py
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python2/taos/connection.py
-sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/windows/python3/taos/connection.py
+sed -i '/self._password/ {s/taosdata/powerdb/g}' ${install_dir}/connector/python/taos/connection.py
 fi
 # Copy release note
 # cp ${script_dir}/release_note ${install_dir}
@@ -1348,30 +1348,27 @@ int tsParseSql(SSqlObj *pSql, bool initial) {
     }
 
     // make a backup as tsParseInsertSql may modify the string
-    char* sqlstr = strdup(pSql->sqlstr);
     ret = tsParseInsertSql(pSql);
-    if ((sqlstr == NULL) || (pSql->parseRetry >= 1) ||
-        (ret != TSDB_CODE_TSC_SQL_SYNTAX_ERROR && ret != TSDB_CODE_TSC_INVALID_SQL)) {
-      free(sqlstr);
+    if ((pSql->parseRetry >= 1) || (ret != TSDB_CODE_TSC_SQL_SYNTAX_ERROR && ret != TSDB_CODE_TSC_INVALID_SQL)) {
     } else {
       tscResetSqlCmd(pCmd, true);
-      free(pSql->sqlstr);
-      pSql->sqlstr = sqlstr;
       pSql->parseRetry++;
       if ((ret = tsInsertInitialCheck(pSql)) == TSDB_CODE_SUCCESS) {
         ret = tsParseInsertSql(pSql);
       }
     }
   } else {
-    SSqlInfo SQLInfo = qSqlParse(pSql->sqlstr);
-    ret = tscToSQLCmd(pSql, &SQLInfo);
-    if (ret == TSDB_CODE_TSC_INVALID_SQL && pSql->parseRetry == 0 && SQLInfo.type == TSDB_SQL_NULL) {
+    SSqlInfo sqlInfo = qSqlParse(pSql->sqlstr);
+    ret = tscToSQLCmd(pSql, &sqlInfo);
+    if (ret == TSDB_CODE_TSC_INVALID_SQL && pSql->parseRetry == 0/* && sqlInfo.type == TSDB_SQL_NULL*/) {
+      tscDebug("0x%"PRIx64 " parse sql failed, retry again after clear local meta cache", pSql->self);
       tscResetSqlCmd(pCmd, true);
       pSql->parseRetry++;
-      ret = tscToSQLCmd(pSql, &SQLInfo);
+      ret = tscToSQLCmd(pSql, &sqlInfo);
     }
 
-    SqlInfoDestroy(&SQLInfo);
+    SqlInfoDestroy(&sqlInfo);
   }
 
   /*
@@ -361,11 +361,18 @@ int32_t tscToSQLCmd(SSqlObj* pSql, struct SSqlInfo* pInfo) {
       const char* msg2 = "name too long";
 
       SCreateDbInfo* pCreateDB = &(pInfo->pMiscInfo->dbOpt);
-      if (tscValidateName(&pCreateDB->dbname) != TSDB_CODE_SUCCESS) {
+      if (pCreateDB->dbname.n >= TSDB_DB_NAME_LEN) {
+        return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg2);
+      }
+
+      char buf[TSDB_DB_NAME_LEN] = {0};
+      SStrToken token = taosTokenDup(&pCreateDB->dbname, buf, tListLen(buf));
+
+      if (tscValidateName(&token) != TSDB_CODE_SUCCESS) {
         return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg1);
       }
 
-      int32_t ret = tNameSetDbName(&pTableMetaInfo->name, getAccountId(pSql), &(pCreateDB->dbname));
+      int32_t ret = tNameSetDbName(&pTableMetaInfo->name, getAccountId(pSql), &token);
       if (ret != TSDB_CODE_SUCCESS) {
         return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg2);
       }
@@ -2517,7 +2517,7 @@ static int32_t getTableMetaFromMnode(SSqlObj *pSql, STableMetaInfo *pTableMetaIn
   pNew->fp = tscTableMetaCallBack;
   pNew->param = (void *)pSql->self;
 
-  tscDebug("0x%"PRIx64" metaRid from %" PRId64 " to %" PRId64 , pSql->self, pSql->metaRid, pNew->self);
+  tscDebug("0x%"PRIx64" metaRid from %" PRId64 " to 0x%" PRIx64 , pSql->self, pSql->metaRid, pNew->self);
 
   pSql->metaRid = pNew->self;
 
@@ -46,7 +46,7 @@ char tsEmail[TSDB_FQDN_LEN] = {0};
 int32_t tsDnodeId = 0;
 
 // common
-int32_t tsRpcTimer = 1000;
+int32_t tsRpcTimer = 300;
 int32_t tsRpcMaxTime = 600; // seconds;
 int32_t tsMaxShellConns = 50000;
 int32_t tsMaxConnections = 5000;
@@ -1 +1 @@
-Subproject commit 8ce6d86558afc8c0b50c10f990fd2b4270cf06fc
+Subproject commit 7a26c432f8b4203e42344ff3290b9b9b01b983d5
@@ -0,0 +1,154 @@
+
+# Created by https://www.toptal.com/developers/gitignore/api/python
+# Edit at https://www.toptal.com/developers/gitignore?templates=python
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+pytestdebug.log
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+doc/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+#poetry.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+# .env
+.env/
+.venv/
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+pythonenv*
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# operating system-related files
+# file properties cache/storage on macOS
+*.DS_Store
+# thumbnail cache on Windows
+Thumbs.db
+
+# profiling data
+.prof
+
+
+# End of https://www.toptal.com/developers/gitignore/api/python
@@ -0,0 +1,17 @@
+# TDengine Connector for Python
+
+The [TDengine] connector for Python enables Python programs to access TDengine through an API that is compliant with the Python DB API 2.0 (PEP-249). It uses the TDengine C client library for client/server communication.
+
+## Install
+
+```sh
+pip install git+https://github.com/taosdata/TDengine-connector-python
+```
+
+## Source Code
+
+The [TDengine] connector for Python source code is hosted on [GitHub](https://github.com/taosdata/TDengine-connector-python).
+
+## License - AGPL
+
+Same license as [TDengine](https://github.com/taosdata/TDengine).
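The PEP-249 surface described above can be exercised with a short sketch like the following; the database and table names are illustrative, and a local server with the default root/taosdata account is assumed:

```python
# PEP-249 style sketch: create, insert, query, and read column metadata.
# "demo" and "t" are illustrative names; a local TDengine server with the
# default root/taosdata account is assumed.
import taos

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
cursor = conn.cursor()

cursor.execute("create database if not exists demo")
cursor.execute("create table if not exists demo.t (ts timestamp, v int)")
cursor.execute("insert into demo.t values (now, 1)")

cursor.execute("select * from demo.t")
print([col[0] for col in cursor.description])  # column names, per PEP-249
print(cursor.fetchall())

cursor.close()
conn.close()
```

The examples/query.py added in this commit shows the same pattern against the system log database.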
@@ -0,0 +1,12 @@
+import taos
+
+conn = taos.connect(host='127.0.0.1',
+                    user='root',
+                    password='taosdata',
+                    database='log')
+cursor = conn.cursor()
+
+sql = "select * from log.log limit 10"
+cursor.execute(sql)
+for row in cursor:
+    print(row)
@@ -1 +0,0 @@
-# TDengine python client interface
@@ -1,20 +0,0 @@
-import setuptools
-
-with open("README.md", "r") as fh:
-    long_description = fh.read()
-
-setuptools.setup(
-    name="taos",
-    version="2.0.9",
-    author="Taosdata Inc.",
-    author_email="support@taosdata.com",
-    description="TDengine python client package",
-    long_description=long_description,
-    long_description_content_type="text/markdown",
-    url="https://github.com/pypa/sampleproject",
-    packages=setuptools.find_packages(),
-    classifiers=[
-        "Programming Language :: Python :: 2",
-        "Operating System :: Linux",
-    ],
-)
@ -1,648 +0,0 @@
|
||||||
import ctypes
|
|
||||||
from .constants import FieldType
|
|
||||||
from .error import *
|
|
||||||
import math
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
|
|
||||||
def _convert_millisecond_to_datetime(milli):
|
|
||||||
return datetime.datetime.fromtimestamp(milli / 1000.0)
|
|
||||||
|
|
||||||
|
|
||||||
def _convert_microsecond_to_datetime(micro):
|
|
||||||
return datetime.datetime.fromtimestamp(micro / 1000000.0)
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bool row to python row
|
|
||||||
"""
|
|
||||||
_timestamp_converter = _convert_millisecond_to_datetime
|
|
||||||
if micro:
|
|
||||||
_timestamp_converter = _convert_microsecond_to_datetime
|
|
||||||
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_int64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_int64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bool row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_byte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_bool))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C tinyint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_tinyint_unsigned_to_python(
|
|
||||||
data,
|
|
||||||
num_of_rows,
|
|
||||||
nbytes=None,
|
|
||||||
micro=False):
|
|
||||||
"""Function to convert C tinyint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ubyte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ubyte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C smallint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_smallint_unsigned_to_python(
|
|
||||||
data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C smallint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ushort))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ushort))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C int row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C int row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bigint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bigint_unsigned_to_python(
|
|
||||||
data,
|
|
||||||
num_of_rows,
|
|
||||||
nbytes=None,
|
|
||||||
micro=False):
|
|
||||||
"""Function to convert C bigint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C float row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C double row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C binary row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C nchar row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
if num_of_rows >= 0:
|
|
||||||
tmpstr = ctypes.c_char_p(data)
|
|
||||||
res.append(tmpstr.value.decode())
|
|
||||||
else:
|
|
||||||
res.append((ctypes.cast(data + nbytes * i,
|
|
||||||
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C binary row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
if num_of_rows > 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
rbyte = ctypes.cast(
|
|
||||||
data + nbytes * i,
|
|
||||||
ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:1].pop()
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode()[0:rbyte])
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
rbyte = ctypes.cast(
|
|
||||||
data + nbytes * i,
|
|
||||||
ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:1].pop()
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode()[0:rbyte])
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C nchar row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
if num_of_rows >= 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode())
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
res.append((ctypes.cast(data + nbytes * i + 2,
|
|
||||||
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
_CONVERT_FUNC = {
|
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
|
||||||
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
|
||||||
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
|
||||||
FieldType.C_INT: _crow_int_to_python,
|
|
||||||
FieldType.C_BIGINT: _crow_bigint_to_python,
|
|
||||||
FieldType.C_FLOAT: _crow_float_to_python,
|
|
||||||
FieldType.C_DOUBLE: _crow_double_to_python,
|
|
||||||
FieldType.C_BINARY: _crow_binary_to_python,
|
|
||||||
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
|
||||||
FieldType.C_NCHAR: _crow_nchar_to_python,
|
|
||||||
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
|
||||||
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
|
||||||
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
|
||||||
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
|
||||||
}
|
|
||||||
|
|
||||||
_CONVERT_FUNC_BLOCK = {
|
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
|
||||||
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
|
||||||
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
|
||||||
FieldType.C_INT: _crow_int_to_python,
|
|
||||||
FieldType.C_BIGINT: _crow_bigint_to_python,
|
|
||||||
FieldType.C_FLOAT: _crow_float_to_python,
|
|
||||||
FieldType.C_DOUBLE: _crow_double_to_python,
|
|
||||||
FieldType.C_BINARY: _crow_binary_to_python_block,
|
|
||||||
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
|
||||||
FieldType.C_NCHAR: _crow_nchar_to_python_block,
|
|
||||||
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
|
||||||
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
|
||||||
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
|
||||||
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
|
||||||
}
|
|
||||||
|
|
||||||
# Corresponding TAOS_FIELD structure in C
|
|
||||||
|
|
||||||
|
|
||||||
class TaosField(ctypes.Structure):
|
|
||||||
_fields_ = [('name', ctypes.c_char * 65),
|
|
||||||
('type', ctypes.c_char),
|
|
||||||
('bytes', ctypes.c_short)]
|
|
||||||
|
|
||||||
# C interface class
|
|
||||||
|
|
||||||
|
|
||||||
class CTaosInterface(object):
|
|
||||||
|
|
||||||
libtaos = ctypes.CDLL('libtaos.so')
|
|
||||||
|
|
||||||
libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
|
|
||||||
libtaos.taos_init.restype = None
|
|
||||||
libtaos.taos_connect.restype = ctypes.c_void_p
|
|
||||||
#libtaos.taos_use_result.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_fetch_row.restype = ctypes.POINTER(ctypes.c_void_p)
|
|
||||||
libtaos.taos_errstr.restype = ctypes.c_char_p
|
|
||||||
libtaos.taos_subscribe.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_consume.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_fetch_lengths.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_free_result.restype = None
|
|
||||||
libtaos.taos_errno.restype = ctypes.c_int
|
|
||||||
libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
|
|
||||||
|
|
||||||
def __init__(self, config=None):
|
|
||||||
'''
|
|
||||||
Function to initialize the class
|
|
||||||
@host : str, hostname to connect
|
|
||||||
@user : str, username to connect to server
|
|
||||||
@password : str, password to connect to server
|
|
||||||
@db : str, default db to use when log in
|
|
||||||
@config : str, config directory
|
|
||||||
|
|
||||||
@rtype : None
|
|
||||||
'''
|
|
||||||
if config is None:
|
|
||||||
self._config = ctypes.c_char_p(None)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
self._config = ctypes.c_char_p(config.encode('utf-8'))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("config is expected as a str")
|
|
||||||
|
|
||||||
if config is not None:
|
|
||||||
CTaosInterface.libtaos.taos_options(3, self._config)
|
|
||||||
|
|
||||||
CTaosInterface.libtaos.taos_init()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def config(self):
|
|
||||||
""" Get current config
|
|
||||||
"""
|
|
||||||
return self._config
|
|
||||||
|
|
||||||
def connect(
|
|
||||||
self,
|
|
||||||
host=None,
|
|
||||||
user="root",
|
|
||||||
password="taosdata",
|
|
||||||
db=None,
|
|
||||||
port=0):
|
|
||||||
'''
|
|
||||||
Function to connect to server
|
|
||||||
|
|
||||||
@rtype: c_void_p, TDengine handle
|
|
||||||
'''
|
|
||||||
# host
|
|
||||||
try:
|
|
||||||
_host = ctypes.c_char_p(host.encode(
|
|
||||||
"utf-8")) if host is not None else ctypes.c_char_p(None)
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("host is expected as a str")
|
|
||||||
|
|
||||||
# user
|
|
||||||
try:
|
|
||||||
_user = ctypes.c_char_p(user.encode("utf-8"))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("user is expected as a str")
|
|
||||||
|
|
||||||
# password
|
|
||||||
try:
|
|
||||||
_password = ctypes.c_char_p(password.encode("utf-8"))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("password is expected as a str")
|
|
||||||
|
|
||||||
# db
|
|
||||||
try:
|
|
||||||
_db = ctypes.c_char_p(
|
|
||||||
db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("db is expected as a str")
|
|
||||||
|
|
||||||
# port
|
|
||||||
try:
|
|
||||||
_port = ctypes.c_int(port)
|
|
||||||
except TypeError:
|
|
||||||
raise TypeError("port is expected as an int")
|
|
||||||
|
|
||||||
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
|
|
||||||
_host, _user, _password, _db, _port))
|
|
||||||
|
|
||||||
if connection.value is None:
|
|
||||||
print('connect to TDengine failed')
|
|
||||||
raise ConnectionError("connect to TDengine failed")
|
|
||||||
# sys.exit(1)
|
|
||||||
# else:
|
|
||||||
# print('connect to TDengine success')
|
|
||||||
|
|
||||||
return connection
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def close(connection):
|
|
||||||
'''Close the TDengine handle
|
|
||||||
'''
|
|
||||||
CTaosInterface.libtaos.taos_close(connection)
|
|
||||||
#print('connection is closed')
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def query(connection, sql):
|
|
||||||
'''Run SQL
|
|
||||||
|
|
||||||
@sql: str, sql string to run
|
|
||||||
|
|
||||||
@rtype: 0 on success and -1 on failure
|
|
||||||
'''
|
|
||||||
try:
|
|
||||||
return CTaosInterface.libtaos.taos_query(
|
|
||||||
connection, ctypes.c_char_p(sql.encode('utf-8')))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("sql is expected as a string")
|
|
||||||
# finally:
|
|
||||||
# CTaosInterface.libtaos.close(connection)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def affectedRows(result):
|
|
||||||
"""The affected rows after runing query
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_affected_rows(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def subscribe(connection, restart, topic, sql, interval):
|
|
||||||
"""Create a subscription
|
|
||||||
@restart boolean,
|
|
||||||
@sql string, sql statement for data query, must be a 'select' statement.
|
|
||||||
@topic string, name of this subscription
|
|
||||||
"""
|
|
||||||
return ctypes.c_void_p(CTaosInterface.libtaos.taos_subscribe(
|
|
||||||
connection,
|
|
||||||
1 if restart else 0,
|
|
||||||
ctypes.c_char_p(topic.encode('utf-8')),
|
|
||||||
ctypes.c_char_p(sql.encode('utf-8')),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
interval))
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def consume(sub):
|
|
||||||
"""Consume data of a subscription
|
|
||||||
"""
|
|
||||||
result = ctypes.c_void_p(CTaosInterface.libtaos.taos_consume(sub))
|
|
||||||
fields = []
|
|
||||||
pfields = CTaosInterface.fetchFields(result)
|
|
||||||
for i in range(CTaosInterface.libtaos.taos_num_fields(result)):
|
|
||||||
fields.append({'name': pfields[i].name.decode('utf-8'),
|
|
||||||
'bytes': pfields[i].bytes,
|
|
||||||
'type': ord(pfields[i].type)})
|
|
||||||
return result, fields
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def unsubscribe(sub, keepProgress):
|
|
||||||
"""Cancel a subscription
|
|
||||||
"""
|
|
||||||
CTaosInterface.libtaos.taos_unsubscribe(sub, 1 if keepProgress else 0)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def useResult(result):
|
|
||||||
'''Use result after calling self.query
|
|
||||||
'''
|
|
||||||
fields = []
|
|
||||||
pfields = CTaosInterface.fetchFields(result)
|
|
||||||
for i in range(CTaosInterface.fieldsCount(result)):
|
|
||||||
fields.append({'name': pfields[i].name.decode('utf-8'),
|
|
||||||
'bytes': pfields[i].bytes,
|
|
||||||
'type': ord(pfields[i].type)})
|
|
||||||
|
|
||||||
return fields
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchBlock(result, fields):
|
|
||||||
pblock = ctypes.c_void_p(0)
|
|
||||||
num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
|
|
||||||
result, ctypes.byref(pblock))
|
|
||||||
if num_of_rows == 0:
|
|
||||||
return None, 0
|
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
|
||||||
result) == FieldType.C_TIMESTAMP_MICRO)
|
|
||||||
blocks = [None] * len(fields)
|
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
|
||||||
fieldLen = [
|
|
||||||
ele for ele in ctypes.cast(
|
|
||||||
fieldL, ctypes.POINTER(
|
|
||||||
ctypes.c_int))[
|
|
||||||
:len(fields)]]
|
|
||||||
for i in range(len(fields)):
|
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
|
|
||||||
raise DatabaseError("Invalid data type returned from database")
|
|
||||||
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
|
|
||||||
data, num_of_rows, fieldLen[i], isMicro)
|
|
||||||
|
|
||||||
return blocks, abs(num_of_rows)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchRow(result, fields):
|
|
||||||
pblock = ctypes.c_void_p(0)
|
|
||||||
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
|
|
||||||
if pblock:
|
|
||||||
num_of_rows = 1
|
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
|
||||||
result) == FieldType.C_TIMESTAMP_MICRO)
|
|
||||||
blocks = [None] * len(fields)
|
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
|
||||||
fieldLen = [
|
|
||||||
ele for ele in ctypes.cast(
|
|
||||||
fieldL, ctypes.POINTER(
|
|
||||||
ctypes.c_int))[
|
|
||||||
:len(fields)]]
|
|
||||||
for i in range(len(fields)):
|
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC:
|
|
||||||
raise DatabaseError(
|
|
||||||
"Invalid data type returned from database")
|
|
||||||
if data is None:
|
|
||||||
blocks[i] = [None]
|
|
||||||
else:
|
|
||||||
blocks[i] = _CONVERT_FUNC[fields[i]['type']](
|
|
||||||
data, num_of_rows, fieldLen[i], isMicro)
|
|
||||||
else:
|
|
||||||
return None, 0
|
|
||||||
return blocks, abs(num_of_rows)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def freeResult(result):
|
|
||||||
CTaosInterface.libtaos.taos_free_result(result)
|
|
||||||
result.value = None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fieldsCount(result):
|
|
||||||
return CTaosInterface.libtaos.taos_field_count(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchFields(result):
|
|
||||||
return CTaosInterface.libtaos.taos_fetch_fields(result)
|
|
||||||
|
|
||||||
# @staticmethod
|
|
||||||
# def fetchRow(result, fields):
|
|
||||||
# l = []
|
|
||||||
# row = CTaosInterface.libtaos.taos_fetch_row(result)
|
|
||||||
# if not row:
|
|
||||||
# return None
|
|
||||||
|
|
||||||
# for i in range(len(fields)):
|
|
||||||
# l.append(CTaosInterface.getDataValue(
|
|
||||||
# row[i], fields[i]['type'], fields[i]['bytes']))
|
|
||||||
|
|
||||||
# return tuple(l)
|
|
||||||
|
|
||||||
# @staticmethod
|
|
||||||
# def getDataValue(data, dtype, byte):
|
|
||||||
# '''
|
|
||||||
# '''
|
|
||||||
# if not data:
|
|
||||||
# return None
|
|
||||||
|
|
||||||
# if (dtype == CTaosInterface.TSDB_DATA_TYPE_BOOL):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TINYINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_SMALLINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_INT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BIGINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_FLOAT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_DOUBLE):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BINARY):
|
|
||||||
# return (ctypes.cast(data, ctypes.POINTER(ctypes.c_char))[0:byte]).rstrip('\x00')
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TIMESTAMP):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_NCHAR):
|
|
||||||
# return (ctypes.cast(data, ctypes.c_char_p).value).rstrip('\x00')
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def errno(result):
|
|
||||||
"""Return the error number.
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_errno(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def errStr(result):
|
|
||||||
"""Return the error styring
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
cinter = CTaosInterface()
|
|
||||||
conn = cinter.connect()
|
|
||||||
result = cinter.query(conn, 'show databases')
|
|
||||||
|
|
||||||
print('Query Affected rows: {}'.format(cinter.affectedRows(result)))
|
|
||||||
|
|
||||||
fields = CTaosInterface.useResult(result)
|
|
||||||
|
|
||||||
data, num_of_rows = CTaosInterface.fetchBlock(result, fields)
|
|
||||||
|
|
||||||
print(data)
|
|
||||||
|
|
||||||
cinter.freeResult(result)
|
|
||||||
cinter.close(conn)
|
|
|
@ -1,278 +0,0 @@
|
||||||
from .cinterface import CTaosInterface
|
|
||||||
from .error import *
|
|
||||||
from .constants import FieldType
|
|
||||||
|
|
||||||
|
|
||||||
class TDengineCursor(object):
|
|
||||||
"""Database cursor which is used to manage the context of a fetch operation.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
.description: Read-only attribute consists of 7-item sequences:
|
|
||||||
|
|
||||||
> name (mandatory)
|
|
||||||
> type_code (mandatory)
|
|
||||||
> display_size
|
|
||||||
> internal_size
|
|
||||||
> precision
|
|
||||||
> scale
|
|
||||||
> null_ok
|
|
||||||
|
|
||||||
This attribute will be None for operations that do not return rows or
|
|
||||||
if the cursor has not had an operation invoked via the .execute*() method yet.
|
|
||||||
|
|
||||||
.rowcount:This read-only attribute specifies the number of rows that the last
|
|
||||||
.execute*() produced (for DQL statements like SELECT) or affected
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, connection=None):
|
|
||||||
self._description = []
|
|
||||||
self._rowcount = -1
|
|
||||||
self._connection = None
|
|
||||||
self._result = None
|
|
||||||
self._fields = None
|
|
||||||
self._block = None
|
|
||||||
self._block_rows = -1
|
|
||||||
self._block_iter = 0
|
|
||||||
self._affected_rows = 0
|
|
||||||
self._logfile = ""
|
|
||||||
|
|
||||||
if connection is not None:
|
|
||||||
self._connection = connection
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def next(self):
|
|
||||||
if self._result is None or self._fields is None:
|
|
||||||
raise OperationalError("Invalid use of fetch iterator")
|
|
||||||
|
|
||||||
if self._block_rows <= self._block_iter:
|
|
||||||
block, self._block_rows = CTaosInterface.fetchRow(
|
|
||||||
self._result, self._fields)
|
|
||||||
if self._block_rows == 0:
|
|
||||||
raise StopIteration
|
|
||||||
self._block = list(map(tuple, zip(*block)))
|
|
||||||
self._block_iter = 0
|
|
||||||
|
|
||||||
data = self._block[self._block_iter]
|
|
||||||
self._block_iter += 1
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
@property
|
|
||||||
def description(self):
|
|
||||||
"""Return the description of the object.
|
|
||||||
"""
|
|
||||||
return self._description
|
|
||||||
|
|
||||||
@property
|
|
||||||
def rowcount(self):
|
|
||||||
"""Return the rowcount of the object
|
|
||||||
"""
|
|
||||||
return self._rowcount
|
|
||||||
|
|
||||||
@property
|
|
||||||
def affected_rows(self):
|
|
||||||
"""Return the affected_rows of the object
|
|
||||||
"""
|
|
||||||
return self._affected_rows
|
|
||||||
|
|
||||||
def callproc(self, procname, *args):
|
|
||||||
"""Call a stored database procedure with the given name.
|
|
||||||
|
|
||||||
Void functionality since no stored procedures.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def log(self, logfile):
|
|
||||||
self._logfile = logfile
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
"""Close the cursor.
|
|
||||||
"""
|
|
||||||
if self._connection is None:
|
|
||||||
return False
|
|
||||||
|
|
||||||
self._reset_result()
|
|
||||||
self._connection = None
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def execute(self, operation, params=None):
|
|
||||||
"""Prepare and execute a database operation (query or command).
|
|
||||||
"""
|
|
||||||
if not operation:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not self._connection:
|
|
||||||
# TODO : change the exception raised here
|
|
||||||
raise ProgrammingError("Cursor is not connected")
|
|
||||||
|
|
||||||
self._reset_result()
|
|
||||||
|
|
||||||
stmt = operation
|
|
||||||
if params is not None:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# global querySeqNum
|
|
||||||
# querySeqNum += 1
|
|
||||||
# localSeqNum = querySeqNum # avoid race condition
|
|
||||||
# print(" >> Exec Query ({}): {}".format(localSeqNum, str(stmt)))
|
|
||||||
self._result = CTaosInterface.query(self._connection._conn, stmt)
|
|
||||||
# print(" << Query ({}) Exec Done".format(localSeqNum))
|
|
||||||
if (self._logfile):
|
|
||||||
with open(self._logfile, "a") as logfile:
|
|
||||||
logfile.write("%s;\n" % operation)
|
|
||||||
|
|
||||||
errno = CTaosInterface.libtaos.taos_errno(self._result)
|
|
||||||
if errno == 0:
|
|
||||||
if CTaosInterface.fieldsCount(self._result) == 0:
|
|
||||||
self._affected_rows += CTaosInterface.affectedRows(
|
|
||||||
self._result)
|
|
||||||
return CTaosInterface.affectedRows(self._result)
|
|
||||||
else:
|
|
||||||
self._fields = CTaosInterface.useResult(
|
|
||||||
self._result)
|
|
||||||
return self._handle_result()
|
|
||||||
else:
|
|
||||||
raise ProgrammingError(
|
|
||||||
CTaosInterface.errStr(
|
|
||||||
self._result), errno)
|
|
||||||
|
|
||||||
def executemany(self, operation, seq_of_parameters):
|
|
||||||
"""Prepare a database operation (query or command) and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def fetchone(self):
|
|
||||||
"""Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def fetchmany(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def istype(self, col, dataType):
|
|
||||||
if (dataType.upper() == "BOOL"):
|
|
||||||
if (self._description[col][1] == FieldType.C_BOOL):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "TINYINT"):
|
|
||||||
if (self._description[col][1] == FieldType.C_TINYINT):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "TINYINT UNSIGNED"):
|
|
||||||
if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "SMALLINT"):
|
|
||||||
if (self._description[col][1] == FieldType.C_SMALLINT):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "SMALLINT UNSIGNED"):
|
|
||||||
if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "INT"):
|
|
||||||
if (self._description[col][1] == FieldType.C_INT):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "INT UNSIGNED"):
|
|
||||||
if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "BIGINT"):
|
|
||||||
if (self._description[col][1] == FieldType.C_BIGINT):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "BIGINT UNSIGNED"):
|
|
||||||
if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "FLOAT"):
|
|
||||||
if (self._description[col][1] == FieldType.C_FLOAT):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "DOUBLE"):
|
|
||||||
if (self._description[col][1] == FieldType.C_DOUBLE):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "BINARY"):
|
|
||||||
if (self._description[col][1] == FieldType.C_BINARY):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "TIMESTAMP"):
|
|
||||||
if (self._description[col][1] == FieldType.C_TIMESTAMP):
|
|
||||||
return True
|
|
||||||
if (dataType.upper() == "NCHAR"):
|
|
||||||
if (self._description[col][1] == FieldType.C_NCHAR):
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
def fetchall_row(self):
|
|
||||||
"""Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
|
|
||||||
"""
|
|
||||||
if self._result is None or self._fields is None:
|
|
||||||
raise OperationalError("Invalid use of fetchall")
|
|
||||||
|
|
||||||
buffer = [[] for i in range(len(self._fields))]
|
|
||||||
self._rowcount = 0
|
|
||||||
while True:
|
|
||||||
block, num_of_fields = CTaosInterface.fetchRow(
|
|
||||||
self._result, self._fields)
|
|
||||||
errno = CTaosInterface.libtaos.taos_errno(self._result)
|
|
||||||
if errno != 0:
|
|
||||||
raise ProgrammingError(
|
|
||||||
CTaosInterface.errStr(
|
|
||||||
self._result), errno)
|
|
||||||
if num_of_fields == 0:
|
|
||||||
break
|
|
||||||
self._rowcount += num_of_fields
|
|
||||||
for i in range(len(self._fields)):
|
|
||||||
buffer[i].extend(block[i])
|
|
||||||
return list(map(tuple, zip(*buffer)))
|
|
||||||
|
|
||||||
def fetchall(self):
|
|
||||||
if self._result is None or self._fields is None:
|
|
||||||
raise OperationalError("Invalid use of fetchall")
|
|
||||||
|
|
||||||
buffer = [[] for i in range(len(self._fields))]
|
|
||||||
self._rowcount = 0
|
|
||||||
while True:
|
|
||||||
block, num_of_fields = CTaosInterface.fetchBlock(
|
|
||||||
self._result, self._fields)
|
|
||||||
errno = CTaosInterface.libtaos.taos_errno(self._result)
|
|
||||||
if errno != 0:
|
|
||||||
raise ProgrammingError(
|
|
||||||
CTaosInterface.errStr(
|
|
||||||
self._result), errno)
|
|
||||||
if num_of_fields == 0:
|
|
||||||
break
|
|
||||||
self._rowcount += num_of_fields
|
|
||||||
for i in range(len(self._fields)):
|
|
||||||
buffer[i].extend(block[i])
|
|
||||||
return list(map(tuple, zip(*buffer)))
|
|
||||||
|
|
||||||
def nextset(self):
|
|
||||||
"""
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def setinputsize(self, sizes):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def setutputsize(self, size, column=None):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _reset_result(self):
|
|
||||||
"""Reset the result to unused version.
|
|
||||||
"""
|
|
||||||
self._description = []
|
|
||||||
self._rowcount = -1
|
|
||||||
if self._result is not None:
|
|
||||||
CTaosInterface.freeResult(self._result)
|
|
||||||
self._result = None
|
|
||||||
self._fields = None
|
|
||||||
self._block = None
|
|
||||||
self._block_rows = -1
|
|
||||||
self._block_iter = 0
|
|
||||||
self._affected_rows = 0
|
|
||||||
|
|
||||||
def _handle_result(self):
|
|
||||||
"""Handle the return result from query.
|
|
||||||
"""
|
|
||||||
self._description = []
|
|
||||||
for ele in self._fields:
|
|
||||||
self._description.append(
|
|
||||||
(ele['name'], ele['type'], None, None, None, None, False))
|
|
||||||
|
|
||||||
return self._result
|
|
|
@@ -1,12 +0,0 @@
-Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
-
-This program is free software: you can use, redistribute, and/or modify
-it under the terms of the GNU Affero General Public License, version 3
-or later ("AGPL"), as published by the Free Software Foundation.
-
-This program is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
-FITNESS FOR A PARTICULAR PURPOSE.
-
-You should have received a copy of the GNU Affero General Public License
-along with this program. If not, see <http://www.gnu.org/licenses/>.
@@ -1 +0,0 @@
-# TDengine python client interface
@@ -1,20 +0,0 @@
-import setuptools
-
-with open("README.md", "r") as fh:
-    long_description = fh.read()
-
-setuptools.setup(
-    name="taos",
-    version="2.0.9",
-    author="Taosdata Inc.",
-    author_email="support@taosdata.com",
-    description="TDengine python client package",
-    long_description=long_description,
-    long_description_content_type="text/markdown",
-    url="https://github.com/pypa/sampleproject",
-    packages=setuptools.find_packages(),
-    classifiers=[
-        "Programming Language :: Python :: 3",
-        "Operating System :: Linux",
-    ],
-)
@@ -1,25 +0,0 @@
-
-from .connection import TDengineConnection
-from .cursor import TDengineCursor
-from .error import Error
-
-# Globals
-threadsafety = 0
-paramstyle = 'pyformat'
-
-__all__ = ['connection', 'cursor']
-
-
-def connect(*args, **kwargs):
-    """ Function to return a TDengine connector object
-
-    Current supporting keyword parameters:
-    @dsn: Data source name as string
-    @user: Username as string(optional)
-    @password: Password as string(optional)
-    @host: Hostname(optional)
-    @database: Database name(optional)
-
-    @rtype: TDengineConnector
-    """
-    return TDengineConnection(*args, **kwargs)
@@ -1,95 +0,0 @@
from .cursor import TDengineCursor
from .subscription import TDengineSubscription
from .cinterface import CTaosInterface


class TDengineConnection(object):
    """ TDengine connection object
    """

    def __init__(self, *args, **kwargs):
        self._conn = None
        self._host = None
        self._user = "root"
        self._password = "taosdata"
        self._database = None
        self._port = 0
        self._config = None
        self._chandle = None

        self.config(**kwargs)

    def config(self, **kwargs):
        # host
        if 'host' in kwargs:
            self._host = kwargs['host']

        # user
        if 'user' in kwargs:
            self._user = kwargs['user']

        # password
        if 'password' in kwargs:
            self._password = kwargs['password']

        # database
        if 'database' in kwargs:
            self._database = kwargs['database']

        # port
        if 'port' in kwargs:
            self._port = kwargs['port']

        # config
        if 'config' in kwargs:
            self._config = kwargs['config']

        self._chandle = CTaosInterface(self._config)
        self._conn = self._chandle.connect(
            self._host,
            self._user,
            self._password,
            self._database,
            self._port)

    def close(self):
        """Close current connection.
        """
        return CTaosInterface.close(self._conn)

    def subscribe(self, restart, topic, sql, interval):
        """Create a subscription.
        """
        if self._conn is None:
            return None
        sub = CTaosInterface.subscribe(
            self._conn, restart, topic, sql, interval)
        return TDengineSubscription(sub)

    def cursor(self):
        """Return a new Cursor object using the connection.
        """
        return TDengineCursor(self)

    def commit(self):
        """Commit any pending transaction to the database.

        Since TDengine do not support transactions, the implement is void functionality.
        """
        pass

    def rollback(self):
        """Void functionality
        """
        pass

    def clear_result_set(self):
        """Clear unused result set on this connection.
        """
        pass


if __name__ == "__main__":
    conn = TDengineConnection(host='192.168.1.107')
    conn.close()
    print("Hello world")
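
The `config` keyword accepted by `TDengineConnection.config()` above is handed to `CTaosInterface(config)`, which passes it to `taos_options(3, ...)` before `taos_init()` (see cinterface.py later in this diff); it appears to name the client configuration directory. A hedged sketch, with an illustrative path:

```python
from taos.connection import TDengineConnection  # import path assumed

# Point the client at a non-default configuration directory (path is illustrative).
conn = TDengineConnection(host="127.0.0.1", config="/etc/taos")
conn.close()
```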
@@ -1,42 +0,0 @@
"""Constants in TDengine python
"""

from .dbapi import *


class FieldType(object):
    """TDengine Field Types
    """
    # type_code
    C_NULL = 0
    C_BOOL = 1
    C_TINYINT = 2
    C_SMALLINT = 3
    C_INT = 4
    C_BIGINT = 5
    C_FLOAT = 6
    C_DOUBLE = 7
    C_BINARY = 8
    C_TIMESTAMP = 9
    C_NCHAR = 10
    C_TINYINT_UNSIGNED = 11
    C_SMALLINT_UNSIGNED = 12
    C_INT_UNSIGNED = 13
    C_BIGINT_UNSIGNED = 14
    # NULL value definition
    # NOTE: These values should change according to C definition in tsdb.h
    C_BOOL_NULL = 0x02
    C_TINYINT_NULL = -128
    C_TINYINT_UNSIGNED_NULL = 255
    C_SMALLINT_NULL = -32768
    C_SMALLINT_UNSIGNED_NULL = 65535
    C_INT_NULL = -2147483648
    C_INT_UNSIGNED_NULL = 4294967295
    C_BIGINT_NULL = -9223372036854775808
    C_BIGINT_UNSIGNED_NULL = 18446744073709551615
    C_FLOAT_NULL = float('nan')
    C_DOUBLE_NULL = float('nan')
    C_BINARY_NULL = bytearray([int('0xff', 16)])
    # Timestamp precision definition
    C_TIMESTAMP_MILLI = 0
    C_TIMESTAMP_MICRO = 1
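
The NULL sentinels above mirror the C definitions in tsdb.h; the converters in cinterface.py map them to Python `None`. A small standalone illustration of the INT sentinel (no server needed):

```python
from taos.constants import FieldType  # import path assumed


def int_or_none(raw):
    # -2147483648 is FieldType.C_INT_NULL, the sentinel stored for a NULL INT cell
    return None if raw == FieldType.C_INT_NULL else raw


print(int_or_none(FieldType.C_INT_NULL))  # None
print(int_or_none(42))                    # 42
```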
@@ -1,44 +0,0 @@
"""Type Objects and Constructors.
"""

import time
import datetime


class DBAPITypeObject(object):
    def __init__(self, *values):
        self.values = values

    def __com__(self, other):
        if other in self.values:
            return 0
        if other < self.values:
            return 1
        else:
            return -1


Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime


def DataFromTicks(ticks):
    return Date(*time.localtime(ticks)[:3])


def TimeFromTicks(ticks):
    return Time(*time.localtime(ticks)[3:6])


def TimestampFromTicks(ticks):
    return Timestamp(*time.localtime(ticks)[:6])


Binary = bytes

# STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
# NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# ROWID = DBAPITypeObject()
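
The module above supplies the DB-API 2.0 type constructors; `TimestampFromTicks` turns a Unix timestamp into a `datetime.datetime` via `time.localtime`. A quick standalone example:

```python
import time

from taos.dbapi import TimestampFromTicks  # import path assumed

# Prints the current local time as a datetime.datetime built from a Unix timestamp
print(TimestampFromTicks(time.time()))
```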
@@ -1,66 +0,0 @@
"""Python exceptions
"""


class Error(Exception):
    def __init__(self, msg=None, errno=None):
        self.msg = msg
        self._full_msg = self.msg
        self.errno = errno

    def __str__(self):
        return self._full_msg


class Warning(Exception):
    """Exception raised for important warnings like data truncations while inserting.
    """
    pass


class InterfaceError(Error):
    """Exception raised for errors that are related to the database interface rather than the database itself.
    """
    pass


class DatabaseError(Error):
    """Exception raised for errors that are related to the database.
    """
    pass


class DataError(DatabaseError):
    """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
    """
    pass


class OperationalError(DatabaseError):
    """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
    """
    pass


class IntegrityError(DatabaseError):
    """Exception raised when the relational integrity of the database is affected.
    """
    pass


class InternalError(DatabaseError):
    """Exception raised when the database encounters an internal error.
    """
    pass


class ProgrammingError(DatabaseError):
    """Exception raised for programming errors.
    """
    pass


class NotSupportedError(DatabaseError):
    """Exception raised in case a method or database API was used which is not supported by the database,.
    """
    pass
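
These exceptions follow the DB-API 2.0 hierarchy; in practice `cursor.execute()` raises `ProgrammingError` with the server's error string and errno whenever `taos_errno()` reports a failure (see cursor.py later in this diff). A hedged sketch of catching it, with illustrative server, credentials and table name:

```python
import taos
from taos.error import ProgrammingError  # import path assumed

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
cursor = conn.cursor()
try:
    cursor.execute("select * from no_such_table")
except ProgrammingError as err:
    print("query failed:", err.errno, str(err))
finally:
    cursor.close()
    conn.close()
```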
@@ -1,57 +0,0 @@
from .cinterface import CTaosInterface
from .error import *


class TDengineSubscription(object):
    """TDengine subscription object
    """

    def __init__(self, sub):
        self._sub = sub

    def consume(self):
        """Consume rows of a subscription
        """
        if self._sub is None:
            raise OperationalError("Invalid use of consume")

        result, fields = CTaosInterface.consume(self._sub)
        buffer = [[] for i in range(len(fields))]
        while True:
            block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
            if num_of_fields == 0:
                break
            for i in range(len(fields)):
                buffer[i].extend(block[i])

        self.fields = fields
        return list(map(tuple, zip(*buffer)))

    def close(self, keepProgress=True):
        """Close the Subscription.
        """
        if self._sub is None:
            return False

        CTaosInterface.unsubscribe(self._sub, keepProgress)
        return True


if __name__ == '__main__':
    from .connection import TDengineConnection
    conn = TDengineConnection(
        host="127.0.0.1",
        user="root",
        password="taosdata",
        database="test")

    # Generate a cursor object to run SQL commands
    sub = conn.subscribe(True, "test", "select * from meters;", 1000)

    for i in range(0, 10):
        data = sub.consume()
        for d in data:
            print(d)

    sub.close()
    conn.close()
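
`close(keepProgress=True)` above retains the subscription's consumption progress, so a later `subscribe(...)` with `restart=False` can apparently resume where consumption stopped; passing `False` discards it. Continuing the `__main__` snippet above (names illustrative):

```python
# Drop the saved progress so the next subscription with the same topic starts from scratch
sub.close(keepProgress=False)
conn.close()
```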
@@ -1,12 +0,0 @@
Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>

This program is free software: you can use, redistribute, and/or modify
it under the terms of the GNU Affero General Public License, version 3
or later ("AGPL"), as published by the Free Software Foundation.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
@@ -1 +0,0 @@
# TDengine python client interface
@@ -1,20 +0,0 @@
import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="taos",
    version="2.0.9",
    author="Taosdata Inc.",
    author_email="support@taosdata.com",
    description="TDengine python client package",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/pypa/sampleproject",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: MacOS X",
    ],
)
@@ -1,24 +0,0 @@

from .connection import TDengineConnection
from .cursor import TDengineCursor

# Globals
threadsafety = 0
paramstyle = 'pyformat'

__all__ = ['connection', 'cursor']


def connect(*args, **kwargs):
    """ Function to return a TDengine connector object

    Current supporting keyword parameters:
    @dsn: Data source name as string
    @user: Username as string(optional)
    @password: Password as string(optional)
    @host: Hostname(optional)
    @database: Database name(optional)

    @rtype: TDengineConnector
    """
    return TDengineConnection(*args, **kwargs)
@@ -1,648 +0,0 @@
import ctypes
from .constants import FieldType
from .error import *
import math
import datetime


def _convert_millisecond_to_datetime(milli):
    return datetime.datetime.fromtimestamp(milli / 1000.0)


def _convert_microsecond_to_datetime(micro):
    return datetime.datetime.fromtimestamp(micro / 1000000.0)


def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bool row to python row
    """
    _timestamp_converter = _convert_millisecond_to_datetime
    if micro:
        _timestamp_converter = _convert_microsecond_to_datetime

    if num_of_rows > 0:
        return [None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele)
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele)
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]


def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bool row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_BOOL_NULL else bool(ele)
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_BOOL_NULL else bool(ele)
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[:abs(num_of_rows)]]


def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C tinyint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_TINYINT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_TINYINT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]


def _crow_tinyint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C tinyint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_ubyte))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_ubyte))[:abs(num_of_rows)]]


def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C smallint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_SMALLINT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_SMALLINT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[:abs(num_of_rows)]]


def _crow_smallint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C smallint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_ushort))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_ushort))[:abs(num_of_rows)]]


def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C int row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_INT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_INT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]


def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C int row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_INT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_uint))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_INT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_uint))[:abs(num_of_rows)]]


def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bigint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_BIGINT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_BIGINT_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]


def _crow_bigint_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C bigint row to python row
    """
    if num_of_rows > 0:
        return [None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_uint64))[:abs(num_of_rows)]]
    else:
        return [None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_uint64))[:abs(num_of_rows)]]


def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C float row to python row
    """
    if num_of_rows > 0:
        return [None if math.isnan(ele) else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
    else:
        return [None if math.isnan(ele) else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]


def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C double row to python row
    """
    if num_of_rows > 0:
        return [None if math.isnan(ele) else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
    else:
        return [None if math.isnan(ele) else ele
                for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]


def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C binary row to python row
    """
    assert(nbytes is not None)
    if num_of_rows > 0:
        return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8')
                for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
    else:
        return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode('utf-8')
                for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]


def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C nchar row to python row
    """
    assert(nbytes is not None)
    res = []
    for i in range(abs(num_of_rows)):
        try:
            if num_of_rows >= 0:
                tmpstr = ctypes.c_char_p(data)
                res.append(tmpstr.value.decode())
            else:
                res.append((ctypes.cast(data + nbytes * i,
                                        ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
        except ValueError:
            res.append(None)

    return res


def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C binary row to python row
    """
    assert(nbytes is not None)
    res = []
    if num_of_rows > 0:
        for i in range(abs(num_of_rows)):
            try:
                rbyte = ctypes.cast(data + nbytes * i, ctypes.POINTER(ctypes.c_short))[:1].pop()
                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
                res.append(tmpstr.value.decode()[0:rbyte])
            except ValueError:
                res.append(None)
    else:
        for i in range(abs(num_of_rows)):
            try:
                rbyte = ctypes.cast(data + nbytes * i, ctypes.POINTER(ctypes.c_short))[:1].pop()
                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
                res.append(tmpstr.value.decode()[0:rbyte])
            except ValueError:
                res.append(None)
    return res


def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
    """Function to convert C nchar row to python row
    """
    assert(nbytes is not None)
    res = []
    if num_of_rows >= 0:
        for i in range(abs(num_of_rows)):
            try:
                tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
                res.append(tmpstr.value.decode())
            except ValueError:
                res.append(None)
    else:
        for i in range(abs(num_of_rows)):
            try:
                res.append((ctypes.cast(data + nbytes * i + 2,
                                        ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
            except ValueError:
                res.append(None)
    return res


_CONVERT_FUNC = {
    FieldType.C_BOOL: _crow_bool_to_python,
    FieldType.C_TINYINT: _crow_tinyint_to_python,
    FieldType.C_SMALLINT: _crow_smallint_to_python,
    FieldType.C_INT: _crow_int_to_python,
    FieldType.C_BIGINT: _crow_bigint_to_python,
    FieldType.C_FLOAT: _crow_float_to_python,
    FieldType.C_DOUBLE: _crow_double_to_python,
    FieldType.C_BINARY: _crow_binary_to_python,
    FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
    FieldType.C_NCHAR: _crow_nchar_to_python,
    FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
    FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
    FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
    FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}

_CONVERT_FUNC_BLOCK = {
    FieldType.C_BOOL: _crow_bool_to_python,
    FieldType.C_TINYINT: _crow_tinyint_to_python,
    FieldType.C_SMALLINT: _crow_smallint_to_python,
    FieldType.C_INT: _crow_int_to_python,
    FieldType.C_BIGINT: _crow_bigint_to_python,
    FieldType.C_FLOAT: _crow_float_to_python,
    FieldType.C_DOUBLE: _crow_double_to_python,
    FieldType.C_BINARY: _crow_binary_to_python_block,
    FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
    FieldType.C_NCHAR: _crow_nchar_to_python_block,
    FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
    FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
    FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
    FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}

# Corresponding TAOS_FIELD structure in C


class TaosField(ctypes.Structure):
    _fields_ = [('name', ctypes.c_char * 65),
                ('type', ctypes.c_char),
                ('bytes', ctypes.c_short)]

# C interface class


class CTaosInterface(object):

    libtaos = ctypes.CDLL('libtaos.dylib')

    libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
    libtaos.taos_init.restype = None
    libtaos.taos_connect.restype = ctypes.c_void_p
    #libtaos.taos_use_result.restype = ctypes.c_void_p
    libtaos.taos_fetch_row.restype = ctypes.POINTER(ctypes.c_void_p)
    libtaos.taos_errstr.restype = ctypes.c_char_p
    libtaos.taos_subscribe.restype = ctypes.c_void_p
    libtaos.taos_consume.restype = ctypes.c_void_p
    libtaos.taos_fetch_lengths.restype = ctypes.c_void_p
    libtaos.taos_free_result.restype = None
    libtaos.taos_errno.restype = ctypes.c_int
    libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)

    def __init__(self, config=None):
        '''
        Function to initialize the class
        @host : str, hostname to connect
        @user : str, username to connect to server
        @password : str, password to connect to server
        @db : str, default db to use when log in
        @config : str, config directory

        @rtype : None
        '''
        if config is None:
            self._config = ctypes.c_char_p(None)
        else:
            try:
                self._config = ctypes.c_char_p(config.encode('utf-8'))
            except AttributeError:
                raise AttributeError("config is expected as a str")

        if config is not None:
            CTaosInterface.libtaos.taos_options(3, self._config)

        CTaosInterface.libtaos.taos_init()

    @property
    def config(self):
        """ Get current config
        """
        return self._config

    def connect(self, host=None, user="root", password="taosdata", db=None, port=0):
        '''
        Function to connect to server

        @rtype: c_void_p, TDengine handle
        '''
        # host
        try:
            _host = ctypes.c_char_p(host.encode("utf-8")) if host is not None else ctypes.c_char_p(None)
        except AttributeError:
            raise AttributeError("host is expected as a str")

        # user
        try:
            _user = ctypes.c_char_p(user.encode("utf-8"))
        except AttributeError:
            raise AttributeError("user is expected as a str")

        # password
        try:
            _password = ctypes.c_char_p(password.encode("utf-8"))
        except AttributeError:
            raise AttributeError("password is expected as a str")

        # db
        try:
            _db = ctypes.c_char_p(db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
        except AttributeError:
            raise AttributeError("db is expected as a str")

        # port
        try:
            _port = ctypes.c_int(port)
        except TypeError:
            raise TypeError("port is expected as an int")

        connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
            _host, _user, _password, _db, _port))

        if connection.value is None:
            print('connect to TDengine failed')
            raise ConnectionError("connect to TDengine failed")
            # sys.exit(1)
        # else:
        #    print('connect to TDengine success')

        return connection

    @staticmethod
    def close(connection):
        '''Close the TDengine handle
        '''
        CTaosInterface.libtaos.taos_close(connection)
        #print('connection is closed')

    @staticmethod
    def query(connection, sql):
        '''Run SQL

        @sql: str, sql string to run

        @rtype: 0 on success and -1 on failure
        '''
        try:
            return CTaosInterface.libtaos.taos_query(
                connection, ctypes.c_char_p(sql.encode('utf-8')))
        except AttributeError:
            raise AttributeError("sql is expected as a string")
        # finally:
        #     CTaosInterface.libtaos.close(connection)

    @staticmethod
    def affectedRows(result):
        """The affected rows after runing query
        """
        return CTaosInterface.libtaos.taos_affected_rows(result)

    @staticmethod
    def subscribe(connection, restart, topic, sql, interval):
        """Create a subscription
         @restart boolean,
         @sql string, sql statement for data query, must be a 'select' statement.
         @topic string, name of this subscription
        """
        return ctypes.c_void_p(CTaosInterface.libtaos.taos_subscribe(
            connection,
            1 if restart else 0,
            ctypes.c_char_p(topic.encode('utf-8')),
            ctypes.c_char_p(sql.encode('utf-8')),
            None,
            None,
            interval))

    @staticmethod
    def consume(sub):
        """Consume data of a subscription
        """
        result = ctypes.c_void_p(CTaosInterface.libtaos.taos_consume(sub))
        fields = []
        pfields = CTaosInterface.fetchFields(result)
        for i in range(CTaosInterface.libtaos.taos_num_fields(result)):
            fields.append({'name': pfields[i].name.decode('utf-8'),
                           'bytes': pfields[i].bytes,
                           'type': ord(pfields[i].type)})
        return result, fields

    @staticmethod
    def unsubscribe(sub, keepProgress):
        """Cancel a subscription
        """
        CTaosInterface.libtaos.taos_unsubscribe(sub, 1 if keepProgress else 0)

    @staticmethod
    def useResult(result):
        '''Use result after calling self.query
        '''
        fields = []
        pfields = CTaosInterface.fetchFields(result)
        for i in range(CTaosInterface.fieldsCount(result)):
            fields.append({'name': pfields[i].name.decode('utf-8'),
                           'bytes': pfields[i].bytes,
                           'type': ord(pfields[i].type)})

        return fields

    @staticmethod
    def fetchBlock(result, fields):
        pblock = ctypes.c_void_p(0)
        num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
            result, ctypes.byref(pblock))
        if num_of_rows == 0:
            return None, 0
        isMicro = (CTaosInterface.libtaos.taos_result_precision(
            result) == FieldType.C_TIMESTAMP_MICRO)
        blocks = [None] * len(fields)
        fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
        fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
        for i in range(len(fields)):
            data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
            if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
                raise DatabaseError("Invalid data type returned from database")
            blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
                data, num_of_rows, fieldLen[i], isMicro)

        return blocks, abs(num_of_rows)

    @staticmethod
    def fetchRow(result, fields):
        pblock = ctypes.c_void_p(0)
        pblock = CTaosInterface.libtaos.taos_fetch_row(result)
        if pblock:
            num_of_rows = 1
            isMicro = (CTaosInterface.libtaos.taos_result_precision(
                result) == FieldType.C_TIMESTAMP_MICRO)
            blocks = [None] * len(fields)
            fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
            fieldLen = [ele for ele in ctypes.cast(fieldL, ctypes.POINTER(ctypes.c_int))[:len(fields)]]
            for i in range(len(fields)):
                data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
                if fields[i]['type'] not in _CONVERT_FUNC:
                    raise DatabaseError(
                        "Invalid data type returned from database")
                if data is None:
                    blocks[i] = [None]
                else:
                    blocks[i] = _CONVERT_FUNC[fields[i]['type']](
                        data, num_of_rows, fieldLen[i], isMicro)
        else:
            return None, 0
        return blocks, abs(num_of_rows)

    @staticmethod
    def freeResult(result):
        CTaosInterface.libtaos.taos_free_result(result)
        result.value = None

    @staticmethod
    def fieldsCount(result):
        return CTaosInterface.libtaos.taos_field_count(result)

    @staticmethod
    def fetchFields(result):
        return CTaosInterface.libtaos.taos_fetch_fields(result)

    # @staticmethod
    # def fetchRow(result, fields):
    #     l = []
    #     row = CTaosInterface.libtaos.taos_fetch_row(result)
    #     if not row:
    #         return None

    #     for i in range(len(fields)):
    #         l.append(CTaosInterface.getDataValue(
    #             row[i], fields[i]['type'], fields[i]['bytes']))

    #     return tuple(l)

    # @staticmethod
    # def getDataValue(data, dtype, byte):
    #     '''
    #     '''
    #     if not data:
    #         return None

    #     if (dtype == CTaosInterface.TSDB_DATA_TYPE_BOOL):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TINYINT):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_SMALLINT):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_INT):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BIGINT):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_FLOAT):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_DOUBLE):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BINARY):
    #         return (ctypes.cast(data, ctypes.POINTER(ctypes.c_char))[0:byte]).rstrip('\x00')
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TIMESTAMP):
    #         return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
    #     elif (dtype == CTaosInterface.TSDB_DATA_TYPE_NCHAR):
    #         return (ctypes.cast(data, ctypes.c_char_p).value).rstrip('\x00')

    @staticmethod
    def errno(result):
        """Return the error number.
        """
        return CTaosInterface.libtaos.taos_errno(result)

    @staticmethod
    def errStr(result):
        """Return the error styring
        """
        return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')


if __name__ == '__main__':
    cinter = CTaosInterface()
    conn = cinter.connect()
    result = cinter.query(conn, 'show databases')

    print('Query Affected rows: {}'.format(cinter.affectedRows(result)))

    fields = CTaosInterface.useResult(result)

    data, num_of_rows = CTaosInterface.fetchBlock(result, fields)

    print(data)

    cinter.freeResult(result)
    cinter.close(conn)
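
The class above hard-codes `libtaos.dylib`, which only resolves on macOS; the consolidated connector adds `import platform` (see the last hunk of this diff) so the client library can be chosen per operating system. A hedged sketch of that idea, not the exact logic of the merged file:

```python
import ctypes
import platform


def _load_taos():
    # Pick the TDengine client library by OS; the names below are the conventional
    # ones, and the merged cinterface.py may resolve them differently.
    system = platform.system()
    if system == "Windows":
        return ctypes.windll.LoadLibrary("taos")
    if system == "Darwin":
        return ctypes.CDLL("libtaos.dylib")
    return ctypes.CDLL("libtaos.so")


libtaos = _load_taos()
```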
@@ -1,280 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
from .constants import FieldType

# querySeqNum = 0


class TDengineCursor(object):
    """Database cursor which is used to manage the context of a fetch operation.

    Attributes:
        .description: Read-only attribute consists of 7-item sequences:

            > name (mondatory)
            > type_code (mondatory)
            > display_size
            > internal_size
            > precision
            > scale
            > null_ok

            This attribute will be None for operations that do not return rows or
            if the cursor has not had an operation invoked via the .execute*() method yet.

        .rowcount:This read-only attribute specifies the number of rows that the last
            .execute*() produced (for DQL statements like SELECT) or affected
    """

    def __init__(self, connection=None):
        self._description = []
        self._rowcount = -1
        self._connection = None
        self._result = None
        self._fields = None
        self._block = None
        self._block_rows = -1
        self._block_iter = 0
        self._affected_rows = 0
        self._logfile = ""

        if connection is not None:
            self._connection = connection

    def __iter__(self):
        return self

    def __next__(self):
        if self._result is None or self._fields is None:
            raise OperationalError("Invalid use of fetch iterator")

        if self._block_rows <= self._block_iter:
            block, self._block_rows = CTaosInterface.fetchRow(
                self._result, self._fields)
            if self._block_rows == 0:
                raise StopIteration
            self._block = list(map(tuple, zip(*block)))
            self._block_iter = 0

        data = self._block[self._block_iter]
        self._block_iter += 1

        return data

    @property
    def description(self):
        """Return the description of the object.
        """
        return self._description

    @property
    def rowcount(self):
        """Return the rowcount of the object
        """
        return self._rowcount

    @property
    def affected_rows(self):
        """Return the rowcount of insertion
        """
        return self._affected_rows

    def callproc(self, procname, *args):
        """Call a stored database procedure with the given name.

        Void functionality since no stored procedures.
        """
        pass

    def log(self, logfile):
        self._logfile = logfile

    def close(self):
        """Close the cursor.
        """
        if self._connection is None:
            return False

        self._reset_result()
        self._connection = None

        return True

    def execute(self, operation, params=None):
        """Prepare and execute a database operation (query or command).
        """
        if not operation:
            return None

        if not self._connection:
            # TODO : change the exception raised here
            raise ProgrammingError("Cursor is not connected")

        self._reset_result()

        stmt = operation
        if params is not None:
            pass

        # global querySeqNum
        # querySeqNum += 1
        # localSeqNum = querySeqNum # avoid raice condition
        # print(" >> Exec Query ({}): {}".format(localSeqNum, str(stmt)))
        self._result = CTaosInterface.query(self._connection._conn, stmt)
        # print(" << Query ({}) Exec Done".format(localSeqNum))
        if (self._logfile):
            with open(self._logfile, "a") as logfile:
                logfile.write("%s;\n" % operation)

        errno = CTaosInterface.libtaos.taos_errno(self._result)
        if errno == 0:
            if CTaosInterface.fieldsCount(self._result) == 0:
                self._affected_rows += CTaosInterface.affectedRows(
                    self._result)
                return CTaosInterface.affectedRows(self._result)
            else:
                self._fields = CTaosInterface.useResult(
                    self._result)
                return self._handle_result()
        else:
            raise ProgrammingError(
                CTaosInterface.errStr(
                    self._result), errno)

    def executemany(self, operation, seq_of_parameters):
        """Prepare a database operation (query or command) and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters.
        """
        pass

    def fetchone(self):
        """Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
        """
        pass

    def fetchmany(self):
        pass

    def istype(self, col, dataType):
        if (dataType.upper() == "BOOL"):
            if (self._description[col][1] == FieldType.C_BOOL):
                return True
        if (dataType.upper() == "TINYINT"):
            if (self._description[col][1] == FieldType.C_TINYINT):
                return True
        if (dataType.upper() == "TINYINT UNSIGNED"):
            if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
                return True
        if (dataType.upper() == "SMALLINT"):
            if (self._description[col][1] == FieldType.C_SMALLINT):
                return True
        if (dataType.upper() == "SMALLINT UNSIGNED"):
            if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
                return True
        if (dataType.upper() == "INT"):
            if (self._description[col][1] == FieldType.C_INT):
                return True
        if (dataType.upper() == "INT UNSIGNED"):
            if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
                return True
        if (dataType.upper() == "BIGINT"):
            if (self._description[col][1] == FieldType.C_BIGINT):
                return True
        if (dataType.upper() == "BIGINT UNSIGNED"):
            if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
                return True
        if (dataType.upper() == "FLOAT"):
            if (self._description[col][1] == FieldType.C_FLOAT):
                return True
        if (dataType.upper() == "DOUBLE"):
            if (self._description[col][1] == FieldType.C_DOUBLE):
                return True
        if (dataType.upper() == "BINARY"):
            if (self._description[col][1] == FieldType.C_BINARY):
                return True
        if (dataType.upper() == "TIMESTAMP"):
            if (self._description[col][1] == FieldType.C_TIMESTAMP):
                return True
        if (dataType.upper() == "NCHAR"):
            if (self._description[col][1] == FieldType.C_NCHAR):
                return True

        return False

    def fetchall_row(self):
        """Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
        """
        if self._result is None or self._fields is None:
            raise OperationalError("Invalid use of fetchall")

        buffer = [[] for i in range(len(self._fields))]
        self._rowcount = 0
        while True:
            block, num_of_fields = CTaosInterface.fetchRow(
                self._result, self._fields)
            errno = CTaosInterface.libtaos.taos_errno(self._result)
            if errno != 0:
                raise ProgrammingError(
                    CTaosInterface.errStr(
                        self._result), errno)
            if num_of_fields == 0:
                break
            self._rowcount += num_of_fields
            for i in range(len(self._fields)):
                buffer[i].extend(block[i])
        return list(map(tuple, zip(*buffer)))

    def fetchall(self):
        if self._result is None or self._fields is None:
            raise OperationalError("Invalid use of fetchall")

        buffer = [[] for i in range(len(self._fields))]
        self._rowcount = 0
        while True:
            block, num_of_fields = CTaosInterface.fetchBlock(
                self._result, self._fields)
            errno = CTaosInterface.libtaos.taos_errno(self._result)
            if errno != 0:
                raise ProgrammingError(
                    CTaosInterface.errStr(
                        self._result), errno)
            if num_of_fields == 0:
                break
            self._rowcount += num_of_fields
            for i in range(len(self._fields)):
                buffer[i].extend(block[i])
        return list(map(tuple, zip(*buffer)))

    def nextset(self):
        """
        """
        pass

    def setinputsize(self, sizes):
        pass

    def setutputsize(self, size, column=None):
        pass

    def _reset_result(self):
        """Reset the result to unused version.
        """
        self._description = []
        self._rowcount = -1
        if self._result is not None:
            CTaosInterface.freeResult(self._result)
        self._result = None
        self._fields = None
        self._block = None
        self._block_rows = -1
        self._block_iter = 0
        self._affected_rows = 0

    def _handle_result(self):
        """Handle the return result from query.
        """
        self._description = []
        for ele in self._fields:
            self._description.append(
                (ele['name'], ele['type'], None, None, None, None, False))

        return self._result
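
Because `TDengineCursor` implements `__iter__`/`__next__` above, query results can also be streamed row by row instead of materialised with `fetchall()`. A short sketch; server, database and table name are illustrative:

```python
import taos

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata", database="test")
cursor = conn.cursor()
cursor.execute("select * from meters")
for row in cursor:  # each iteration pulls from the currently fetched block
    print(row)
cursor.close()
conn.close()
```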
@@ -0,0 +1,34 @@
import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="taos",
    version="2.0.10",
    author="Taosdata Inc.",
    author_email="support@taosdata.com",
    description="TDengine python client package",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/taosdata/TDengine/tree/develop/src/connector/python",
    packages=setuptools.find_packages(),
    classifiers=[
        "Environment :: Console",
        "Environment :: MacOS X",
        "Environment :: Win32 (MS Windows)",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
        "Operating System :: MacOS",
        "Programming Language :: Python :: 2.7",
        "Operating System :: Linux",
        "Operating System :: POSIX :: Linux",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: Microsoft :: Windows :: Windows 10",
    ],
)
@@ -3,6 +3,7 @@ from .constants import FieldType
 from .error import *
 import math
 import datetime
+import platform


 def _convert_millisecond_to_datetime(milli):
@@ -20,13 +21,6 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
     if micro:
         _timestamp_converter = _convert_microsecond_to_datetime

-    if num_of_rows > 0:
-        return [
-            None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
-                data, ctypes.POINTER(
-                    ctypes.c_int64))[
-                        :abs(num_of_rows)]]
-    else:
     return [
         None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
             data, ctypes.POINTER(
@@ -37,27 +31,16 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
 def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C bool row to python row
     """
-    if num_of_rows > 0:
     return [
         None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
             data, ctypes.POINTER(
                 ctypes.c_byte))[
                     :abs(num_of_rows)]]
-    else:
-        return [
-            None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
-                data, ctypes.POINTER(
-                    ctypes.c_bool))[
-                        :abs(num_of_rows)]]


 def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C tinyint row to python row
     """
-    if num_of_rows > 0:
-        return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
-            data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
-    else:
     return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
         data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]

@@ -69,13 +52,6 @@ def _crow_tinyint_unsigned_to_python(
         micro=False):
     """Function to convert C tinyint row to python row
     """
-    if num_of_rows > 0:
-        return [
-            None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
-                data, ctypes.POINTER(
-                    ctypes.c_ubyte))[
-                        :abs(num_of_rows)]]
-    else:
     return [
         None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
             data, ctypes.POINTER(
@@ -86,13 +62,6 @@ def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
 def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
     """Function to convert C smallint row to python row
     """
-    if num_of_rows > 0:
-        return [
-            None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
-                data, ctypes.POINTER(
-                    ctypes.c_short))[
-                        :abs(num_of_rows)]]
-    else:
|
|
||||||
return [
|
return [
|
||||||
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(
|
data, ctypes.POINTER(
|
||||||
|
@ -104,13 +73,6 @@ def _crow_smallint_unsigned_to_python(
|
||||||
data, num_of_rows, nbytes=None, micro=False):
|
data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C smallint row to python row
|
"""Function to convert C smallint row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ushort))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
return [
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(
|
data, ctypes.POINTER(
|
||||||
|
@ -121,10 +83,6 @@ def _crow_smallint_unsigned_to_python(
|
||||||
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C int row to python row
|
"""Function to convert C int row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
@ -132,13 +90,6 @@ def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C int row to python row
|
"""Function to convert C int row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
return [
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(
|
data, ctypes.POINTER(
|
||||||
|
@ -149,10 +100,6 @@ def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C bigint row to python row
|
"""Function to convert C bigint row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
@ -164,13 +111,6 @@ def _crow_bigint_unsigned_to_python(
|
||||||
micro=False):
|
micro=False):
|
||||||
"""Function to convert C bigint row to python row
|
"""Function to convert C bigint row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
return [
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(
|
data, ctypes.POINTER(
|
||||||
|
@ -181,10 +121,6 @@ def _crow_bigint_unsigned_to_python(
|
||||||
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C float row to python row
|
"""Function to convert C float row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
@ -192,10 +128,6 @@ def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
|
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C double row to python row
|
"""Function to convert C double row to python row
|
||||||
"""
|
"""
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
@ -204,10 +136,6 @@ def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""Function to convert C binary row to python row
|
"""Function to convert C binary row to python row
|
||||||
"""
|
"""
|
||||||
assert(nbytes is not None)
|
assert(nbytes is not None)
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
||||||
|
|
||||||
|
@ -236,19 +164,6 @@ def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""
|
"""
|
||||||
assert(nbytes is not None)
|
assert(nbytes is not None)
|
||||||
res = []
|
res = []
|
||||||
if num_of_rows > 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
rbyte = ctypes.cast(
|
|
||||||
data + nbytes * i,
|
|
||||||
ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:1].pop()
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode()[0:rbyte])
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
for i in range(abs(num_of_rows)):
|
||||||
try:
|
try:
|
||||||
rbyte = ctypes.cast(
|
rbyte = ctypes.cast(
|
||||||
|
@ -268,20 +183,12 @@ def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
||||||
"""
|
"""
|
||||||
assert(nbytes is not None)
|
assert(nbytes is not None)
|
||||||
res = []
|
res = []
|
||||||
if num_of_rows >= 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
for i in range(abs(num_of_rows)):
|
||||||
try:
|
try:
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
||||||
res.append(tmpstr.value.decode())
|
res.append(tmpstr.value.decode())
|
||||||
except ValueError:
|
except ValueError:
|
||||||
res.append(None)
|
res.append(None)
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
res.append((ctypes.cast(data + nbytes * i + 2,
|
|
||||||
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
@ -330,9 +237,33 @@ class TaosField(ctypes.Structure):
# C interface class


def _load_taos_linux():
    return ctypes.CDLL('libtaos.so')


def _load_taos_darwin():
    return ctypes.CDLL('libtaos.dylib')


def _load_taos_windows():
    return ctypes.windll.LoadLibrary('taos')


def _load_taos():
    load_func = {
        'Linux': _load_taos_linux,
        'Darwin': _load_taos_darwin,
        'Windows': _load_taos_windows,
    }
    try:
        return load_func[platform.system()]()
    except:
        sys.exit('unsupported platform to TDengine connector')


class CTaosInterface(object):

    libtaos = ctypes.CDLL('libtaos.so')
    libtaos = _load_taos()

    libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
    libtaos.taos_init.restype = None
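With the loader above, the native client library is resolved per platform when the module is imported, so application code can stay identical on Linux, macOS, and Windows. A minimal usage sketch, assuming the rebuilt `taos` package keeps the `connect()` entry point shown elsewhere in this diff (host and credentials are the illustrative defaults used in the examples):

```python
import taos  # package built from src/connector/python

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
cursor = conn.cursor()
cursor.execute("show databases")
for row in cursor:  # TDengineCursor implements the iterator protocol
    print(row)
cursor.close()
conn.close()
```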
@ -45,6 +45,12 @@ class TDengineCursor(object):
        return self

    def __next__(self):
        return self._taos_next()

    def next(self):
        return self._taos_next()

    def _taos_next(self):
        if self._result is None or self._fields is None:
            raise OperationalError("Invalid use of fetch iterator")
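The next()/__next__() pair added above routes both the Python 2 and Python 3 iteration protocols through the same _taos_next() helper, so explicit row stepping behaves identically on either interpreter. A small sketch, assuming a test database with the meters table used in the subscription example:

```python
import taos  # package built from src/connector/python

cursor = taos.connect(host="127.0.0.1", database="test").cursor()
cursor.execute("select * from meters limit 2")
first = next(cursor)    # Python 3 protocol, dispatches to __next__()
second = cursor.next()  # Python 2 style call, same _taos_next() underneath
print(first, second)
cursor.close()
```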
@ -1,12 +0,0 @@
Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>

This program is free software: you can use, redistribute, and/or modify
it under the terms of the GNU Affero General Public License, version 3
or later ("AGPL"), as published by the Free Software Foundation.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
@ -1 +0,0 @@
# TDengine python client interface
@ -1,20 +0,0 @@
import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="taos",
    version="2.0.9",
    author="Taosdata Inc.",
    author_email="support@taosdata.com",
    description="TDengine python client package",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/pypa/sampleproject",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 2",
        "Operating System :: Windows",
    ],
)
@ -1,24 +0,0 @@
from .connection import TDengineConnection
from .cursor import TDengineCursor

# Globals
threadsafety = 0
paramstyle = 'pyformat'

__all__ = ['connection', 'cursor']


def connect(*args, **kwargs):
    """ Function to return a TDengine connector object

    Current supporting keyword parameters:
    @dsn: Data source name as string
    @user: Username as string(optional)
    @password: Password as string(optional)
    @host: Hostname(optional)
    @database: Database name(optional)

    @rtype: TDengineConnector
    """
    return TDengineConnection(*args, **kwargs)
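The connect() helper removed here (and its counterpart in the unified package) simply forwards keyword arguments to TDengineConnection, so the optional parameters listed in its docstring map directly onto connection settings. A minimal sketch; the config directory path is an assumption:

```python
import taos

# All keyword parameters are optional; 'config' points at the directory holding taos.cfg.
conn = taos.connect(host="127.0.0.1", user="root", password="taosdata", config="/etc/taos")
print(conn)
conn.close()
```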
@ -1,648 +0,0 @@
|
||||||
import ctypes
|
|
||||||
from .constants import FieldType
|
|
||||||
from .error import *
|
|
||||||
import math
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
|
|
||||||
def _convert_millisecond_to_datetime(milli):
|
|
||||||
return datetime.datetime.fromtimestamp(milli / 1000.0)
|
|
||||||
|
|
||||||
|
|
||||||
def _convert_microsecond_to_datetime(micro):
|
|
||||||
return datetime.datetime.fromtimestamp(micro / 1000000.0)
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bool row to python row
|
|
||||||
"""
|
|
||||||
_timestamp_converter = _convert_millisecond_to_datetime
|
|
||||||
if micro:
|
|
||||||
_timestamp_converter = _convert_microsecond_to_datetime
|
|
||||||
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_int64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_int64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bool row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_byte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_bool))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C tinyint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_tinyint_unsigned_to_python(
|
|
||||||
data,
|
|
||||||
num_of_rows,
|
|
||||||
nbytes=None,
|
|
||||||
micro=False):
|
|
||||||
"""Function to convert C tinyint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ubyte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ubyte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C smallint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_smallint_unsigned_to_python(
|
|
||||||
data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C smallint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ushort))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ushort))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C int row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C int row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bigint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bigint_unsigned_to_python(
|
|
||||||
data,
|
|
||||||
num_of_rows,
|
|
||||||
nbytes=None,
|
|
||||||
micro=False):
|
|
||||||
"""Function to convert C bigint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C float row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C double row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C binary row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C nchar row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
if num_of_rows >= 0:
|
|
||||||
tmpstr = ctypes.c_char_p(data)
|
|
||||||
res.append(tmpstr.value.decode())
|
|
||||||
else:
|
|
||||||
res.append((ctypes.cast(data + nbytes * i,
|
|
||||||
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C binary row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
if num_of_rows > 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
rbyte = ctypes.cast(
|
|
||||||
data + nbytes * i,
|
|
||||||
ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:1].pop()
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode()[0:rbyte])
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
rbyte = ctypes.cast(
|
|
||||||
data + nbytes * i,
|
|
||||||
ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:1].pop()
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode()[0:rbyte])
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C nchar row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
if num_of_rows >= 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode())
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
res.append((ctypes.cast(data + nbytes * i + 2,
|
|
||||||
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
_CONVERT_FUNC = {
|
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
|
||||||
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
|
||||||
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
|
||||||
FieldType.C_INT: _crow_int_to_python,
|
|
||||||
FieldType.C_BIGINT: _crow_bigint_to_python,
|
|
||||||
FieldType.C_FLOAT: _crow_float_to_python,
|
|
||||||
FieldType.C_DOUBLE: _crow_double_to_python,
|
|
||||||
FieldType.C_BINARY: _crow_binary_to_python,
|
|
||||||
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
|
||||||
FieldType.C_NCHAR: _crow_nchar_to_python,
|
|
||||||
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
|
||||||
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
|
||||||
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
|
||||||
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
|
||||||
}
|
|
||||||
|
|
||||||
_CONVERT_FUNC_BLOCK = {
|
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
|
||||||
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
|
||||||
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
|
||||||
FieldType.C_INT: _crow_int_to_python,
|
|
||||||
FieldType.C_BIGINT: _crow_bigint_to_python,
|
|
||||||
FieldType.C_FLOAT: _crow_float_to_python,
|
|
||||||
FieldType.C_DOUBLE: _crow_double_to_python,
|
|
||||||
FieldType.C_BINARY: _crow_binary_to_python_block,
|
|
||||||
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
|
||||||
FieldType.C_NCHAR: _crow_nchar_to_python_block,
|
|
||||||
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
|
||||||
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
|
||||||
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
|
||||||
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
|
||||||
}
|
|
||||||
|
|
||||||
# Corresponding TAOS_FIELD structure in C
|
|
||||||
|
|
||||||
|
|
||||||
class TaosField(ctypes.Structure):
|
|
||||||
_fields_ = [('name', ctypes.c_char * 65),
|
|
||||||
('type', ctypes.c_char),
|
|
||||||
('bytes', ctypes.c_short)]
|
|
||||||
|
|
||||||
# C interface class
|
|
||||||
|
|
||||||
|
|
||||||
class CTaosInterface(object):
|
|
||||||
|
|
||||||
libtaos = ctypes.windll.LoadLibrary('taos')
|
|
||||||
|
|
||||||
libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
|
|
||||||
libtaos.taos_init.restype = None
|
|
||||||
libtaos.taos_connect.restype = ctypes.c_void_p
|
|
||||||
#libtaos.taos_use_result.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_fetch_row.restype = ctypes.POINTER(ctypes.c_void_p)
|
|
||||||
libtaos.taos_errstr.restype = ctypes.c_char_p
|
|
||||||
libtaos.taos_subscribe.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_consume.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_fetch_lengths.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_free_result.restype = None
|
|
||||||
libtaos.taos_errno.restype = ctypes.c_int
|
|
||||||
libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
|
|
||||||
|
|
||||||
def __init__(self, config=None):
|
|
||||||
'''
|
|
||||||
Function to initialize the class
|
|
||||||
@host : str, hostname to connect
|
|
||||||
@user : str, username to connect to server
|
|
||||||
@password : str, password to connect to server
|
|
||||||
@db : str, default db to use when log in
|
|
||||||
@config : str, config directory
|
|
||||||
|
|
||||||
@rtype : None
|
|
||||||
'''
|
|
||||||
if config is None:
|
|
||||||
self._config = ctypes.c_char_p(None)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
self._config = ctypes.c_char_p(config.encode('utf-8'))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("config is expected as a str")
|
|
||||||
|
|
||||||
if config is not None:
|
|
||||||
CTaosInterface.libtaos.taos_options(3, self._config)
|
|
||||||
|
|
||||||
CTaosInterface.libtaos.taos_init()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def config(self):
|
|
||||||
""" Get current config
|
|
||||||
"""
|
|
||||||
return self._config
|
|
||||||
|
|
||||||
def connect(
|
|
||||||
self,
|
|
||||||
host=None,
|
|
||||||
user="root",
|
|
||||||
password="taosdata",
|
|
||||||
db=None,
|
|
||||||
port=0):
|
|
||||||
'''
|
|
||||||
Function to connect to server
|
|
||||||
|
|
||||||
@rtype: c_void_p, TDengine handle
|
|
||||||
'''
|
|
||||||
# host
|
|
||||||
try:
|
|
||||||
_host = ctypes.c_char_p(host.encode(
|
|
||||||
"utf-8")) if host is not None else ctypes.c_char_p(None)
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("host is expected as a str")
|
|
||||||
|
|
||||||
# user
|
|
||||||
try:
|
|
||||||
_user = ctypes.c_char_p(user.encode("utf-8"))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("user is expected as a str")
|
|
||||||
|
|
||||||
# password
|
|
||||||
try:
|
|
||||||
_password = ctypes.c_char_p(password.encode("utf-8"))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("password is expected as a str")
|
|
||||||
|
|
||||||
# db
|
|
||||||
try:
|
|
||||||
_db = ctypes.c_char_p(
|
|
||||||
db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("db is expected as a str")
|
|
||||||
|
|
||||||
# port
|
|
||||||
try:
|
|
||||||
_port = ctypes.c_int(port)
|
|
||||||
except TypeError:
|
|
||||||
raise TypeError("port is expected as an int")
|
|
||||||
|
|
||||||
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
|
|
||||||
_host, _user, _password, _db, _port))
|
|
||||||
|
|
||||||
if connection.value is None:
|
|
||||||
print('connect to TDengine failed')
|
|
||||||
raise ConnectionError("connect to TDengine failed")
|
|
||||||
# sys.exit(1)
|
|
||||||
# else:
|
|
||||||
# print('connect to TDengine success')
|
|
||||||
|
|
||||||
return connection
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def close(connection):
|
|
||||||
'''Close the TDengine handle
|
|
||||||
'''
|
|
||||||
CTaosInterface.libtaos.taos_close(connection)
|
|
||||||
#print('connection is closed')
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def query(connection, sql):
|
|
||||||
'''Run SQL
|
|
||||||
|
|
||||||
@sql: str, sql string to run
|
|
||||||
|
|
||||||
@rtype: 0 on success and -1 on failure
|
|
||||||
'''
|
|
||||||
try:
|
|
||||||
return CTaosInterface.libtaos.taos_query(
|
|
||||||
connection, ctypes.c_char_p(sql.encode('utf-8')))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("sql is expected as a string")
|
|
||||||
# finally:
|
|
||||||
# CTaosInterface.libtaos.close(connection)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def affectedRows(result):
|
|
||||||
"""The affected rows after runing query
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_affected_rows(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def subscribe(connection, restart, topic, sql, interval):
|
|
||||||
"""Create a subscription
|
|
||||||
@restart boolean,
|
|
||||||
@sql string, sql statement for data query, must be a 'select' statement.
|
|
||||||
@topic string, name of this subscription
|
|
||||||
"""
|
|
||||||
return ctypes.c_void_p(CTaosInterface.libtaos.taos_subscribe(
|
|
||||||
connection,
|
|
||||||
1 if restart else 0,
|
|
||||||
ctypes.c_char_p(topic.encode('utf-8')),
|
|
||||||
ctypes.c_char_p(sql.encode('utf-8')),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
interval))
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def consume(sub):
|
|
||||||
"""Consume data of a subscription
|
|
||||||
"""
|
|
||||||
result = ctypes.c_void_p(CTaosInterface.libtaos.taos_consume(sub))
|
|
||||||
fields = []
|
|
||||||
pfields = CTaosInterface.fetchFields(result)
|
|
||||||
for i in range(CTaosInterface.libtaos.taos_num_fields(result)):
|
|
||||||
fields.append({'name': pfields[i].name.decode('utf-8'),
|
|
||||||
'bytes': pfields[i].bytes,
|
|
||||||
'type': ord(pfields[i].type)})
|
|
||||||
return result, fields
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def unsubscribe(sub, keepProgress):
|
|
||||||
"""Cancel a subscription
|
|
||||||
"""
|
|
||||||
CTaosInterface.libtaos.taos_unsubscribe(sub, 1 if keepProgress else 0)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def useResult(result):
|
|
||||||
'''Use result after calling self.query
|
|
||||||
'''
|
|
||||||
fields = []
|
|
||||||
pfields = CTaosInterface.fetchFields(result)
|
|
||||||
for i in range(CTaosInterface.fieldsCount(result)):
|
|
||||||
fields.append({'name': pfields[i].name.decode('utf-8'),
|
|
||||||
'bytes': pfields[i].bytes,
|
|
||||||
'type': ord(pfields[i].type)})
|
|
||||||
|
|
||||||
return fields
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchBlock(result, fields):
|
|
||||||
pblock = ctypes.c_void_p(0)
|
|
||||||
num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
|
|
||||||
result, ctypes.byref(pblock))
|
|
||||||
if num_of_rows == 0:
|
|
||||||
return None, 0
|
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
|
||||||
result) == FieldType.C_TIMESTAMP_MICRO)
|
|
||||||
blocks = [None] * len(fields)
|
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
|
||||||
fieldLen = [
|
|
||||||
ele for ele in ctypes.cast(
|
|
||||||
fieldL, ctypes.POINTER(
|
|
||||||
ctypes.c_int))[
|
|
||||||
:len(fields)]]
|
|
||||||
for i in range(len(fields)):
|
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
|
|
||||||
raise DatabaseError("Invalid data type returned from database")
|
|
||||||
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
|
|
||||||
data, num_of_rows, fieldLen[i], isMicro)
|
|
||||||
|
|
||||||
return blocks, abs(num_of_rows)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchRow(result, fields):
|
|
||||||
pblock = ctypes.c_void_p(0)
|
|
||||||
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
|
|
||||||
if pblock:
|
|
||||||
num_of_rows = 1
|
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
|
||||||
result) == FieldType.C_TIMESTAMP_MICRO)
|
|
||||||
blocks = [None] * len(fields)
|
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
|
||||||
fieldLen = [
|
|
||||||
ele for ele in ctypes.cast(
|
|
||||||
fieldL, ctypes.POINTER(
|
|
||||||
ctypes.c_int))[
|
|
||||||
:len(fields)]]
|
|
||||||
for i in range(len(fields)):
|
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC:
|
|
||||||
raise DatabaseError(
|
|
||||||
"Invalid data type returned from database")
|
|
||||||
if data is None:
|
|
||||||
blocks[i] = [None]
|
|
||||||
else:
|
|
||||||
blocks[i] = _CONVERT_FUNC[fields[i]['type']](
|
|
||||||
data, num_of_rows, fieldLen[i], isMicro)
|
|
||||||
else:
|
|
||||||
return None, 0
|
|
||||||
return blocks, abs(num_of_rows)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def freeResult(result):
|
|
||||||
CTaosInterface.libtaos.taos_free_result(result)
|
|
||||||
result.value = None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fieldsCount(result):
|
|
||||||
return CTaosInterface.libtaos.taos_field_count(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchFields(result):
|
|
||||||
return CTaosInterface.libtaos.taos_fetch_fields(result)
|
|
||||||
|
|
||||||
# @staticmethod
|
|
||||||
# def fetchRow(result, fields):
|
|
||||||
# l = []
|
|
||||||
# row = CTaosInterface.libtaos.taos_fetch_row(result)
|
|
||||||
# if not row:
|
|
||||||
# return None
|
|
||||||
|
|
||||||
# for i in range(len(fields)):
|
|
||||||
# l.append(CTaosInterface.getDataValue(
|
|
||||||
# row[i], fields[i]['type'], fields[i]['bytes']))
|
|
||||||
|
|
||||||
# return tuple(l)
|
|
||||||
|
|
||||||
# @staticmethod
|
|
||||||
# def getDataValue(data, dtype, byte):
|
|
||||||
# '''
|
|
||||||
# '''
|
|
||||||
# if not data:
|
|
||||||
# return None
|
|
||||||
|
|
||||||
# if (dtype == CTaosInterface.TSDB_DATA_TYPE_BOOL):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TINYINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_SMALLINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_INT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BIGINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_FLOAT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_DOUBLE):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BINARY):
|
|
||||||
# return (ctypes.cast(data, ctypes.POINTER(ctypes.c_char))[0:byte]).rstrip('\x00')
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TIMESTAMP):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_NCHAR):
|
|
||||||
# return (ctypes.cast(data, ctypes.c_char_p).value).rstrip('\x00')
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def errno(result):
|
|
||||||
"""Return the error number.
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_errno(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def errStr(result):
|
|
||||||
"""Return the error styring
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
cinter = CTaosInterface()
|
|
||||||
conn = cinter.connect()
|
|
||||||
result = cinter.query(conn, 'show databases')
|
|
||||||
|
|
||||||
print('Query Affected rows: {}'.format(cinter.affectedRows(result)))
|
|
||||||
|
|
||||||
fields = CTaosInterface.useResult(result)
|
|
||||||
|
|
||||||
data, num_of_rows = CTaosInterface.fetchBlock(result, fields)
|
|
||||||
|
|
||||||
print(data)
|
|
||||||
|
|
||||||
cinter.freeResult(result)
|
|
||||||
cinter.close(conn)
|
|
|
@ -1,96 +0,0 @@
|
||||||
from .cursor import TDengineCursor
|
|
||||||
from .subscription import TDengineSubscription
|
|
||||||
from .cinterface import CTaosInterface
|
|
||||||
|
|
||||||
|
|
||||||
class TDengineConnection(object):
|
|
||||||
""" TDengine connection object
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
self._conn = None
|
|
||||||
self._host = None
|
|
||||||
self._user = "root"
|
|
||||||
self._password = "taosdata"
|
|
||||||
self._database = None
|
|
||||||
self._port = 0
|
|
||||||
self._config = None
|
|
||||||
self._chandle = None
|
|
||||||
|
|
||||||
if len(kwargs) > 0:
|
|
||||||
self.config(**kwargs)
|
|
||||||
|
|
||||||
def config(self, **kwargs):
|
|
||||||
# host
|
|
||||||
if 'host' in kwargs:
|
|
||||||
self._host = kwargs['host']
|
|
||||||
|
|
||||||
# user
|
|
||||||
if 'user' in kwargs:
|
|
||||||
self._user = kwargs['user']
|
|
||||||
|
|
||||||
# password
|
|
||||||
if 'password' in kwargs:
|
|
||||||
self._password = kwargs['password']
|
|
||||||
|
|
||||||
# database
|
|
||||||
if 'database' in kwargs:
|
|
||||||
self._database = kwargs['database']
|
|
||||||
|
|
||||||
# port
|
|
||||||
if 'port' in kwargs:
|
|
||||||
self._port = kwargs['port']
|
|
||||||
|
|
||||||
# config
|
|
||||||
if 'config' in kwargs:
|
|
||||||
self._config = kwargs['config']
|
|
||||||
|
|
||||||
self._chandle = CTaosInterface(self._config)
|
|
||||||
self._conn = self._chandle.connect(
|
|
||||||
self._host,
|
|
||||||
self._user,
|
|
||||||
self._password,
|
|
||||||
self._database,
|
|
||||||
self._port)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
"""Close current connection.
|
|
||||||
"""
|
|
||||||
return CTaosInterface.close(self._conn)
|
|
||||||
|
|
||||||
def subscribe(self, restart, topic, sql, interval):
|
|
||||||
"""Create a subscription.
|
|
||||||
"""
|
|
||||||
if self._conn is None:
|
|
||||||
return None
|
|
||||||
sub = CTaosInterface.subscribe(
|
|
||||||
self._conn, restart, topic, sql, interval)
|
|
||||||
return TDengineSubscription(sub)
|
|
||||||
|
|
||||||
def cursor(self):
|
|
||||||
"""Return a new Cursor object using the connection.
|
|
||||||
"""
|
|
||||||
return TDengineCursor(self)
|
|
||||||
|
|
||||||
def commit(self):
|
|
||||||
"""Commit any pending transaction to the database.
|
|
||||||
|
|
||||||
Since TDengine do not support transactions, the implement is void functionality.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def rollback(self):
|
|
||||||
"""Void functionality
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def clear_result_set(self):
|
|
||||||
"""Clear unused result set on this connection.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
conn = TDengineConnection(host='192.168.1.107')
|
|
||||||
conn.close()
|
|
||||||
print("Hello world")
|
|
|
@ -1,42 +0,0 @@
|
||||||
"""Constants in TDengine python
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .dbapi import *
|
|
||||||
|
|
||||||
|
|
||||||
class FieldType(object):
|
|
||||||
"""TDengine Field Types
|
|
||||||
"""
|
|
||||||
# type_code
|
|
||||||
C_NULL = 0
|
|
||||||
C_BOOL = 1
|
|
||||||
C_TINYINT = 2
|
|
||||||
C_SMALLINT = 3
|
|
||||||
C_INT = 4
|
|
||||||
C_BIGINT = 5
|
|
||||||
C_FLOAT = 6
|
|
||||||
C_DOUBLE = 7
|
|
||||||
C_BINARY = 8
|
|
||||||
C_TIMESTAMP = 9
|
|
||||||
C_NCHAR = 10
|
|
||||||
C_TINYINT_UNSIGNED = 11
|
|
||||||
C_SMALLINT_UNSIGNED = 12
|
|
||||||
C_INT_UNSIGNED = 13
|
|
||||||
C_BIGINT_UNSIGNED = 14
|
|
||||||
# NULL value definition
|
|
||||||
# NOTE: These values should change according to C definition in tsdb.h
|
|
||||||
C_BOOL_NULL = 0x02
|
|
||||||
C_TINYINT_NULL = -128
|
|
||||||
C_TINYINT_UNSIGNED_NULL = 255
|
|
||||||
C_SMALLINT_NULL = -32768
|
|
||||||
C_SMALLINT_UNSIGNED_NULL = 65535
|
|
||||||
C_INT_NULL = -2147483648
|
|
||||||
C_INT_UNSIGNED_NULL = 4294967295
|
|
||||||
C_BIGINT_NULL = -9223372036854775808
|
|
||||||
C_BIGINT_UNSIGNED_NULL = 18446744073709551615
|
|
||||||
C_FLOAT_NULL = float('nan')
|
|
||||||
C_DOUBLE_NULL = float('nan')
|
|
||||||
C_BINARY_NULL = bytearray([int('0xff', 16)])
|
|
||||||
# Time precision definition
|
|
||||||
C_TIMESTAMP_MILLI = 0
|
|
||||||
C_TIMESTAMP_MICRO = 1
|
|
|
@ -1,220 +0,0 @@
|
||||||
from .cinterface import CTaosInterface
|
|
||||||
from .error import *
|
|
||||||
from .constants import FieldType
|
|
||||||
|
|
||||||
# querySeqNum = 0
|
|
||||||
|
|
||||||
|
|
||||||
class TDengineCursor(object):
|
|
||||||
"""Database cursor which is used to manage the context of a fetch operation.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
.description: Read-only attribute consists of 7-item sequences:
|
|
||||||
|
|
||||||
> name (mondatory)
|
|
||||||
> type_code (mondatory)
|
|
||||||
> display_size
|
|
||||||
> internal_size
|
|
||||||
> precision
|
|
||||||
> scale
|
|
||||||
> null_ok
|
|
||||||
|
|
||||||
This attribute will be None for operations that do not return rows or
|
|
||||||
if the cursor has not had an operation invoked via the .execute*() method yet.
|
|
||||||
|
|
||||||
.rowcount:This read-only attribute specifies the number of rows that the last
|
|
||||||
.execute*() produced (for DQL statements like SELECT) or affected
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, connection=None):
|
|
||||||
self._description = []
|
|
||||||
self._rowcount = -1
|
|
||||||
self._connection = None
|
|
||||||
self._result = None
|
|
||||||
self._fields = None
|
|
||||||
self._block = None
|
|
||||||
self._block_rows = -1
|
|
||||||
self._block_iter = 0
|
|
||||||
self._affected_rows = 0
|
|
||||||
self._logfile = ""
|
|
||||||
|
|
||||||
if connection is not None:
|
|
||||||
self._connection = connection
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __next__(self):
|
|
||||||
if self._result is None or self._fields is None:
|
|
||||||
raise OperationalError("Invalid use of fetch iterator")
|
|
||||||
|
|
||||||
if self._block_rows <= self._block_iter:
|
|
||||||
block, self._block_rows = CTaosInterface.fetchRow(
|
|
||||||
self._result, self._fields)
|
|
||||||
if self._block_rows == 0:
|
|
||||||
raise StopIteration
|
|
||||||
self._block = list(map(tuple, zip(*block)))
|
|
||||||
self._block_iter = 0
|
|
||||||
|
|
||||||
data = self._block[self._block_iter]
|
|
||||||
self._block_iter += 1
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
@property
|
|
||||||
def description(self):
|
|
||||||
"""Return the description of the object.
|
|
||||||
"""
|
|
||||||
return self._description
|
|
||||||
|
|
||||||
@property
|
|
||||||
def rowcount(self):
|
|
||||||
"""Return the rowcount of the object
|
|
||||||
"""
|
|
||||||
return self._rowcount
|
|
||||||
|
|
||||||
@property
|
|
||||||
def affected_rows(self):
|
|
||||||
"""Return the affected_rows of the object
|
|
||||||
"""
|
|
||||||
return self._affected_rows
|
|
||||||
|
|
||||||
def callproc(self, procname, *args):
|
|
||||||
"""Call a stored database procedure with the given name.
|
|
||||||
|
|
||||||
Void functionality since no stored procedures.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
"""Close the cursor.
|
|
||||||
"""
|
|
||||||
if self._connection is None:
|
|
||||||
return False
|
|
||||||
|
|
||||||
self._reset_result()
|
|
||||||
self._connection = None
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def execute(self, operation, params=None):
|
|
||||||
"""Prepare and execute a database operation (query or command).
|
|
||||||
"""
|
|
||||||
if not operation:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not self._connection:
|
|
||||||
# TODO : change the exception raised here
|
|
||||||
raise ProgrammingError("Cursor is not connected")
|
|
||||||
|
|
||||||
self._reset_result()
|
|
||||||
|
|
||||||
stmt = operation
|
|
||||||
if params is not None:
|
|
||||||
pass
|
|
||||||
|
|
||||||
self._result = CTaosInterface.query(self._connection._conn, stmt)
|
|
||||||
errno = CTaosInterface.libtaos.taos_errno(self._result)
|
|
||||||
if errno == 0:
|
|
||||||
if CTaosInterface.fieldsCount(self._result) == 0:
|
|
||||||
self._affected_rows += CTaosInterface.affectedRows(
|
|
||||||
self._result)
|
|
||||||
return CTaosInterface.affectedRows(self._result)
|
|
||||||
else:
|
|
||||||
self._fields = CTaosInterface.useResult(self._result)
|
|
||||||
return self._handle_result()
|
|
||||||
else:
|
|
||||||
raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
|
|
||||||
|
|
||||||
def executemany(self, operation, seq_of_parameters):
|
|
||||||
"""Prepare a database operation (query or command) and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def fetchone(self):
|
|
||||||
"""Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def fetchmany(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def fetchall_row(self):
|
|
||||||
"""Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
|
|
||||||
"""
|
|
||||||
if self._result is None or self._fields is None:
|
|
||||||
raise OperationalError("Invalid use of fetchall")
|
|
||||||
|
|
||||||
buffer = [[] for i in range(len(self._fields))]
|
|
||||||
self._rowcount = 0
|
|
||||||
while True:
|
|
||||||
block, num_of_fields = CTaosInterface.fetchRow(
|
|
||||||
self._result, self._fields)
|
|
||||||
errno = CTaosInterface.libtaos.taos_errno(self._result)
|
|
||||||
if errno != 0:
|
|
||||||
raise ProgrammingError(
|
|
||||||
CTaosInterface.errStr(
|
|
||||||
self._result), errno)
|
|
||||||
if num_of_fields == 0:
|
|
||||||
break
|
|
||||||
self._rowcount += num_of_fields
|
|
||||||
for i in range(len(self._fields)):
|
|
||||||
buffer[i].extend(block[i])
|
|
||||||
return list(map(tuple, zip(*buffer)))
|
|
||||||
|
|
||||||
def fetchall(self):
|
|
||||||
if self._result is None or self._fields is None:
|
|
||||||
raise OperationalError("Invalid use of fetchall")
|
|
||||||
|
|
||||||
buffer = [[] for i in range(len(self._fields))]
|
|
||||||
self._rowcount = 0
|
|
||||||
while True:
|
|
||||||
block, num_of_fields = CTaosInterface.fetchBlock(
|
|
||||||
self._result, self._fields)
|
|
||||||
errno = CTaosInterface.libtaos.taos_errno(self._result)
|
|
||||||
if errno != 0:
|
|
||||||
raise ProgrammingError(
|
|
||||||
CTaosInterface.errStr(
|
|
||||||
self._result), errno)
|
|
||||||
if num_of_fields == 0:
|
|
||||||
break
|
|
||||||
self._rowcount += num_of_fields
|
|
||||||
for i in range(len(self._fields)):
|
|
||||||
buffer[i].extend(block[i])
|
|
||||||
|
|
||||||
return list(map(tuple, zip(*buffer)))
|
|
||||||
|
|
||||||
def nextset(self):
|
|
||||||
"""
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def setinputsize(self, sizes):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def setutputsize(self, size, column=None):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _reset_result(self):
|
|
||||||
"""Reset the result to unused version.
|
|
||||||
"""
|
|
||||||
self._description = []
|
|
||||||
self._rowcount = -1
|
|
||||||
if self._result is not None:
|
|
||||||
CTaosInterface.freeResult(self._result)
|
|
||||||
self._result = None
|
|
||||||
self._fields = None
|
|
||||||
self._block = None
|
|
||||||
self._block_rows = -1
|
|
||||||
self._block_iter = 0
|
|
||||||
self._affected_rows = 0
|
|
||||||
|
|
||||||
def _handle_result(self):
|
|
||||||
"""Handle the return result from query.
|
|
||||||
"""
|
|
||||||
self._description = []
|
|
||||||
for ele in self._fields:
|
|
||||||
self._description.append(
|
|
||||||
(ele['name'], ele['type'], None, None, None, None, False))
|
|
||||||
|
|
||||||
return self._result
|
|
|
@ -1,44 +0,0 @@
|
||||||
"""Type Objects and Constructors.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import time
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
|
|
||||||
class DBAPITypeObject(object):
|
|
||||||
def __init__(self, *values):
|
|
||||||
self.values = values
|
|
||||||
|
|
||||||
def __com__(self, other):
|
|
||||||
if other in self.values:
|
|
||||||
return 0
|
|
||||||
if other < self.values:
|
|
||||||
return 1
|
|
||||||
else:
|
|
||||||
return -1
|
|
||||||
|
|
||||||
|
|
||||||
Date = datetime.date
|
|
||||||
Time = datetime.time
|
|
||||||
Timestamp = datetime.datetime
|
|
||||||
|
|
||||||
|
|
||||||
def DataFromTicks(ticks):
|
|
||||||
return Date(*time.localtime(ticks)[:3])
|
|
||||||
|
|
||||||
|
|
||||||
def TimeFromTicks(ticks):
|
|
||||||
return Time(*time.localtime(ticks)[3:6])
|
|
||||||
|
|
||||||
|
|
||||||
def TimestampFromTicks(ticks):
|
|
||||||
return Timestamp(*time.localtime(ticks)[:6])
|
|
||||||
|
|
||||||
|
|
||||||
Binary = bytes
|
|
||||||
|
|
||||||
# STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
|
|
||||||
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
|
|
||||||
# NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
|
|
||||||
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
|
|
||||||
# ROWID = DBAPITypeObject()
|
|
|
@@ -1,66 +0,0 @@
"""Python exceptions
"""


class Error(Exception):
    def __init__(self, msg=None, errno=None):
        self.msg = msg
        self._full_msg = self.msg
        self.errno = errno

    def __str__(self):
        return self._full_msg


class Warning(Exception):
    """Exception raised for important warnings like data truncations while inserting.
    """
    pass


class InterfaceError(Error):
    """Exception raised for errors that are related to the database interface rather than the database itself.
    """
    pass


class DatabaseError(Error):
    """Exception raised for errors that are related to the database.
    """
    pass


class DataError(DatabaseError):
    """Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
    """
    pass


class OperationalError(DatabaseError):
    """Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
    """
    pass


class IntegrityError(DatabaseError):
    """Exception raised when the relational integrity of the database is affected.
    """
    pass


class InternalError(DatabaseError):
    """Exception raised when the database encounters an internal error.
    """
    pass


class ProgrammingError(DatabaseError):
    """Exception raised for programming errors.
    """
    pass


class NotSupportedError(DatabaseError):
    """Exception raised in case a method or database API was used which is not supported by the database,.
    """
    pass
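These classes mirror the standard DB-API exception hierarchy, so callers of the removed connector can catch them around query execution. A minimal sketch, assuming libtaos is installed and reachable; the host, credentials, and SQL text are illustrative.

```python
import taos
from taos.error import OperationalError, ProgrammingError

conn = None
try:
    conn = taos.connect(host="127.0.0.1", user="root",
                        password="taosdata", database="test")
    cursor = conn.cursor()
    # execute() raises ProgrammingError when the server rejects the statement
    cursor.execute("select * from no_such_table")
    rows = cursor.fetchall()
except ProgrammingError as err:
    print("statement rejected:", err)
except OperationalError as err:
    print("invalid cursor use or connection problem:", err)
finally:
    if conn is not None:
        conn.close()
```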
@@ -1,57 +0,0 @@
from .cinterface import CTaosInterface
from .error import *


class TDengineSubscription(object):
    """TDengine subscription object
    """

    def __init__(self, sub):
        self._sub = sub

    def consume(self):
        """Consume rows of a subscription
        """
        if self._sub is None:
            raise OperationalError("Invalid use of consume")

        result, fields = CTaosInterface.consume(self._sub)
        buffer = [[] for i in range(len(fields))]
        while True:
            block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
            if num_of_fields == 0:
                break
            for i in range(len(fields)):
                buffer[i].extend(block[i])

        self.fields = fields
        return list(map(tuple, zip(*buffer)))

    def close(self, keepProgress=True):
        """Close the Subscription.
        """
        if self._sub is None:
            return False

        CTaosInterface.unsubscribe(self._sub, keepProgress)
        return True


if __name__ == '__main__':
    from .connection import TDengineConnection
    conn = TDengineConnection(
        host="127.0.0.1",
        user="root",
        password="taosdata",
        database="test")

    # Generate a cursor object to run SQL commands
    sub = conn.subscribe(True, "test", "select * from meters;", 1000)

    for i in range(0, 10):
        data = sub.consume()
        for d in data:
            print(d)

    sub.close()
    conn.close()
@@ -1,12 +0,0 @@
Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>

This program is free software: you can use, redistribute, and/or modify
it under the terms of the GNU Affero General Public License, version 3
or later ("AGPL"), as published by the Free Software Foundation.

This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
@@ -1 +0,0 @@
# TDengine python client interface
@@ -1,20 +0,0 @@
import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="taos",
    version="2.0.9",
    author="Taosdata Inc.",
    author_email="support@taosdata.com",
    description="TDengine python client package",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/pypa/sampleproject",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: Windows",
    ],
)
@@ -1,24 +0,0 @@

from .connection import TDengineConnection
from .cursor import TDengineCursor

# Globals
threadsafety = 0
paramstyle = 'pyformat'

__all__ = ['connection', 'cursor']


def connect(*args, **kwargs):
    """ Function to return a TDengine connector object

    Current supporting keyword parameters:
    @dsn: Data source name as string
    @user: Username as string(optional)
    @password: Password as string(optional)
    @host: Hostname(optional)
    @database: Database name(optional)

    @rtype: TDengineConnector
    """
    return TDengineConnection(*args, **kwargs)
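A minimal end-to-end sketch of the connect() entry point defined above; the connection parameters and the table name are illustrative assumptions, not part of this diff.

```python
import taos

# connect() simply forwards keyword arguments to TDengineConnection
conn = taos.connect(host="127.0.0.1", user="root",
                    password="taosdata", database="test")
cursor = conn.cursor()

# Non-SELECT statements return the affected row count
cursor.execute("create table if not exists weather (ts timestamp, temperature float)")
cursor.execute("insert into weather values (now, 23.5)")

# Queries populate cursor.description and can be fetched in one call
cursor.execute("select * from weather")
for row in cursor.fetchall():
    print(row)

cursor.close()
conn.close()
```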
@ -1,648 +0,0 @@
|
||||||
import ctypes
|
|
||||||
from .constants import FieldType
|
|
||||||
from .error import *
|
|
||||||
import math
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
|
|
||||||
def _convert_millisecond_to_datetime(milli):
|
|
||||||
return datetime.datetime.fromtimestamp(milli / 1000.0)
|
|
||||||
|
|
||||||
|
|
||||||
def _convert_microsecond_to_datetime(micro):
|
|
||||||
return datetime.datetime.fromtimestamp(micro / 1000000.0)
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bool row to python row
|
|
||||||
"""
|
|
||||||
_timestamp_converter = _convert_millisecond_to_datetime
|
|
||||||
if micro:
|
|
||||||
_timestamp_converter = _convert_microsecond_to_datetime
|
|
||||||
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_int64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_int64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bool row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_byte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_bool))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C tinyint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_tinyint_unsigned_to_python(
|
|
||||||
data,
|
|
||||||
num_of_rows,
|
|
||||||
nbytes=None,
|
|
||||||
micro=False):
|
|
||||||
"""Function to convert C tinyint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ubyte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ubyte))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C smallint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_smallint_unsigned_to_python(
|
|
||||||
data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C smallint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ushort))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_ushort))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C int row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C int row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C bigint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_bigint_unsigned_to_python(
|
|
||||||
data,
|
|
||||||
num_of_rows,
|
|
||||||
nbytes=None,
|
|
||||||
micro=False):
|
|
||||||
"""Function to convert C bigint row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [
|
|
||||||
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(
|
|
||||||
ctypes.c_uint64))[
|
|
||||||
:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C float row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C double row to python row
|
|
||||||
"""
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
|
|
||||||
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C binary row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
if num_of_rows > 0:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
|
||||||
else:
|
|
||||||
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
|
|
||||||
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C nchar row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
if num_of_rows >= 0:
|
|
||||||
tmpstr = ctypes.c_char_p(data)
|
|
||||||
res.append(tmpstr.value.decode())
|
|
||||||
else:
|
|
||||||
res.append((ctypes.cast(data + nbytes * i,
|
|
||||||
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C binary row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
if num_of_rows > 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
rbyte = ctypes.cast(
|
|
||||||
data + nbytes * i,
|
|
||||||
ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:1].pop()
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode()[0:rbyte])
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
rbyte = ctypes.cast(
|
|
||||||
data + nbytes * i,
|
|
||||||
ctypes.POINTER(
|
|
||||||
ctypes.c_short))[
|
|
||||||
:1].pop()
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode()[0:rbyte])
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
|
|
||||||
"""Function to convert C nchar row to python row
|
|
||||||
"""
|
|
||||||
assert(nbytes is not None)
|
|
||||||
res = []
|
|
||||||
if num_of_rows >= 0:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
|
|
||||||
res.append(tmpstr.value.decode())
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
else:
|
|
||||||
for i in range(abs(num_of_rows)):
|
|
||||||
try:
|
|
||||||
res.append((ctypes.cast(data + nbytes * i + 2,
|
|
||||||
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
|
|
||||||
except ValueError:
|
|
||||||
res.append(None)
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
_CONVERT_FUNC = {
|
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
|
||||||
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
|
||||||
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
|
||||||
FieldType.C_INT: _crow_int_to_python,
|
|
||||||
FieldType.C_BIGINT: _crow_bigint_to_python,
|
|
||||||
FieldType.C_FLOAT: _crow_float_to_python,
|
|
||||||
FieldType.C_DOUBLE: _crow_double_to_python,
|
|
||||||
FieldType.C_BINARY: _crow_binary_to_python,
|
|
||||||
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
|
||||||
FieldType.C_NCHAR: _crow_nchar_to_python,
|
|
||||||
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
|
||||||
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
|
||||||
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
|
||||||
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
|
||||||
}
|
|
||||||
|
|
||||||
_CONVERT_FUNC_BLOCK = {
|
|
||||||
FieldType.C_BOOL: _crow_bool_to_python,
|
|
||||||
FieldType.C_TINYINT: _crow_tinyint_to_python,
|
|
||||||
FieldType.C_SMALLINT: _crow_smallint_to_python,
|
|
||||||
FieldType.C_INT: _crow_int_to_python,
|
|
||||||
FieldType.C_BIGINT: _crow_bigint_to_python,
|
|
||||||
FieldType.C_FLOAT: _crow_float_to_python,
|
|
||||||
FieldType.C_DOUBLE: _crow_double_to_python,
|
|
||||||
FieldType.C_BINARY: _crow_binary_to_python_block,
|
|
||||||
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
|
|
||||||
FieldType.C_NCHAR: _crow_nchar_to_python_block,
|
|
||||||
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
|
|
||||||
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
|
|
||||||
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
|
|
||||||
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
|
|
||||||
}
|
|
||||||
|
|
||||||
# Corresponding TAOS_FIELD structure in C
|
|
||||||
|
|
||||||
|
|
||||||
class TaosField(ctypes.Structure):
|
|
||||||
_fields_ = [('name', ctypes.c_char * 65),
|
|
||||||
('type', ctypes.c_char),
|
|
||||||
('bytes', ctypes.c_short)]
|
|
||||||
|
|
||||||
# C interface class
|
|
||||||
|
|
||||||
|
|
||||||
class CTaosInterface(object):
|
|
||||||
|
|
||||||
libtaos = ctypes.windll.LoadLibrary('taos')
|
|
||||||
|
|
||||||
libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
|
|
||||||
libtaos.taos_init.restype = None
|
|
||||||
libtaos.taos_connect.restype = ctypes.c_void_p
|
|
||||||
#libtaos.taos_use_result.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_fetch_row.restype = ctypes.POINTER(ctypes.c_void_p)
|
|
||||||
libtaos.taos_errstr.restype = ctypes.c_char_p
|
|
||||||
libtaos.taos_subscribe.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_consume.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_fetch_lengths.restype = ctypes.c_void_p
|
|
||||||
libtaos.taos_free_result.restype = None
|
|
||||||
libtaos.taos_errno.restype = ctypes.c_int
|
|
||||||
libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
|
|
||||||
|
|
||||||
def __init__(self, config=None):
|
|
||||||
'''
|
|
||||||
Function to initialize the class
|
|
||||||
@host : str, hostname to connect
|
|
||||||
@user : str, username to connect to server
|
|
||||||
@password : str, password to connect to server
|
|
||||||
@db : str, default db to use when log in
|
|
||||||
@config : str, config directory
|
|
||||||
|
|
||||||
@rtype : None
|
|
||||||
'''
|
|
||||||
if config is None:
|
|
||||||
self._config = ctypes.c_char_p(None)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
self._config = ctypes.c_char_p(config.encode('utf-8'))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("config is expected as a str")
|
|
||||||
|
|
||||||
if config is not None:
|
|
||||||
CTaosInterface.libtaos.taos_options(3, self._config)
|
|
||||||
|
|
||||||
CTaosInterface.libtaos.taos_init()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def config(self):
|
|
||||||
""" Get current config
|
|
||||||
"""
|
|
||||||
return self._config
|
|
||||||
|
|
||||||
def connect(
|
|
||||||
self,
|
|
||||||
host=None,
|
|
||||||
user="root",
|
|
||||||
password="taosdata",
|
|
||||||
db=None,
|
|
||||||
port=0):
|
|
||||||
'''
|
|
||||||
Function to connect to server
|
|
||||||
|
|
||||||
@rtype: c_void_p, TDengine handle
|
|
||||||
'''
|
|
||||||
# host
|
|
||||||
try:
|
|
||||||
_host = ctypes.c_char_p(host.encode(
|
|
||||||
"utf-8")) if host is not None else ctypes.c_char_p(None)
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("host is expected as a str")
|
|
||||||
|
|
||||||
# user
|
|
||||||
try:
|
|
||||||
_user = ctypes.c_char_p(user.encode("utf-8"))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("user is expected as a str")
|
|
||||||
|
|
||||||
# password
|
|
||||||
try:
|
|
||||||
_password = ctypes.c_char_p(password.encode("utf-8"))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("password is expected as a str")
|
|
||||||
|
|
||||||
# db
|
|
||||||
try:
|
|
||||||
_db = ctypes.c_char_p(
|
|
||||||
db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("db is expected as a str")
|
|
||||||
|
|
||||||
# port
|
|
||||||
try:
|
|
||||||
_port = ctypes.c_int(port)
|
|
||||||
except TypeError:
|
|
||||||
raise TypeError("port is expected as an int")
|
|
||||||
|
|
||||||
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
|
|
||||||
_host, _user, _password, _db, _port))
|
|
||||||
|
|
||||||
if connection.value is None:
|
|
||||||
print('connect to TDengine failed')
|
|
||||||
raise ConnectionError("connect to TDengine failed")
|
|
||||||
# sys.exit(1)
|
|
||||||
# else:
|
|
||||||
# print('connect to TDengine success')
|
|
||||||
|
|
||||||
return connection
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def close(connection):
|
|
||||||
'''Close the TDengine handle
|
|
||||||
'''
|
|
||||||
CTaosInterface.libtaos.taos_close(connection)
|
|
||||||
#print('connection is closed')
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def query(connection, sql):
|
|
||||||
'''Run SQL
|
|
||||||
|
|
||||||
@sql: str, sql string to run
|
|
||||||
|
|
||||||
@rtype: 0 on success and -1 on failure
|
|
||||||
'''
|
|
||||||
try:
|
|
||||||
return CTaosInterface.libtaos.taos_query(
|
|
||||||
connection, ctypes.c_char_p(sql.encode('utf-8')))
|
|
||||||
except AttributeError:
|
|
||||||
raise AttributeError("sql is expected as a string")
|
|
||||||
# finally:
|
|
||||||
# CTaosInterface.libtaos.close(connection)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def affectedRows(result):
|
|
||||||
"""The affected rows after runing query
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_affected_rows(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def subscribe(connection, restart, topic, sql, interval):
|
|
||||||
"""Create a subscription
|
|
||||||
@restart boolean,
|
|
||||||
@sql string, sql statement for data query, must be a 'select' statement.
|
|
||||||
@topic string, name of this subscription
|
|
||||||
"""
|
|
||||||
return ctypes.c_void_p(CTaosInterface.libtaos.taos_subscribe(
|
|
||||||
connection,
|
|
||||||
1 if restart else 0,
|
|
||||||
ctypes.c_char_p(topic.encode('utf-8')),
|
|
||||||
ctypes.c_char_p(sql.encode('utf-8')),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
interval))
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def consume(sub):
|
|
||||||
"""Consume data of a subscription
|
|
||||||
"""
|
|
||||||
result = ctypes.c_void_p(CTaosInterface.libtaos.taos_consume(sub))
|
|
||||||
fields = []
|
|
||||||
pfields = CTaosInterface.fetchFields(result)
|
|
||||||
for i in range(CTaosInterface.libtaos.taos_num_fields(result)):
|
|
||||||
fields.append({'name': pfields[i].name.decode('utf-8'),
|
|
||||||
'bytes': pfields[i].bytes,
|
|
||||||
'type': ord(pfields[i].type)})
|
|
||||||
return result, fields
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def unsubscribe(sub, keepProgress):
|
|
||||||
"""Cancel a subscription
|
|
||||||
"""
|
|
||||||
CTaosInterface.libtaos.taos_unsubscribe(sub, 1 if keepProgress else 0)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def useResult(result):
|
|
||||||
'''Use result after calling self.query
|
|
||||||
'''
|
|
||||||
fields = []
|
|
||||||
pfields = CTaosInterface.fetchFields(result)
|
|
||||||
for i in range(CTaosInterface.fieldsCount(result)):
|
|
||||||
fields.append({'name': pfields[i].name.decode('utf-8'),
|
|
||||||
'bytes': pfields[i].bytes,
|
|
||||||
'type': ord(pfields[i].type)})
|
|
||||||
|
|
||||||
return fields
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchBlock(result, fields):
|
|
||||||
pblock = ctypes.c_void_p(0)
|
|
||||||
num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
|
|
||||||
result, ctypes.byref(pblock))
|
|
||||||
if num_of_rows == 0:
|
|
||||||
return None, 0
|
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
|
||||||
result) == FieldType.C_TIMESTAMP_MICRO)
|
|
||||||
blocks = [None] * len(fields)
|
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
|
||||||
fieldLen = [
|
|
||||||
ele for ele in ctypes.cast(
|
|
||||||
fieldL, ctypes.POINTER(
|
|
||||||
ctypes.c_int))[
|
|
||||||
:len(fields)]]
|
|
||||||
for i in range(len(fields)):
|
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
|
|
||||||
raise DatabaseError("Invalid data type returned from database")
|
|
||||||
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
|
|
||||||
data, num_of_rows, fieldLen[i], isMicro)
|
|
||||||
|
|
||||||
return blocks, abs(num_of_rows)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchRow(result, fields):
|
|
||||||
pblock = ctypes.c_void_p(0)
|
|
||||||
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
|
|
||||||
if pblock:
|
|
||||||
num_of_rows = 1
|
|
||||||
isMicro = (CTaosInterface.libtaos.taos_result_precision(
|
|
||||||
result) == FieldType.C_TIMESTAMP_MICRO)
|
|
||||||
blocks = [None] * len(fields)
|
|
||||||
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
|
|
||||||
fieldLen = [
|
|
||||||
ele for ele in ctypes.cast(
|
|
||||||
fieldL, ctypes.POINTER(
|
|
||||||
ctypes.c_int))[
|
|
||||||
:len(fields)]]
|
|
||||||
for i in range(len(fields)):
|
|
||||||
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
|
|
||||||
if fields[i]['type'] not in _CONVERT_FUNC:
|
|
||||||
raise DatabaseError(
|
|
||||||
"Invalid data type returned from database")
|
|
||||||
if data is None:
|
|
||||||
blocks[i] = [None]
|
|
||||||
else:
|
|
||||||
blocks[i] = _CONVERT_FUNC[fields[i]['type']](
|
|
||||||
data, num_of_rows, fieldLen[i], isMicro)
|
|
||||||
else:
|
|
||||||
return None, 0
|
|
||||||
return blocks, abs(num_of_rows)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def freeResult(result):
|
|
||||||
CTaosInterface.libtaos.taos_free_result(result)
|
|
||||||
result.value = None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fieldsCount(result):
|
|
||||||
return CTaosInterface.libtaos.taos_field_count(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def fetchFields(result):
|
|
||||||
return CTaosInterface.libtaos.taos_fetch_fields(result)
|
|
||||||
|
|
||||||
# @staticmethod
|
|
||||||
# def fetchRow(result, fields):
|
|
||||||
# l = []
|
|
||||||
# row = CTaosInterface.libtaos.taos_fetch_row(result)
|
|
||||||
# if not row:
|
|
||||||
# return None
|
|
||||||
|
|
||||||
# for i in range(len(fields)):
|
|
||||||
# l.append(CTaosInterface.getDataValue(
|
|
||||||
# row[i], fields[i]['type'], fields[i]['bytes']))
|
|
||||||
|
|
||||||
# return tuple(l)
|
|
||||||
|
|
||||||
# @staticmethod
|
|
||||||
# def getDataValue(data, dtype, byte):
|
|
||||||
# '''
|
|
||||||
# '''
|
|
||||||
# if not data:
|
|
||||||
# return None
|
|
||||||
|
|
||||||
# if (dtype == CTaosInterface.TSDB_DATA_TYPE_BOOL):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TINYINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_SMALLINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_INT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BIGINT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_FLOAT):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_DOUBLE):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BINARY):
|
|
||||||
# return (ctypes.cast(data, ctypes.POINTER(ctypes.c_char))[0:byte]).rstrip('\x00')
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TIMESTAMP):
|
|
||||||
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
|
|
||||||
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_NCHAR):
|
|
||||||
# return (ctypes.cast(data, ctypes.c_char_p).value).rstrip('\x00')
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def errno(result):
|
|
||||||
"""Return the error number.
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_errno(result)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def errStr(result):
|
|
||||||
"""Return the error styring
|
|
||||||
"""
|
|
||||||
return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
cinter = CTaosInterface()
|
|
||||||
conn = cinter.connect()
|
|
||||||
result = cinter.query(conn, 'show databases')
|
|
||||||
|
|
||||||
print('Query Affected rows: {}'.format(cinter.affectedRows(result)))
|
|
||||||
|
|
||||||
fields = CTaosInterface.useResult(result)
|
|
||||||
|
|
||||||
data, num_of_rows = CTaosInterface.fetchBlock(result, fields)
|
|
||||||
|
|
||||||
print(data)
|
|
||||||
|
|
||||||
cinter.freeResult(result)
|
|
||||||
cinter.close(conn)
|
|
|
@@ -1,96 +0,0 @@
from .cursor import TDengineCursor
from .subscription import TDengineSubscription
from .cinterface import CTaosInterface


class TDengineConnection(object):
    """ TDengine connection object
    """

    def __init__(self, *args, **kwargs):
        self._conn = None
        self._host = None
        self._user = "root"
        self._password = "taosdata"
        self._database = None
        self._port = 0
        self._config = None
        self._chandle = None

        if len(kwargs) > 0:
            self.config(**kwargs)

    def config(self, **kwargs):
        # host
        if 'host' in kwargs:
            self._host = kwargs['host']

        # user
        if 'user' in kwargs:
            self._user = kwargs['user']

        # password
        if 'password' in kwargs:
            self._password = kwargs['password']

        # database
        if 'database' in kwargs:
            self._database = kwargs['database']

        # port
        if 'port' in kwargs:
            self._port = kwargs['port']

        # config
        if 'config' in kwargs:
            self._config = kwargs['config']

        self._chandle = CTaosInterface(self._config)
        self._conn = self._chandle.connect(
            self._host,
            self._user,
            self._password,
            self._database,
            self._port)

    def close(self):
        """Close current connection.
        """
        return CTaosInterface.close(self._conn)

    def subscribe(self, restart, topic, sql, interval):
        """Create a subscription.
        """
        if self._conn is None:
            return None
        sub = CTaosInterface.subscribe(
            self._conn, restart, topic, sql, interval)
        return TDengineSubscription(sub)

    def cursor(self):
        """Return a new Cursor object using the connection.
        """
        return TDengineCursor(self)

    def commit(self):
        """Commit any pending transaction to the database.

        Since TDengine do not support transactions, the implement is void functionality.
        """
        pass

    def rollback(self):
        """Void functionality
        """
        pass

    def clear_result_set(self):
        """Clear unused result set on this connection.
        """
        pass


if __name__ == "__main__":
    conn = TDengineConnection(host='192.168.1.107')
    conn.close()
    print("Hello world")
@@ -1,42 +0,0 @@
"""Constants in TDengine python
"""

from .dbapi import *


class FieldType(object):
    """TDengine Field Types
    """
    # type_code
    C_NULL = 0
    C_BOOL = 1
    C_TINYINT = 2
    C_SMALLINT = 3
    C_INT = 4
    C_BIGINT = 5
    C_FLOAT = 6
    C_DOUBLE = 7
    C_BINARY = 8
    C_TIMESTAMP = 9
    C_NCHAR = 10
    C_TINYINT_UNSIGNED = 11
    C_SMALLINT_UNSIGNED = 12
    C_INT_UNSIGNED = 13
    C_BIGINT_UNSIGNED = 14
    # NULL value definition
    # NOTE: These values should change according to C definition in tsdb.h
    C_BOOL_NULL = 0x02
    C_TINYINT_NULL = -128
    C_TINYINT_UNSIGNED_NULL = 255
    C_SMALLINT_NULL = -32768
    C_SMALLINT_UNSIGNED_NULL = 65535
    C_INT_NULL = -2147483648
    C_INT_UNSIGNED_NULL = 4294967295
    C_BIGINT_NULL = -9223372036854775808
    C_BIGINT_UNSIGNED_NULL = 18446744073709551615
    C_FLOAT_NULL = float('nan')
    C_DOUBLE_NULL = float('nan')
    C_BINARY_NULL = bytearray([int('0xff', 16)])
    # Timestamp precision definition
    C_TIMESTAMP_MILLI = 0
    C_TIMESTAMP_MICRO = 1
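These sentinels are the raw values the C client library returns for SQL NULL; the row converters in cinterface.py map them to Python None. A small sketch of that pattern, using an illustrative sample list:

```python
from taos.constants import FieldType

# Raw INT column values as the C library would deliver them (sample data),
# with C_INT_NULL standing in for SQL NULL.
raw_ints = [42, FieldType.C_INT_NULL, 7]

python_values = [None if v == FieldType.C_INT_NULL else v for v in raw_ints]
print(python_values)  # [42, None, 7]
```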
@@ -1,220 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
from .constants import FieldType

# querySeqNum = 0


class TDengineCursor(object):
    """Database cursor which is used to manage the context of a fetch operation.

    Attributes:
        .description: Read-only attribute consists of 7-item sequences:

            > name (mondatory)
            > type_code (mondatory)
            > display_size
            > internal_size
            > precision
            > scale
            > null_ok

            This attribute will be None for operations that do not return rows or
            if the cursor has not had an operation invoked via the .execute*() method yet.

        .rowcount:This read-only attribute specifies the number of rows that the last
            .execute*() produced (for DQL statements like SELECT) or affected
    """

    def __init__(self, connection=None):
        self._description = []
        self._rowcount = -1
        self._connection = None
        self._result = None
        self._fields = None
        self._block = None
        self._block_rows = -1
        self._block_iter = 0
        self._affected_rows = 0
        self._logfile = ""

        if connection is not None:
            self._connection = connection

    def __iter__(self):
        return self

    def __next__(self):
        if self._result is None or self._fields is None:
            raise OperationalError("Invalid use of fetch iterator")

        if self._block_rows <= self._block_iter:
            block, self._block_rows = CTaosInterface.fetchRow(
                self._result, self._fields)
            if self._block_rows == 0:
                raise StopIteration
            self._block = list(map(tuple, zip(*block)))
            self._block_iter = 0

        data = self._block[self._block_iter]
        self._block_iter += 1

        return data

    @property
    def description(self):
        """Return the description of the object.
        """
        return self._description

    @property
    def rowcount(self):
        """Return the rowcount of the object
        """
        return self._rowcount

    @property
    def affected_rows(self):
        """Return the affected_rows of the object
        """
        return self._affected_rows

    def callproc(self, procname, *args):
        """Call a stored database procedure with the given name.

        Void functionality since no stored procedures.
        """
        pass

    def close(self):
        """Close the cursor.
        """
        if self._connection is None:
            return False

        self._reset_result()
        self._connection = None

        return True

    def execute(self, operation, params=None):
        """Prepare and execute a database operation (query or command).
        """
        if not operation:
            return None

        if not self._connection:
            # TODO : change the exception raised here
            raise ProgrammingError("Cursor is not connected")

        self._reset_result()

        stmt = operation
        if params is not None:
            pass

        self._result = CTaosInterface.query(self._connection._conn, stmt)
        errno = CTaosInterface.libtaos.taos_errno(self._result)
        if errno == 0:
            if CTaosInterface.fieldsCount(self._result) == 0:
                self._affected_rows += CTaosInterface.affectedRows(
                    self._result)
                return CTaosInterface.affectedRows(self._result)
            else:
                self._fields = CTaosInterface.useResult(self._result)
                return self._handle_result()
        else:
            raise ProgrammingError(CTaosInterface.errStr(self._result), errno)

    def executemany(self, operation, seq_of_parameters):
        """Prepare a database operation (query or command) and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters.
        """
        pass

    def fetchone(self):
        """Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
        """
        pass

    def fetchmany(self):
        pass

    def fetchall_row(self):
        """Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
        """
        if self._result is None or self._fields is None:
            raise OperationalError("Invalid use of fetchall")

        buffer = [[] for i in range(len(self._fields))]
        self._rowcount = 0
        while True:
            block, num_of_fields = CTaosInterface.fetchRow(
                self._result, self._fields)
            errno = CTaosInterface.libtaos.taos_errno(self._result)
            if errno != 0:
                raise ProgrammingError(
                    CTaosInterface.errStr(
                        self._result), errno)
            if num_of_fields == 0:
                break
            self._rowcount += num_of_fields
            for i in range(len(self._fields)):
                buffer[i].extend(block[i])
        return list(map(tuple, zip(*buffer)))

    def fetchall(self):
        if self._result is None or self._fields is None:
            raise OperationalError("Invalid use of fetchall")

        buffer = [[] for i in range(len(self._fields))]
        self._rowcount = 0
        while True:
            block, num_of_fields = CTaosInterface.fetchBlock(
                self._result, self._fields)
            errno = CTaosInterface.libtaos.taos_errno(self._result)
            if errno != 0:
                raise ProgrammingError(
                    CTaosInterface.errStr(
                        self._result), errno)
            if num_of_fields == 0:
                break
            self._rowcount += num_of_fields
            for i in range(len(self._fields)):
                buffer[i].extend(block[i])

        return list(map(tuple, zip(*buffer)))

    def nextset(self):
        """
        """
        pass

    def setinputsize(self, sizes):
        pass

    def setutputsize(self, size, column=None):
        pass

    def _reset_result(self):
        """Reset the result to unused version.
        """
        self._description = []
        self._rowcount = -1
        if self._result is not None:
            CTaosInterface.freeResult(self._result)
        self._result = None
        self._fields = None
        self._block = None
        self._block_rows = -1
        self._block_iter = 0
        self._affected_rows = 0

    def _handle_result(self):
        """Handle the return result from query.
        """
        self._description = []
        for ele in self._fields:
            self._description.append(
                (ele['name'], ele['type'], None, None, None, None, False))

        return self._result
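Because the cursor above implements __iter__/__next__, results can also be streamed row by row instead of materialised with fetchall(). A short sketch; the connection parameters and query are illustrative assumptions.

```python
import taos

conn = taos.connect(host="127.0.0.1", user="root",
                    password="taosdata", database="test")
cursor = conn.cursor()
cursor.execute("select * from weather")

# The cursor fetches result blocks lazily and yields one tuple per row
for row in cursor:
    print(row)

cursor.close()
conn.close()
```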
@@ -215,11 +215,11 @@ int32_t* taosGetErrno();
 #define TSDB_CODE_VND_IS_FLOWCTRL              TAOS_DEF_ERROR_CODE(0, 0x050C) //"Database memory is full for waiting commit")
 #define TSDB_CODE_VND_IS_DROPPING              TAOS_DEF_ERROR_CODE(0, 0x050D) //"Database is dropping")
 #define TSDB_CODE_VND_IS_BALANCING             TAOS_DEF_ERROR_CODE(0, 0x050E) //"Database is balancing")
+#define TSDB_CODE_VND_IS_CLOSING               TAOS_DEF_ERROR_CODE(0, 0x0510) //"Database is closing")
 #define TSDB_CODE_VND_NOT_SYNCED               TAOS_DEF_ERROR_CODE(0, 0x0511) //"Database suspended")
 #define TSDB_CODE_VND_NO_WRITE_AUTH            TAOS_DEF_ERROR_CODE(0, 0x0512) //"Database write operation denied")
 #define TSDB_CODE_VND_IS_SYNCING               TAOS_DEF_ERROR_CODE(0, 0x0513) //"Database is syncing")
 #define TSDB_CODE_VND_INVALID_TSDB_STATE       TAOS_DEF_ERROR_CODE(0, 0x0514) //"Invalid tsdb state")
-#define TSDB_CODE_VND_IS_CLOSING               TAOS_DEF_ERROR_CODE(0, 0x0515) //"Database is closing")

 // tsdb
 #define TSDB_CODE_TDB_INVALID_TABLE_ID         TAOS_DEF_ERROR_CODE(0, 0x0600) //"Invalid table ID")
@@ -364,7 +364,7 @@ typedef struct SDbs_S {
 typedef struct SpecifiedQueryInfo_S {
   uint64_t queryInterval;  // 0: unlimit  > 0   loop/s
   uint32_t concurrent;
-  uint64_t sqlCount;
+  int      sqlCount;
   uint32_t asyncMode; // 0: sync, 1: async
   uint64_t subscribeInterval; // ms
   uint64_t queryTimes;
@@ -373,6 +373,7 @@ typedef struct SpecifiedQueryInfo_S {
   char     sql[MAX_QUERY_SQL_COUNT][MAX_QUERY_SQL_LENGTH+1];
   char     result[MAX_QUERY_SQL_COUNT][MAX_FILE_NAME_LEN+1];
   int      resubAfterConsume[MAX_QUERY_SQL_COUNT];
+  int      endAfterConsume[MAX_QUERY_SQL_COUNT];
   TAOS_SUB* tsub[MAX_QUERY_SQL_COUNT];
   char     topic[MAX_QUERY_SQL_COUNT][32];
   int      consumed[MAX_QUERY_SQL_COUNT];
@@ -391,10 +392,11 @@ typedef struct SuperQueryInfo_S {
   uint64_t queryTimes;
   int64_t  childTblCount;
   char     childTblPrefix[MAX_TB_NAME_SIZE];
-  uint64_t sqlCount;
+  int      sqlCount;
   char     sql[MAX_QUERY_SQL_COUNT][MAX_QUERY_SQL_LENGTH+1];
   char     result[MAX_QUERY_SQL_COUNT][MAX_FILE_NAME_LEN+1];
   int      resubAfterConsume;
+  int      endAfterConsume;
   TAOS_SUB* tsub[MAX_QUERY_SQL_COUNT];

   char*    childTblName;
@@ -1717,7 +1719,7 @@ static void printfQueryMeta() {

   if ((SUBSCRIBE_TEST == g_args.test_mode) || (QUERY_TEST == g_args.test_mode)) {
     printf("specified table query info: \n");
-    printf("sqlCount: \033[33m%"PRIu64"\033[0m\n",
+    printf("sqlCount: \033[33m%d\033[0m\n",
         g_queryInfo.specifiedQueryInfo.sqlCount);
     if (g_queryInfo.specifiedQueryInfo.sqlCount > 0) {
       printf("specified tbl query times:\n");
@@ -1737,15 +1739,15 @@ static void printfQueryMeta() {
       printf("keepProgress: \033[33m%d\033[0m\n",
           g_queryInfo.specifiedQueryInfo.subscribeKeepProgress);

-      for (uint64_t i = 0; i < g_queryInfo.specifiedQueryInfo.sqlCount; i++) {
-        printf("  sql[%"PRIu64"]: \033[33m%s\033[0m\n",
+      for (int i = 0; i < g_queryInfo.specifiedQueryInfo.sqlCount; i++) {
+        printf("  sql[%d]: \033[33m%s\033[0m\n",
             i, g_queryInfo.specifiedQueryInfo.sql[i]);
       }
       printf("\n");
   }

   printf("super table query info:\n");
-  printf("sqlCount: \033[33m%"PRIu64"\033[0m\n",
+  printf("sqlCount: \033[33m%d\033[0m\n",
       g_queryInfo.superQueryInfo.sqlCount);

   if (g_queryInfo.superQueryInfo.sqlCount > 0) {
@ -4197,7 +4199,7 @@ static bool getMetaFromQueryJsonFile(cJSON* root) {
|
||||||
if (concurrent && concurrent->type == cJSON_Number) {
|
if (concurrent && concurrent->type == cJSON_Number) {
|
||||||
if (concurrent->valueint <= 0) {
|
if (concurrent->valueint <= 0) {
|
||||||
errorPrint(
|
errorPrint(
|
||||||
"%s() LN%d, query sqlCount %"PRIu64" or concurrent %d is not correct.\n",
|
"%s() LN%d, query sqlCount %d or concurrent %d is not correct.\n",
|
||||||
__func__, __LINE__,
|
__func__, __LINE__,
|
||||||
g_queryInfo.specifiedQueryInfo.sqlCount,
|
g_queryInfo.specifiedQueryInfo.sqlCount,
|
||||||
g_queryInfo.specifiedQueryInfo.concurrent);

@ -4296,6 +4298,17 @@ static bool getMetaFromQueryJsonFile(cJSON* root) {
tstrncpy(g_queryInfo.specifiedQueryInfo.sql[j],
sqlStr->valuestring, MAX_QUERY_SQL_LENGTH);

cJSON* endAfterConsume =
cJSON_GetObjectItem(specifiedQuery, "endAfterConsume");
if (endAfterConsume
&& endAfterConsume->type == cJSON_Number) {
g_queryInfo.specifiedQueryInfo.endAfterConsume[j]
= endAfterConsume->valueint;
} else if (!endAfterConsume) {
// default value is -1, which mean infinite loop
g_queryInfo.specifiedQueryInfo.endAfterConsume[j] = -1;
}

cJSON* resubAfterConsume =
cJSON_GetObjectItem(specifiedQuery, "resubAfterConsume");
if (resubAfterConsume

@ -4303,9 +4316,8 @@ static bool getMetaFromQueryJsonFile(cJSON* root) {
g_queryInfo.specifiedQueryInfo.resubAfterConsume[j]
= resubAfterConsume->valueint;
} else if (!resubAfterConsume) {
//printf("failed to read json, subscribe interval no found\n");
// default value is -1, which mean do not resub
//goto PARSE_OVER;
g_queryInfo.specifiedQueryInfo.resubAfterConsume[j] = -1;
g_queryInfo.specifiedQueryInfo.resubAfterConsume[j] = 1;
}

cJSON *result = cJSON_GetObjectItem(sql, "result");

@ -4449,16 +4461,26 @@ static bool getMetaFromQueryJsonFile(cJSON* root) {
g_queryInfo.superQueryInfo.subscribeKeepProgress = 0;
}

cJSON* superEndAfterConsume =
cJSON_GetObjectItem(superQuery, "endAfterConsume");
if (superEndAfterConsume
&& superEndAfterConsume->type == cJSON_Number) {
g_queryInfo.superQueryInfo.endAfterConsume =
superEndAfterConsume->valueint;
} else if (!superEndAfterConsume) {
// default value is -1, which mean do not resub
g_queryInfo.superQueryInfo.endAfterConsume = -1;
}

cJSON* superResubAfterConsume =
cJSON_GetObjectItem(superQuery, "resubAfterConsume");
cJSON_GetObjectItem(superQuery, "endAfterConsume");
if (superResubAfterConsume
&& superResubAfterConsume->type == cJSON_Number) {
g_queryInfo.superQueryInfo.resubAfterConsume =
g_queryInfo.superQueryInfo.endAfterConsume =
superResubAfterConsume->valueint;
} else if (!superResubAfterConsume) {
//printf("failed to read json, subscribe interval no found\n");
// default value is -1, which mean do not resub
////goto PARSE_OVER;
g_queryInfo.superQueryInfo.endAfterConsume = -1;
g_queryInfo.superQueryInfo.resubAfterConsume = 1;
}

// supert table sqls
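
The two new parse blocks above give the optional `endAfterConsume` and `resubAfterConsume` JSON fields a sentinel default of -1 when they are absent from the query meta file. A minimal sketch of that parse-with-default pattern, using the same cJSON calls as the hunks; the helper name is illustrative and not part of taosdemo:

```c
#include <stdint.h>
#include "cJSON.h"

// Read an optional numeric field, falling back to a sentinel (here -1) when the
// field is absent; a present-but-non-numeric value is also mapped to the sentinel
// in this sketch.
static int64_t getOptionalInt(cJSON *parent, const char *key, int64_t defaultVal) {
    cJSON *item = cJSON_GetObjectItem(parent, key);
    if (item == NULL) {
        return defaultVal;      // field not present in the JSON file
    }
    if (item->type != cJSON_Number) {
        return defaultVal;      // present but not a number
    }
    return item->valueint;
}
```

Called as `getOptionalInt(specifiedQuery, "endAfterConsume", -1)`, this collapses the if/else-if chains above into one call; -1 then means "consume until stopped" for endAfterConsume and "never re-subscribe" for resubAfterConsume.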

@ -6239,7 +6261,7 @@ static void *specifiedTableQuery(void *sarg) {
uint64_t lastPrintTime = taosGetTimestampMs();
uint64_t startTs = taosGetTimestampMs();

if (g_queryInfo.specifiedQueryInfo.result[pThreadInfo->querySeq] != NULL) {
if (g_queryInfo.specifiedQueryInfo.result[pThreadInfo->querySeq][0] != '\0') {
sprintf(pThreadInfo->filePath, "%s-%d",
g_queryInfo.specifiedQueryInfo.result[pThreadInfo->querySeq],
pThreadInfo->threadID);

@ -6340,7 +6362,7 @@ static void *superTableQuery(void *sarg) {
for (int j = 0; j < g_queryInfo.superQueryInfo.sqlCount; j++) {
memset(sqlstr,0,sizeof(sqlstr));
replaceChildTblName(g_queryInfo.superQueryInfo.sql[j], sqlstr, i);
if (g_queryInfo.superQueryInfo.result[j] != NULL) {
if (g_queryInfo.superQueryInfo.result[j][0] != '\0') {
sprintf(pThreadInfo->filePath, "%s-%d",
g_queryInfo.superQueryInfo.result[j],
pThreadInfo->threadID);
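
The `!= NULL` checks become `[0] != '\0'` because the `result` field is now an embedded character buffer rather than a pointer, so it can never be NULL and "no output file configured" is represented by an empty string. A minimal sketch of that distinction; the struct names and buffer size are illustrative, not the taosdemo definitions:

```c
#include <stddef.h>

#define MAX_FILE_NAME_LEN 256   // illustrative buffer size

// Before: a pointer that may be NULL when no result file is configured.
struct OldQuery { char *result; };

// After: an embedded buffer; absence is an empty string, never NULL.
struct NewQuery { char result[MAX_FILE_NAME_LEN]; };

static int oldHasResultFile(const struct OldQuery *q) { return q->result != NULL; }

static int newHasResultFile(const struct NewQuery *q) { return q->result[0] != '\0'; }
```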

@ -6679,7 +6701,10 @@ static void *superSubscribe(void *sarg) {

uint64_t st = 0, et = 0;

while(1) {
while ((g_queryInfo.superQueryInfo.endAfterConsume == -1)
|| (g_queryInfo.superQueryInfo.endAfterConsume <
consumed[pThreadInfo->end_table_to - pThreadInfo->start_table_from])) {

for (uint64_t i = pThreadInfo->start_table_from;
i <= pThreadInfo->end_table_to; i++) {
tsubSeq = i - pThreadInfo->start_table_from;

@ -6708,7 +6733,7 @@ static void *superSubscribe(void *sarg) {
}
consumed[tsubSeq] ++;

if ((g_queryInfo.superQueryInfo.subscribeKeepProgress)
if ((g_queryInfo.superQueryInfo.resubAfterConsume != -1)
&& (consumed[tsubSeq] >=
g_queryInfo.superQueryInfo.resubAfterConsume)) {
printf("keepProgress:%d, resub super table query: %"PRIu64"\n",

@ -6771,7 +6796,7 @@ static void *specifiedSubscribe(void *sarg) {
"taosdemo-subscribe-%"PRIu64"-%d",
pThreadInfo->querySeq,
pThreadInfo->threadID);
if (g_queryInfo.specifiedQueryInfo.result[pThreadInfo->querySeq] != NULL) {
if (g_queryInfo.specifiedQueryInfo.result[pThreadInfo->querySeq][0] != '\0') {
sprintf(pThreadInfo->filePath, "%s-%d",
g_queryInfo.specifiedQueryInfo.result[pThreadInfo->querySeq],
pThreadInfo->threadID);

@ -6790,7 +6815,10 @@ static void *specifiedSubscribe(void *sarg) {
// start loop to consume result

g_queryInfo.specifiedQueryInfo.consumed[pThreadInfo->threadID] = 0;
while(1) {
while((g_queryInfo.specifiedQueryInfo.endAfterConsume[pThreadInfo->querySeq] == -1)
|| (g_queryInfo.specifiedQueryInfo.consumed[pThreadInfo->threadID] <
g_queryInfo.specifiedQueryInfo.endAfterConsume[pThreadInfo->querySeq])) {

if (ASYNC_MODE == g_queryInfo.specifiedQueryInfo.asyncMode) {
continue;
}

@ -6806,7 +6834,7 @@ static void *specifiedSubscribe(void *sarg) {
}

g_queryInfo.specifiedQueryInfo.consumed[pThreadInfo->threadID] ++;
if ((g_queryInfo.specifiedQueryInfo.subscribeKeepProgress)
if ((g_queryInfo.specifiedQueryInfo.resubAfterConsume[pThreadInfo->querySeq] != -1)
&& (g_queryInfo.specifiedQueryInfo.consumed[pThreadInfo->threadID] >=
g_queryInfo.specifiedQueryInfo.resubAfterConsume[pThreadInfo->querySeq])) {
printf("keepProgress:%d, resub specified query: %"PRIu64"\n",
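
Taken together, the new conditions mean a subscriber stops after `endAfterConsume` result sets (or runs until interrupted when the value is -1) and re-subscribes every `resubAfterConsume` result sets (or never, when -1), instead of looping unconditionally and keying the resubscribe off `subscribeKeepProgress`. A minimal standalone sketch of that control flow, following the specified-query form of the loop condition; the function name and the fetch/resubscribe placeholders are illustrative, not the actual taosdemo routines:

```c
#include <stdint.h>

// Sketch of the consume/resubscribe policy introduced above:
//   endAfterConsume  == -1  -> consume until externally stopped
//   resubAfterConsume == -1 -> never re-subscribe
static void consumeLoop(int64_t endAfterConsume, int64_t resubAfterConsume) {
    int64_t consumed = 0;

    while ((endAfterConsume == -1) || (consumed < endAfterConsume)) {
        /* fetch one result set from the subscription ... */
        consumed++;

        if ((resubAfterConsume != -1) && (consumed >= resubAfterConsume)) {
            /* unsubscribe, subscribe again, and restart the counter ... */
            consumed = 0;
        }
    }
}
```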

@ -6873,12 +6901,12 @@ static int subscribeTestProcess() {

//==== create threads for query for specified table
if (g_queryInfo.specifiedQueryInfo.sqlCount <= 0) {
debugPrint("%s() LN%d, sepcified query sqlCount %"PRIu64".\n",
debugPrint("%s() LN%d, sepcified query sqlCount %d.\n",
__func__, __LINE__,
g_queryInfo.specifiedQueryInfo.sqlCount);
} else {
if (g_queryInfo.specifiedQueryInfo.concurrent <= 0) {
errorPrint("%s() LN%d, sepcified query sqlCount %"PRIu64".\n",
errorPrint("%s() LN%d, sepcified query sqlCount %d.\n",
__func__, __LINE__,
g_queryInfo.specifiedQueryInfo.sqlCount);
exit(-1);

@ -6911,7 +6939,7 @@ static int subscribeTestProcess() {

//==== create threads for super table query
if (g_queryInfo.superQueryInfo.sqlCount <= 0) {
debugPrint("%s() LN%d, super table query sqlCount %"PRIu64".\n",
debugPrint("%s() LN%d, super table query sqlCount %d.\n",
__func__, __LINE__,
g_queryInfo.superQueryInfo.sqlCount);
} else {

@ -649,8 +649,6 @@ static int32_t sdbProcessWrite(void *wparam, void *hparam, int32_t qtype, void *
dnodeReportStep("mnode-sdb", stepDesc, 0);
}

if (qtype == TAOS_QTYPE_QUERY) return sdbPerformDeleteAction(pHead, pTable);

pthread_mutex_lock(&tsSdbMgmt.mutex);

if (pHead->version == 0) {

@ -712,13 +710,11 @@ static int32_t sdbProcessWrite(void *wparam, void *hparam, int32_t qtype, void *
if (action == SDB_ACTION_INSERT) {
return sdbPerformInsertAction(pHead, pTable);
} else if (action == SDB_ACTION_DELETE) {
//if (qtype == TAOS_QTYPE_FWD) {
if (qtype == TAOS_QTYPE_FWD) {
// Drop database/stable may take a long time and cause a timeout, so we confirm first then reput it into queue
// Drop database/stable may take a long time and cause a timeout, so we confirm first
// sdbWriteFwdToQueue(1, hparam, TAOS_QTYPE_QUERY, unused);
syncConfirmForward(tsSdbMgmt.sync, pHead->version, TSDB_CODE_SUCCESS, false);
// return TSDB_CODE_SUCCESS;
}
//} else {
return sdbPerformDeleteAction(pHead, pTable);
//}
} else if (action == SDB_ACTION_UPDATE) {
return sdbPerformUpdateAction(pHead, pTable);
} else {

@ -1125,8 +1121,11 @@ static void *sdbWorkerFp(void *pWorker) {
sdbConfirmForward(1, pRow, pRow->code);
} else {
if (qtype == TAOS_QTYPE_FWD) {
int32_t action = pRow->pHead.msgType % 10;
if (action != SDB_ACTION_DELETE) {
syncConfirmForward(tsSdbMgmt.sync, pRow->pHead.version, pRow->code, false);
}
}
sdbFreeFromQueue(pRow);
}
}

@ -924,10 +924,7 @@ static int tsdbRemoveTableFromIndex(STsdbMeta *pMeta, STable *pTable) {
STable *pSTable = pTable->pSuper;
ASSERT(pSTable != NULL);

STSchema *pSchema = tsdbGetTableTagSchema(pTable);
char* key = getTagIndexKey(pTable);
STColumn *pCol = schemaColAt(pSchema, DEFAULT_TAG_INDEX_COLUMN);

char * key = tdGetKVRowValOfCol(pTable->tagVal, pCol->colId);

SArray *res = tSkipListGet(pSTable->pIndex, key);

size_t size = taosArrayGetSize(res);

@ -185,6 +185,8 @@ static FORCE_INLINE int32_t tGetNumericStringType(const SStrToken* pToken) {

void taosCleanupKeywordsTable();

SStrToken taosTokenDup(SStrToken* pToken, char* buf, int32_t len);

#ifdef __cplusplus
}
#endif

@ -227,6 +227,7 @@ TAOS_DEFINE_ERROR(TSDB_CODE_VND_IS_FULL, "Database memory is fu
TAOS_DEFINE_ERROR(TSDB_CODE_VND_IS_FLOWCTRL, "Database memory is full for waiting commit")
TAOS_DEFINE_ERROR(TSDB_CODE_VND_IS_DROPPING, "Database is dropping")
TAOS_DEFINE_ERROR(TSDB_CODE_VND_IS_BALANCING, "Database is balancing")
TAOS_DEFINE_ERROR(TSDB_CODE_VND_IS_CLOSING, "Database is closing")
TAOS_DEFINE_ERROR(TSDB_CODE_VND_NOT_SYNCED, "Database suspended")
TAOS_DEFINE_ERROR(TSDB_CODE_VND_NO_WRITE_AUTH, "Database write operation denied")
TAOS_DEFINE_ERROR(TSDB_CODE_VND_IS_SYNCING, "Database is syncing")

@ -664,3 +664,15 @@ void taosCleanupKeywordsTable() {
taosHashCleanup(m);
}
}

SStrToken taosTokenDup(SStrToken* pToken, char* buf, int32_t len) {
assert(pToken != NULL && buf != NULL);
SStrToken token = *pToken;
token.z = buf;

assert(len > token.n);
strncpy(token.z, pToken->z, pToken->n);
token.z[token.n] = 0;

return token;
}
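
The new `taosTokenDup` copies a token's text into a caller-supplied buffer and NUL-terminates it, so the copy can be used as an ordinary C string. A hedged usage sketch; the `SStrToken` layout shown here is assumed from how the function uses the `n` and `z` fields and may differ from the real header:

```c
#include <stdint.h>
#include <stdio.h>

// Assumed layout: length, type, and a pointer into the (not NUL-terminated) SQL text.
typedef struct SStrToken {
    uint32_t n;
    uint32_t type;
    char    *z;
} SStrToken;

// Declaration matching the prototype added to the tokenizer header above.
SStrToken taosTokenDup(SStrToken* pToken, char* buf, int32_t len);

static void printToken(SStrToken *tok) {
    char buf[128];                                   // must be longer than tok->n
    SStrToken copy = taosTokenDup(tok, buf, sizeof(buf));
    printf("token text: %s (length %u)\n", copy.z, copy.n);  // copy.z now points at buf
}
```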

@ -8,8 +8,8 @@

3. mkdir debug; cd debug; cmake ..; make ; sudo make install

4. pip install ../src/connector/python/linux/python2 ; pip3 install
4. pip install ../src/connector/python ; pip3 install
../src/connector/python/linux/python3
../src/connector/python

5. pip install numpy; pip3 install numpy (numpy is required only if you need to run querySort.py)

@ -21,7 +21,7 @@ def pre_test(){
cmake .. > /dev/null
make > /dev/null
make install > /dev/null
pip3 install ${WKC}/src/connector/python/linux/python3/
pip3 install ${WKC}/src/connector/python
'''
return 1
}

@ -48,7 +48,7 @@ fi
PYTHON_EXEC=python3.8

# First we need to set up a path for Python to find our own TAOS modules, so that "import" can work.
export PYTHONPATH=$(pwd)/../../src/connector/python/linux/python3:$(pwd)
export PYTHONPATH=$(pwd)/../../src/connector/python:$(pwd)

# Then let us set up the library path so that our compiled SO file can be loaded by Python
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIB_DIR

@ -48,7 +48,7 @@ fi
PYTHON_EXEC=python3.8

# First we need to set up a path for Python to find our own TAOS modules, so that "import" can work.
export PYTHONPATH=$(pwd)/../../src/connector/python/linux/python3:$(pwd)
export PYTHONPATH=$(pwd)/../../src/connector/python:$(pwd)

# Then let us set up the library path so that our compiled SO file can be loaded by Python
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIB_DIR

@ -10,7 +10,7 @@
#
###################################################################
# install pip
# pip install src/connector/python/linux/python2/
# pip install src/connector/python/
import sys
import os
import os.path

@ -48,7 +48,7 @@ fi
PYTHON_EXEC=python3.8

# First we need to set up a path for Python to find our own TAOS modules, so that "import" can work.
export PYTHONPATH=$(pwd)/../../src/connector/python/linux/python3:$(pwd)
export PYTHONPATH=$(pwd)/../../src/connector/python:$(pwd)

# Then let us set up the library path so that our compiled SO file can be loaded by Python
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIB_DIR

@ -4,7 +4,7 @@
# 2. No files are needed outside the development tree, everything is done in the local source code directory

# First we need to set up a path for Python to find our own TAOS modules, so that "import" can work.
export PYTHONPATH=$(pwd)/../../src/connector/python/linux/python3
export PYTHONPATH=$(pwd)/../../src/connector/python

# Then let us set up the library path so that our compiled SO file can be loaded by Python
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/../../build/build/lib

@ -10,7 +10,7 @@
#
###################################################################
# install pip
# pip install src/connector/python/linux/python2/
# pip install src/connector/python/

# -*- coding: utf-8 -*-
import sys

@ -13,7 +13,7 @@ else
fi
TAOSD_DIR=`find $TAOS_DIR -name "taosd"|grep bin|head -n1`
LIB_DIR=`echo $TAOSD_DIR|rev|cut -d '/' -f 3,4,5,6|rev`/lib
export PYTHONPATH=$(pwd)/../../src/connector/python/linux/python3
export PYTHONPATH=$(pwd)/../../src/connector/python
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$LIB_DIR

if [[ "$1" == *"test.py"* ]]; then

@ -10,7 +10,7 @@
#
###################################################################
# install pip
# pip install src/connector/python/linux/python2/
# pip install src/connector/python/

# -*- coding: utf-8 -*-
import sys

@ -10,7 +10,7 @@
#
###################################################################
# install pip
# pip install src/connector/python/linux/python2/
# pip install src/connector/python/

# -*- coding: utf-8 -*-
import sys

@ -10,7 +10,7 @@
#
###################################################################
# install pip
# pip install src/connector/python/linux/python2/
# pip install src/connector/python/

# -*- coding: utf-8 -*-
import sys