Merge branch 'develop' into feature/TD-4034

lichuang 2021-05-13 11:19:51 +08:00
commit 6f5bafcdaa
192 changed files with 8801 additions and 7241 deletions

View File

@ -1,4 +1,4 @@
[![Build Status](https://travis-ci.org/taosdata/TDengine.svg?branch=master)](https://travis-ci.org/taosdata/TDengine)
[![Build Status](https://cloud.drone.io/api/badges/taosdata/TDengine/status.svg?ref=refs/heads/master)](https://cloud.drone.io/taosdata/TDengine)
[![Build status](https://ci.appveyor.com/api/projects/status/kf3pwh2or5afsgl9/branch/master?svg=true)](https://ci.appveyor.com/project/sangshuduo/tdengine-2n8ge/branch/master)
[![Coverage Status](https://coveralls.io/repos/github/taosdata/TDengine/badge.svg?branch=develop)](https://coveralls.io/github/taosdata/TDengine?branch=develop)
[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/4201/badge)](https://bestpractices.coreinfrastructure.org/projects/4201)

View File

@ -32,7 +32,7 @@ ELSEIF (TD_WINDOWS)
#INSTALL(TARGETS taos RUNTIME DESTINATION driver)
#INSTALL(TARGETS shell RUNTIME DESTINATION .)
IF (TD_MVN_INSTALLED)
INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos-jdbcdriver-2.0.28-dist.jar DESTINATION connector/jdbc)
INSTALL(FILES ${LIBRARY_OUTPUT_PATH}/taos-jdbcdriver-2.0.29.jar DESTINATION connector/jdbc)
ENDIF ()
ELSEIF (TD_DARWIN)
SET(TD_MAKE_INSTALL_SH "${TD_COMMUNITY_DIR}/packaging/tools/make_install.sh")

View File

@ -36,6 +36,15 @@ static char monotonic_info_string[32];
static long mono_ticksPerMicrosecond = 0;
#ifdef _TD_NINGSI_60
// implement __rdtsc in ningsi60
uint64_t __rdtsc(){
unsigned int lo,hi;
__asm__ __volatile__ ("rdtsc" : "=a" (lo), "=d" (hi));
return ((uint64_t)hi << 32) | lo;
}
#endif
static monotime getMonotonicUs_x86() {
return __rdtsc() / mono_ticksPerMicrosecond;
}
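
For context: getMonotonicUs_x86() above converts raw TSC ticks to microseconds by dividing by mono_ticksPerMicrosecond, whose initialisation is outside this hunk. The following is a minimal, hypothetical sketch of how such a calibration could be derived against a POSIX monotonic clock; the helper name and the 50 ms sampling window are illustrative assumptions, not code from this commit.

#include <stdint.h>
#include <time.h>
#include <x86intrin.h>   /* __rdtsc on toolchains that provide it; ningsi60 uses the inline-asm fallback above */

/* Hypothetical calibration: measure how many TSC ticks elapse per microsecond
 * of CLOCK_MONOTONIC time over a short busy-wait window. */
static long calibrate_ticks_per_us(void) {
  struct timespec ts;
  clock_gettime(CLOCK_MONOTONIC, &ts);
  uint64_t us0 = (uint64_t)ts.tv_sec * 1000000ULL + (uint64_t)ts.tv_nsec / 1000ULL;
  uint64_t t0  = __rdtsc();

  uint64_t us1, t1;
  do {
    clock_gettime(CLOCK_MONOTONIC, &ts);
    us1 = (uint64_t)ts.tv_sec * 1000000ULL + (uint64_t)ts.tv_nsec / 1000ULL;
    t1  = __rdtsc();
  } while (us1 - us0 < 50000);              /* ~50 ms window */

  return (long)((t1 - t0) / (us1 - us0));   /* ticks per microsecond */
}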

View File

@ -1,5 +1,6 @@
name: tdengine
base: core18
version: '2.1.0.0'
icon: snap/gui/t-dengine.svg
summary: an open-source big data platform designed and optimized for IoT.

View File

@ -48,6 +48,8 @@ void tscLockByThread(int64_t *lockedBy);
void tscUnlockByThread(int64_t *lockedBy);
int tsInsertInitialCheck(SSqlObj *pSql);
#ifdef __cplusplus
}
#endif

View File

@ -174,7 +174,8 @@ void tscFieldInfoClear(SFieldInfo* pFieldInfo);
static FORCE_INLINE int32_t tscNumOfFields(SQueryInfo* pQueryInfo) { return pQueryInfo->fieldsInfo.numOfOutput; }
int32_t tscFieldInfoCompare(const SFieldInfo* pFieldInfo1, const SFieldInfo* pFieldInfo2);
int32_t tscFieldInfoCompare(const SFieldInfo* pFieldInfo1, const SFieldInfo* pFieldInfo2, int32_t *diffSize);
int32_t tscFieldInfoSetSize(const SFieldInfo* pFieldInfo1, const SFieldInfo* pFieldInfo2);
void addExprParams(SSqlExpr* pExpr, char* argument, int32_t type, int32_t bytes);
@ -306,7 +307,7 @@ STableMeta* createSuperTableMeta(STableMetaMsg* pChild);
uint32_t tscGetTableMetaSize(STableMeta* pTableMeta);
CChildTableMeta* tscCreateChildMeta(STableMeta* pTableMeta);
uint32_t tscGetTableMetaMaxSize();
int32_t tscCreateTableMetaFromCChildMeta(STableMeta* pChild, const char* name);
int32_t tscCreateTableMetaFromCChildMeta(STableMeta* pChild, const char* name, void* buf);
STableMeta* tscTableMetaDup(STableMeta* pTableMeta);
int32_t tscCreateQueryFromQueryInfo(SQueryInfo* pQueryInfo, SQueryAttr* pQueryAttr, void* addr);

View File

@ -21,8 +21,8 @@ extern "C" {
#endif
#include "taosmsg.h"
#include "tstoken.h"
#include "tsclient.h"
#include "ttoken.h"
/**
* get the number of tags of this table

View File

@ -84,6 +84,7 @@ typedef struct STableMeta {
typedef struct STableMetaInfo {
STableMeta *pTableMeta; // table meta, cached in client side and acquired by name
uint32_t tableMetaSize;
SVgroupsInfo *vgroupList;
SArray *pVgroupTables; // SArray<SVgroupTableInfo>
@ -154,13 +155,12 @@ typedef struct STagCond {
typedef struct SParamInfo {
int32_t idx;
char type;
uint8_t type;
uint8_t timePrec;
int16_t bytes;
uint32_t offset;
} SParamInfo;
typedef struct SBoundColumn {
bool hasVal; // denotes whether the current column has been bound
int32_t offset; // all column offset value
@ -372,6 +372,7 @@ typedef struct SSqlObj {
tsem_t rspSem;
SSqlCmd cmd;
SSqlRes res;
bool isBind;
SSubqueryState subState;
struct SSqlObj **pSubs;

View File

@ -100,7 +100,7 @@ JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_getResultSetImp
/*
* Class: com_taosdata_jdbc_TSDBJNIConnector
* Method: isUpdateQueryImp
* Signature: (J)J
* Signature: (JJ)I
*/
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_isUpdateQueryImp
(JNIEnv *env, jobject jobj, jlong con, jlong tres);
@ -185,6 +185,44 @@ JNIEXPORT void JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_unsubscribeImp
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_validateCreateTableSqlImp
(JNIEnv *, jobject, jlong, jbyteArray);
/*
* Class: com_taosdata_jdbc_TSDBJNIConnector
* Method: prepareStmtImp
* Signature: ([BJ)I
*/
JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_prepareStmtImp
(JNIEnv *, jobject, jbyteArray, jlong);
/*
* Class: com_taosdata_jdbc_TSDBJNIConnector
* Method: setBindTableNameImp
* Signature: (JLjava/lang/String;J)I
*/
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_setBindTableNameImp
(JNIEnv *, jobject, jlong, jstring, jlong);
/*
* Class: com_taosdata_jdbc_TSDBJNIConnector
* Method: bindColDataImp
* Signature: (J[B[B[BIIIIJ)J
*/
JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_bindColDataImp
(JNIEnv *, jobject, jlong, jbyteArray, jbyteArray, jbyteArray, jint, jint, jint, jint, jlong);
/*
* Class: com_taosdata_jdbc_TSDBJNIConnector
* Method: executeBatchImp
* Signature: (JJ)I
*/
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_executeBatchImp(JNIEnv *env, jobject jobj, jlong stmt, jlong con);
/*
* Class: com_taosdata_jdbc_TSDBJNIConnector
* Method: closeStmt
* Signature: (JJ)I
*/
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_closeStmt(JNIEnv *env, jobject jobj, jlong stmt, jlong con);
#ifdef __cplusplus
}
#endif
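
As a reading aid, the JNI descriptor strings in the comments above encode the Java-side parameter and return types (J = jlong, [B = jbyteArray, I = jint). The prototype below simply restates the bindColDataImp declaration from this header with each parameter annotated according to how the implementation further down uses it; the annotations are editorial, not part of the original header.

#include <jni.h>

/* "(J[B[B[BIIIIJ)J" mapped onto the C parameter list */
JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_bindColDataImp(
    JNIEnv *env, jobject jobj,
    jlong      stmt,         /* J  : native TAOS_STMT* handle                  */
    jbyteArray colDataList,  /* [B : packed column values, one stride per row  */
    jbyteArray lengthList,   /* [B : per-row value lengths                     */
    jbyteArray nullList,     /* [B : per-row NULL flags                        */
    jint       dataType,     /* I  : TSDB data type id                         */
    jint       dataBytes,    /* I  : bytes per value (max bytes for VAR types) */
    jint       numOfRows,    /* I  : rows in this batch                        */
    jint       colIndex,     /* I  : column position within the statement      */
    jlong      con);         /* J  : native TAOS* connection handle            */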

View File

@ -688,3 +688,193 @@ JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TDDBJNIConnector_getResultTimePrec
return taos_result_precision(result);
}
JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_prepareStmtImp(JNIEnv *env, jobject jobj, jbyteArray jsql, jlong con) {
TAOS *tscon = (TAOS *)con;
if (tscon == NULL) {
jniError("jobj:%p, connection already closed", jobj);
return JNI_CONNECTION_NULL;
}
if (jsql == NULL) {
jniError("jobj:%p, conn:%p, empty sql string", jobj, tscon);
return JNI_SQL_NULL;
}
jsize len = (*env)->GetArrayLength(env, jsql);
char *str = (char *) calloc(1, sizeof(char) * (len + 1));
if (str == NULL) {
jniError("jobj:%p, conn:%p, alloc memory failed", jobj, tscon);
return JNI_OUT_OF_MEMORY;
}
(*env)->GetByteArrayRegion(env, jsql, 0, len, (jbyte *)str);
if ((*env)->ExceptionCheck(env)) {
// todo handle error
}
TAOS_STMT* pStmt = taos_stmt_init(tscon);
int32_t code = taos_stmt_prepare(pStmt, str, len);
if (code != TSDB_CODE_SUCCESS) {
jniError("jobj:%p, conn:%p, code:%s", jobj, tscon, tstrerror(code));
return JNI_TDENGINE_ERROR;
}
free(str);
return (jlong) pStmt;
}
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_setBindTableNameImp(JNIEnv *env, jobject jobj, jlong stmt, jstring jname, jlong conn) {
TAOS *tsconn = (TAOS *)conn;
if (tsconn == NULL) {
jniError("jobj:%p, connection already closed", jobj);
return JNI_CONNECTION_NULL;
}
TAOS_STMT* pStmt = (TAOS_STMT*) stmt;
if (pStmt == NULL) {
jniError("jobj:%p, conn:%p, invalid stmt handle", jobj, tsconn);
return JNI_SQL_NULL;
}
const char *name = (*env)->GetStringUTFChars(env, jname, NULL);
int32_t code = taos_stmt_set_tbname((void*)stmt, name);
if (code != TSDB_CODE_SUCCESS) {
(*env)->ReleaseStringUTFChars(env, jname, name);
jniError("jobj:%p, conn:%p, code:%s", jobj, tsconn, tstrerror(code));
return JNI_TDENGINE_ERROR;
}
jniDebug("jobj:%p, conn:%p, set stmt bind table name:%s", jobj, tsconn, name);
(*env)->ReleaseStringUTFChars(env, jname, name);
return JNI_SUCCESS;
}
JNIEXPORT jlong JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_bindColDataImp(JNIEnv *env, jobject jobj, jlong stmt,
jbyteArray colDataList, jbyteArray lengthList, jbyteArray nullList, jint dataType, jint dataBytes, jint numOfRows, jint colIndex, jlong con) {
TAOS *tscon = (TAOS *)con;
if (tscon == NULL) {
jniError("jobj:%p, connection already closed", jobj);
return JNI_CONNECTION_NULL;
}
TAOS_STMT* pStmt = (TAOS_STMT*) stmt;
if (pStmt == NULL) {
jniError("jobj:%p, conn:%p, invalid stmt", jobj, tscon);
return JNI_SQL_NULL;
}
// todo refactor
jsize len = (*env)->GetArrayLength(env, colDataList);
char *colBuf = (char *)calloc(1, len);
(*env)->GetByteArrayRegion(env, colDataList, 0, len, (jbyte *)colBuf);
if ((*env)->ExceptionCheck(env)) {
// todo handle error
}
len = (*env)->GetArrayLength(env, lengthList);
char *lengthArray = (char*) calloc(1, len);
(*env)->GetByteArrayRegion(env, lengthList, 0, len, (jbyte*) lengthArray);
if ((*env)->ExceptionCheck(env)) {
}
len = (*env)->GetArrayLength(env, nullList);
char *nullArray = (char*) calloc(1, len);
(*env)->GetByteArrayRegion(env, nullList, 0, len, (jbyte*) nullArray);
if ((*env)->ExceptionCheck(env)) {
}
// bind multiple rows in a single invocation.
TAOS_MULTI_BIND* b = calloc(1, sizeof(TAOS_MULTI_BIND));
b->num = numOfRows;
b->buffer_type = dataType; // todo check data type
b->buffer_length = IS_VAR_DATA_TYPE(dataType)? dataBytes:tDataTypes[dataType].bytes;
b->is_null = nullArray;
b->buffer = colBuf;
b->length = (int32_t*)lengthArray;
// set the length and is_null array
switch(dataType) {
case TSDB_DATA_TYPE_INT:
case TSDB_DATA_TYPE_TINYINT:
case TSDB_DATA_TYPE_SMALLINT:
case TSDB_DATA_TYPE_TIMESTAMP:
case TSDB_DATA_TYPE_BIGINT: {
int32_t bytes = tDataTypes[dataType].bytes;
for(int32_t i = 0; i < numOfRows; ++i) {
b->length[i] = bytes;
}
break;
}
case TSDB_DATA_TYPE_NCHAR:
case TSDB_DATA_TYPE_BINARY: {
// do nothing
}
}
int32_t code = taos_stmt_bind_single_param_batch(pStmt, b, colIndex);
tfree(b->length);
tfree(b->buffer);
tfree(b->is_null);
tfree(b);
if (code != TSDB_CODE_SUCCESS) {
jniError("jobj:%p, conn:%p, code:%s", jobj, tscon, tstrerror(code));
return JNI_TDENGINE_ERROR;
}
return JNI_SUCCESS;
}
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_executeBatchImp(JNIEnv *env, jobject jobj, jlong stmt, jlong con) {
TAOS *tscon = (TAOS *)con;
if (tscon == NULL) {
jniError("jobj:%p, connection already closed", jobj);
return JNI_CONNECTION_NULL;
}
TAOS_STMT *pStmt = (TAOS_STMT*) stmt;
if (pStmt == NULL) {
jniError("jobj:%p, conn:%p, invalid stmt", jobj, tscon);
return JNI_SQL_NULL;
}
taos_stmt_add_batch(pStmt);
int32_t code = taos_stmt_execute(pStmt);
if (code != TSDB_CODE_SUCCESS) {
jniError("jobj:%p, conn:%p, code:%s", jobj, tscon, tstrerror(code));
return JNI_TDENGINE_ERROR;
}
jniDebug("jobj:%p, conn:%p, batch execute", jobj, tscon);
return JNI_SUCCESS;
}
JNIEXPORT jint JNICALL Java_com_taosdata_jdbc_TSDBJNIConnector_closeStmt(JNIEnv *env, jobject jobj, jlong stmt, jlong con) {
TAOS *tscon = (TAOS *)con;
if (tscon == NULL) {
jniError("jobj:%p, connection already closed", jobj);
return JNI_CONNECTION_NULL;
}
TAOS_STMT *pStmt = (TAOS_STMT*) stmt;
if (pStmt == NULL) {
jniError("jobj:%p, conn:%p, invalid stmt", jobj, tscon);
return JNI_SQL_NULL;
}
int32_t code = taos_stmt_close(pStmt);
if (code != TSDB_CODE_SUCCESS) {
jniError("jobj:%p, conn:%p, code:%s", jobj, tscon, tstrerror(code));
return JNI_TDENGINE_ERROR;
}
jniDebug("jobj:%p, conn:%p, stmt closed", jobj, tscon);
return JNI_SUCCESS;
}
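
Taken together, the new entry points above are thin wrappers over the native prepared-statement API: prepareStmtImp maps to taos_stmt_init/taos_stmt_prepare, setBindTableNameImp to taos_stmt_set_tbname, bindColDataImp to taos_stmt_bind_single_param_batch, executeBatchImp to taos_stmt_add_batch plus taos_stmt_execute, and closeStmt to taos_stmt_close. A minimal sketch of that call sequence driven directly from C is shown below; the connection is assumed to be open already, and the table name, column types, and values are placeholders.

#include <string.h>
#include "taos.h"   /* taos_stmt_* API and TAOS_MULTI_BIND */

/* Sketch only: error handling abbreviated, two rows of (timestamp, int). */
static int insert_one_table_batch(TAOS *conn) {
  const char *sql = "insert into ? values(?, ?)";
  TAOS_STMT *stmt = taos_stmt_init(conn);                    /* prepareStmtImp */
  if (taos_stmt_prepare(stmt, sql, (unsigned long)strlen(sql)) != 0) return -1;
  if (taos_stmt_set_tbname(stmt, "d1001") != 0) return -1;   /* setBindTableNameImp */

  int64_t ts[2]     = {1620000000000LL, 1620000001000LL};
  int32_t val[2]    = {10, 11};
  int32_t tsLen[2]  = {sizeof(int64_t), sizeof(int64_t)};
  int32_t valLen[2] = {sizeof(int32_t), sizeof(int32_t)};
  char    noNull[2] = {0, 0};

  TAOS_MULTI_BIND cols[2];
  memset(cols, 0, sizeof(cols));
  cols[0].buffer_type = TSDB_DATA_TYPE_TIMESTAMP;
  cols[0].buffer = ts;  cols[0].buffer_length = sizeof(int64_t);
  cols[0].length = tsLen;  cols[0].is_null = noNull;  cols[0].num = 2;
  cols[1].buffer_type = TSDB_DATA_TYPE_INT;
  cols[1].buffer = val; cols[1].buffer_length = sizeof(int32_t);
  cols[1].length = valLen; cols[1].is_null = noNull; cols[1].num = 2;

  taos_stmt_bind_single_param_batch(stmt, &cols[0], 0);      /* bindColDataImp, col 0 */
  taos_stmt_bind_single_param_batch(stmt, &cols[1], 1);      /* bindColDataImp, col 1 */

  taos_stmt_add_batch(stmt);                                 /* executeBatchImp ...   */
  int code = taos_stmt_execute(stmt);                        /* ... add_batch + execute */
  taos_stmt_close(stmt);                                     /* closeStmt */
  return code;
}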

View File

@ -326,6 +326,7 @@ TAOS_ROW tscFetchRow(void *param) {
pCmd->command == TSDB_SQL_FETCH ||
pCmd->command == TSDB_SQL_SHOW ||
pCmd->command == TSDB_SQL_SHOW_CREATE_TABLE ||
pCmd->command == TSDB_SQL_SHOW_CREATE_STABLE ||
pCmd->command == TSDB_SQL_SHOW_CREATE_DATABASE ||
pCmd->command == TSDB_SQL_SELECT ||
pCmd->command == TSDB_SQL_DESCRIBE_TABLE ||
@ -679,6 +680,9 @@ static int32_t tscProcessShowCreateTable(SSqlObj *pSql) {
assert(pTableMetaInfo->pTableMeta != NULL);
const char* tableName = tNameGetTableName(&pTableMetaInfo->name);
if (pSql->cmd.command == TSDB_SQL_SHOW_CREATE_STABLE && !UTIL_TABLE_IS_SUPER_TABLE(pTableMetaInfo)) {
return TSDB_CODE_TSC_INVALID_VALUE;
}
char *result = (char *)calloc(1, TSDB_MAX_BINARY_LEN);
int32_t code = TSDB_CODE_SUCCESS;
@ -907,7 +911,7 @@ int tscProcessLocalCmd(SSqlObj *pSql) {
*/
pRes->qId = 0x1;
pRes->numOfRows = 0;
} else if (pCmd->command == TSDB_SQL_SHOW_CREATE_TABLE) {
} else if (pCmd->command == TSDB_SQL_SHOW_CREATE_TABLE || pCmd->command == TSDB_SQL_SHOW_CREATE_STABLE) {
pRes->code = tscProcessShowCreateTable(pSql);
} else if (pCmd->command == TSDB_SQL_SHOW_CREATE_DATABASE) {
pRes->code = tscProcessShowCreateDatabase(pSql);

View File

@ -29,8 +29,7 @@
#include "taosdef.h"
#include "tscLog.h"
#include "tscSubquery.h"
#include "tstoken.h"
#include "ttoken.h"
#include "tdataformat.h"
@ -386,7 +385,7 @@ int32_t tsParseOneColumn(SSchema *pSchema, SStrToken *pToken, char *payload, cha
* The server time/client time should not be mixed up in one sql string
* A sort operation is not needed if the server time is used.
*/
static int32_t tsCheckTimestamp(STableDataBlocks *pDataBlocks, const char *start) {
int32_t tsCheckTimestamp(STableDataBlocks *pDataBlocks, const char *start) {
// once the data block is disordered, we do NOT keep previous timestamp any more
if (!pDataBlocks->ordered) {
return TSDB_CODE_SUCCESS;
@ -411,6 +410,7 @@ static int32_t tsCheckTimestamp(STableDataBlocks *pDataBlocks, const char *start
if (k <= pDataBlocks->prevTS && (pDataBlocks->tsSource == TSDB_USE_CLI_TS)) {
pDataBlocks->ordered = false;
tscWarn("NOT ordered input timestamp");
}
pDataBlocks->prevTS = k;
@ -464,22 +464,23 @@ int tsParseOneRow(char **str, STableDataBlocks *pDataBlocks, SSqlCmd *pCmd, int1
if (TK_STRING == sToken.type) {
// delete escape character: \\, \', \"
char delim = sToken.z[0];
int32_t cnt = 0;
int32_t j = 0;
for (uint32_t k = 1; k < sToken.n - 1; ++k) {
if (sToken.z[k] == delim || sToken.z[k] == '\\') {
if (sToken.z[k + 1] == delim) {
cnt++;
if (sToken.z[k] == '\\' || (sToken.z[k] == delim && sToken.z[k + 1] == delim)) {
tmpTokenBuf[j] = sToken.z[k + 1];
cnt++;
j++;
k++;
continue;
}
}
tmpTokenBuf[j] = sToken.z[k];
j++;
}
tmpTokenBuf[j] = 0;
sToken.z = tmpTokenBuf;
sToken.n -= 2 + cnt;
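
The rewritten condition above changes the unescape rule: a backslash now always causes the following character to be copied literally, and a doubled quote character (matching the opening delimiter) collapses to a single one; the surrounding quotes and every consumed escape character are subtracted from the token length. A small standalone sketch of that rule, for illustration only:

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Sketch of the new unescape rule: z points at the quoted token (quotes included). */
static void unescape_token(const char *z, uint32_t n, char *out) {
  char delim = z[0];                       /* opening ' or " */
  uint32_t j = 0;
  for (uint32_t k = 1; k + 1 < n; ++k) {   /* skip the opening and closing quote */
    if (z[k] == '\\' || (z[k] == delim && z[k + 1] == delim)) {
      out[j++] = z[k + 1];                 /* keep the escaped char, drop the escape */
      ++k;
      continue;
    }
    out[j++] = z[k];
  }
  out[j] = '\0';
}

int main(void) {
  char buf[64];
  const char *tok = "'it''s \\'ok\\''";
  unescape_token(tok, (uint32_t)strlen(tok), buf);
  printf("%s\n", buf);                     /* prints: it's 'ok' */
  return 0;
}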
@ -693,6 +694,8 @@ void tscSortRemoveDataBlockDupRows(STableDataBlocks *dataBuf) {
pBlocks->numOfRows = i + 1;
dataBuf->size = sizeof(SSubmitBlk) + dataBuf->rowSize * pBlocks->numOfRows;
}
dataBuf->prevTS = INT64_MIN;
}
static int32_t doParseInsertStatement(SSqlCmd* pCmd, char **str, STableDataBlocks* dataBuf, int32_t *totalNum) {
@ -705,19 +708,11 @@ static int32_t doParseInsertStatement(SSqlCmd* pCmd, char **str, STableDataBlock
}
code = TSDB_CODE_TSC_INVALID_SQL;
char *tmpTokenBuf = calloc(1, 16*1024); // used for deleting Escape character: \\, \', \"
if (NULL == tmpTokenBuf) {
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
char tmpTokenBuf[16*1024] = {0}; // used for deleting Escape character: \\, \', \"
int32_t numOfRows = 0;
code = tsParseValues(str, dataBuf, maxNumOfRows, pCmd, &numOfRows, tmpTokenBuf);
free(tmpTokenBuf);
if (code != TSDB_CODE_SUCCESS) {
return code;
}
for (uint32_t i = 0; i < dataBuf->numOfParams; ++i) {
SParamInfo *param = dataBuf->params + i;
if (param->idx == -1) {
@ -934,6 +929,42 @@ static int32_t tscCheckIfCreateTable(char **sqlstr, SSqlObj *pSql, char** boundC
return tscSQLSyntaxErrMsg(pCmd->payload, ") expected", sToken.z);
}
/* parse columns after super table tags values.
* insert into table_name using super_table(tag_name1, tag_name2) tags(tag_val1, tag_val2)
* (normal_col1, normal_col2) values(normal_col1_val, normal_col2_val);
* */
index = 0;
sToken = tStrGetToken(sql, &index, false);
sql += index;
int numOfColsAfterTags = 0;
if (sToken.type == TK_LP) {
if (*boundColumn != NULL) {
return tscSQLSyntaxErrMsg(pCmd->payload, "bind columns again", sToken.z);
} else {
*boundColumn = &sToken.z[0];
}
while (1) {
index = 0;
sToken = tStrGetToken(sql, &index, false);
if (sToken.type == TK_RP) {
break;
}
sql += index;
++numOfColsAfterTags;
}
if (numOfColsAfterTags == 0 && (*boundColumn) != NULL) {
return TSDB_CODE_TSC_INVALID_SQL;
}
sToken = tStrGetToken(sql, &index, false);
}
sql = sToken.z;
if (tscValidateName(&tableToken) != TSDB_CODE_SUCCESS) {
return tscInvalidSQLErrMsg(pCmd->payload, "invalid table name", *sqlstr);
}
@ -975,7 +1006,7 @@ int validateTableName(char *tblName, int len, SStrToken* psTblToken) {
psTblToken->n = len;
psTblToken->type = TK_ID;
tSQLGetToken(psTblToken->z, &psTblToken->type);
tGetToken(psTblToken->z, &psTblToken->type);
return tscValidateName(psTblToken);
}
@ -1262,7 +1293,7 @@ int tsParseInsertSql(SSqlObj *pSql) {
goto _clean;
}
if (taosHashGetSize(pCmd->pTableBlockHashList) > 0) { // merge according to vgId
if ((pCmd->insertType != TSDB_QUERY_TYPE_STMT_INSERT) && taosHashGetSize(pCmd->pTableBlockHashList) > 0) { // merge according to vgId
if ((code = tscMergeTableDataBlocks(pSql, true)) != TSDB_CODE_SUCCESS) {
goto _clean;
}

View File

@ -24,6 +24,7 @@
#include "tscSubquery.h"
int tsParseInsertSql(SSqlObj *pSql);
int32_t tsCheckTimestamp(STableDataBlocks *pDataBlocks, const char *start);
////////////////////////////////////////////////////////////////////////////////
// functions for normal statement preparation
@ -43,10 +44,32 @@ typedef struct SNormalStmt {
tVariant* params;
} SNormalStmt;
typedef struct SMultiTbStmt {
bool nameSet;
uint64_t currentUid;
uint32_t tbNum;
SStrToken tbname;
SHashObj *pTableHash;
SHashObj *pTableBlockHashList; // data block for each table
} SMultiTbStmt;
typedef enum {
STMT_INIT = 1,
STMT_PREPARE,
STMT_SETTBNAME,
STMT_BIND,
STMT_BIND_COL,
STMT_ADD_BATCH,
STMT_EXECUTE
} STMT_ST;
typedef struct STscStmt {
bool isInsert;
bool multiTbInsert;
int16_t last;
STscObj* taos;
SSqlObj* pSql;
SMultiTbStmt mtb;
SNormalStmt normal;
} STscStmt;
@ -135,7 +158,7 @@ static int normalStmtBindParam(STscStmt* stmt, TAOS_BIND* bind) {
break;
default:
tscDebug("param %d: type mismatch or invalid", i);
tscDebug("0x%"PRIx64" bind column%d: type mismatch or invalid", stmt->pSql->self, i);
return TSDB_CODE_TSC_INVALID_VALUE;
}
}
@ -151,7 +174,7 @@ static int normalStmtPrepare(STscStmt* stmt) {
while (sql[i] != 0) {
SStrToken token = {0};
token.n = tSQLGetToken(sql + i, &token.type);
token.n = tGetToken(sql + i, &token.type);
if (token.type == TK_QUESTION) {
sql[i] = 0;
@ -255,12 +278,13 @@ static char* normalStmtBuildSql(STscStmt* stmt) {
////////////////////////////////////////////////////////////////////////////////
// functions for insertion statement preparation
static int doBindParam(char* data, SParamInfo* param, TAOS_BIND* bind) {
static int doBindParam(STableDataBlocks* pBlock, char* data, SParamInfo* param, TAOS_BIND* bind, int32_t colNum) {
if (bind->is_null != NULL && *(bind->is_null)) {
setNull(data + param->offset, param->type, param->bytes);
return TSDB_CODE_SUCCESS;
}
#if 0
if (0) {
// allow user bind param data with different type
union {
@ -641,6 +665,7 @@ static int doBindParam(char* data, SParamInfo* param, TAOS_BIND* bind) {
}
}
}
#endif
if (bind->buffer_type != param->type) {
return TSDB_CODE_TSC_INVALID_VALUE;
@ -690,12 +715,90 @@ static int doBindParam(char* data, SParamInfo* param, TAOS_BIND* bind) {
}
memcpy(data + param->offset, bind->buffer, size);
if (param->offset == 0) {
if (tsCheckTimestamp(pBlock, data + param->offset) != TSDB_CODE_SUCCESS) {
tscError("invalid timestamp");
return TSDB_CODE_TSC_INVALID_VALUE;
}
}
return TSDB_CODE_SUCCESS;
}
static int doBindBatchParam(STableDataBlocks* pBlock, SParamInfo* param, TAOS_MULTI_BIND* bind, int32_t rowNum) {
if (bind->buffer_type != param->type || !isValidDataType(param->type)) {
return TSDB_CODE_TSC_INVALID_VALUE;
}
if (IS_VAR_DATA_TYPE(param->type) && bind->length == NULL) {
tscError("BINARY/NCHAR no length");
return TSDB_CODE_TSC_INVALID_VALUE;
}
for (int i = 0; i < bind->num; ++i) {
char* data = pBlock->pData + sizeof(SSubmitBlk) + pBlock->rowSize * (rowNum + i);
if (bind->is_null != NULL && bind->is_null[i]) {
setNull(data + param->offset, param->type, param->bytes);
continue;
}
if (!IS_VAR_DATA_TYPE(param->type)) {
memcpy(data + param->offset, (char *)bind->buffer + bind->buffer_length * i, tDataTypes[param->type].bytes);
if (param->offset == 0) {
if (tsCheckTimestamp(pBlock, data + param->offset) != TSDB_CODE_SUCCESS) {
tscError("invalid timestamp");
return TSDB_CODE_TSC_INVALID_VALUE;
}
}
} else if (param->type == TSDB_DATA_TYPE_BINARY) {
if (bind->length[i] > (uintptr_t)param->bytes) {
tscError("binary length too long, ignore it, max:%d, actual:%d", param->bytes, (int32_t)bind->length[i]);
return TSDB_CODE_TSC_INVALID_VALUE;
}
int16_t bsize = (short)bind->length[i];
STR_WITH_SIZE_TO_VARSTR(data + param->offset, (char *)bind->buffer + bind->buffer_length * i, bsize);
} else if (param->type == TSDB_DATA_TYPE_NCHAR) {
if (bind->length[i] > (uintptr_t)param->bytes) {
tscError("nchar string length too long, ignore it, max:%d, actual:%d", param->bytes, (int32_t)bind->length[i]);
return TSDB_CODE_TSC_INVALID_VALUE;
}
int32_t output = 0;
if (!taosMbsToUcs4((char *)bind->buffer + bind->buffer_length * i, bind->length[i], varDataVal(data + param->offset), param->bytes - VARSTR_HEADER_SIZE, &output)) {
tscError("convert nchar string to UCS4_LE failed:%s", (char*)((char *)bind->buffer + bind->buffer_length * i));
return TSDB_CODE_TSC_INVALID_VALUE;
}
varDataSetLen(data + param->offset, output);
}
}
return TSDB_CODE_SUCCESS;
}
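
doBindBatchParam() above requires a per-row length array for BINARY/NCHAR columns and packs each row at a fixed buffer_length stride. The snippet below is a hypothetical illustration of one TAOS_MULTI_BIND describing a BINARY column with three rows (the third row NULL); the sizes and values are placeholders.

#include <string.h>
#include "taos.h"

/* Hypothetical example: BINARY column, 3 rows, row stride = 20 bytes. */
static void fill_binary_bind(TAOS_MULTI_BIND *b) {
  static char    rows[3][20];
  static int32_t lens[3]  = {5, 2, 0};
  static char    nulls[3] = {0, 0, 1};     /* third row is NULL */

  memcpy(rows[0], "hello", 5);
  memcpy(rows[1], "hi", 2);

  memset(b, 0, sizeof(*b));
  b->buffer_type   = TSDB_DATA_TYPE_BINARY;
  b->buffer        = rows;
  b->buffer_length = sizeof(rows[0]);      /* max bytes per row (the stride) */
  b->length        = lens;                 /* actual bytes per row, checked against param->bytes */
  b->is_null       = nulls;
  b->num           = 3;
}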
static int insertStmtBindParam(STscStmt* stmt, TAOS_BIND* bind) {
SSqlCmd* pCmd = &stmt->pSql->cmd;
STscStmt* pStmt = (STscStmt*)stmt;
STableDataBlocks* pBlock = NULL;
if (pStmt->multiTbInsert) {
if (pCmd->pTableBlockHashList == NULL) {
tscError("0x%"PRIx64" Table block hash list is empty", pStmt->pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
STableDataBlocks** t1 = (STableDataBlocks**)taosHashGet(pCmd->pTableBlockHashList, (const char*)&pStmt->mtb.currentUid, sizeof(pStmt->mtb.currentUid));
if (t1 == NULL) {
tscError("0x%"PRIx64" no table data block in hash list, uid:%" PRId64 , pStmt->pSql->self, pStmt->mtb.currentUid);
return TSDB_CODE_TSC_APP_ERROR;
}
pBlock = *t1;
} else {
STableMetaInfo* pTableMetaInfo = tscGetTableMetaInfoFromCmd(pCmd, 0, 0);
STableMeta* pTableMeta = pTableMetaInfo->pTableMeta;
@ -703,16 +806,15 @@ static int insertStmtBindParam(STscStmt* stmt, TAOS_BIND* bind) {
pCmd->pTableBlockHashList = taosHashInit(16, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), true, false);
}
STableDataBlocks* pBlock = NULL;
int32_t ret =
tscGetDataBlockFromList(pCmd->pTableBlockHashList, pTableMeta->id.uid, TSDB_PAYLOAD_SIZE, sizeof(SSubmitBlk),
pTableMeta->tableInfo.rowSize, &pTableMetaInfo->name, pTableMeta, &pBlock, NULL);
if (ret != 0) {
// todo handle error
return ret;
}
}
uint32_t totalDataSize = sizeof(SSubmitBlk) + pCmd->batchSize * pBlock->rowSize;
uint32_t totalDataSize = sizeof(SSubmitBlk) + (pCmd->batchSize + 1) * pBlock->rowSize;
if (totalDataSize > pBlock->nAllocSize) {
const double factor = 1.5;
@ -729,9 +831,9 @@ static int insertStmtBindParam(STscStmt* stmt, TAOS_BIND* bind) {
for (uint32_t j = 0; j < pBlock->numOfParams; ++j) {
SParamInfo* param = &pBlock->params[j];
int code = doBindParam(data, param, &bind[param->idx]);
int code = doBindParam(pBlock, data, param, &bind[param->idx], 1);
if (code != TSDB_CODE_SUCCESS) {
tscDebug("param %d: type mismatch or invalid", param->idx);
tscDebug("0x%"PRIx64" bind column %d: type mismatch or invalid", pStmt->pSql->self, param->idx);
return code;
}
}
@ -739,9 +841,135 @@ static int insertStmtBindParam(STscStmt* stmt, TAOS_BIND* bind) {
return TSDB_CODE_SUCCESS;
}
static int insertStmtBindParamBatch(STscStmt* stmt, TAOS_MULTI_BIND* bind, int colIdx) {
SSqlCmd* pCmd = &stmt->pSql->cmd;
STscStmt* pStmt = (STscStmt*)stmt;
int rowNum = bind->num;
STableDataBlocks* pBlock = NULL;
if (pStmt->multiTbInsert) {
if (pCmd->pTableBlockHashList == NULL) {
tscError("0x%"PRIx64" Table block hash list is empty", pStmt->pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
STableDataBlocks** t1 = (STableDataBlocks**)taosHashGet(pCmd->pTableBlockHashList, (const char*)&pStmt->mtb.currentUid, sizeof(pStmt->mtb.currentUid));
if (t1 == NULL) {
tscError("0x%"PRIx64" no table data block in hash list, uid:%" PRId64 , pStmt->pSql->self, pStmt->mtb.currentUid);
return TSDB_CODE_TSC_APP_ERROR;
}
pBlock = *t1;
} else {
STableMetaInfo* pTableMetaInfo = tscGetTableMetaInfoFromCmd(pCmd, 0, 0);
STableMeta* pTableMeta = pTableMetaInfo->pTableMeta;
if (pCmd->pTableBlockHashList == NULL) {
pCmd->pTableBlockHashList = taosHashInit(16, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), true, false);
}
int32_t ret =
tscGetDataBlockFromList(pCmd->pTableBlockHashList, pTableMeta->id.uid, TSDB_PAYLOAD_SIZE, sizeof(SSubmitBlk),
pTableMeta->tableInfo.rowSize, &pTableMetaInfo->name, pTableMeta, &pBlock, NULL);
if (ret != 0) {
return ret;
}
}
assert(colIdx == -1 || (colIdx >= 0 && colIdx < pBlock->numOfParams));
uint32_t totalDataSize = sizeof(SSubmitBlk) + (pCmd->batchSize + rowNum) * pBlock->rowSize;
if (totalDataSize > pBlock->nAllocSize) {
const double factor = 1.5;
void* tmp = realloc(pBlock->pData, (uint32_t)(totalDataSize * factor));
if (tmp == NULL) {
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
pBlock->pData = (char*)tmp;
pBlock->nAllocSize = (uint32_t)(totalDataSize * factor);
}
if (colIdx == -1) {
for (uint32_t j = 0; j < pBlock->numOfParams; ++j) {
SParamInfo* param = &pBlock->params[j];
if (bind[param->idx].num != rowNum) {
tscError("0x%"PRIx64" param %d: num[%d:%d] not match", pStmt->pSql->self, param->idx, rowNum, bind[param->idx].num);
return TSDB_CODE_TSC_INVALID_VALUE;
}
int code = doBindBatchParam(pBlock, param, &bind[param->idx], pCmd->batchSize);
if (code != TSDB_CODE_SUCCESS) {
tscError("0x%"PRIx64" bind column %d: type mismatch or invalid", pStmt->pSql->self, param->idx);
return code;
}
}
pCmd->batchSize += rowNum - 1;
} else {
SParamInfo* param = &pBlock->params[colIdx];
int code = doBindBatchParam(pBlock, param, bind, pCmd->batchSize);
if (code != TSDB_CODE_SUCCESS) {
tscError("0x%"PRIx64" bind column %d: type mismatch or invalid", pStmt->pSql->self, param->idx);
return code;
}
if (colIdx == (pBlock->numOfParams - 1)) {
pCmd->batchSize += rowNum - 1;
}
}
return TSDB_CODE_SUCCESS;
}
static int insertStmtUpdateBatch(STscStmt* stmt) {
SSqlObj* pSql = stmt->pSql;
SSqlCmd* pCmd = &pSql->cmd;
STableDataBlocks* pBlock = NULL;
if (pCmd->batchSize > INT16_MAX) {
tscError("too many record:%d", pCmd->batchSize);
return TSDB_CODE_TSC_APP_ERROR;
}
assert(pCmd->numOfClause == 1);
if (taosHashGetSize(pCmd->pTableBlockHashList) == 0) {
return TSDB_CODE_SUCCESS;
}
STableDataBlocks** t1 = (STableDataBlocks**)taosHashGet(pCmd->pTableBlockHashList, (const char*)&stmt->mtb.currentUid, sizeof(stmt->mtb.currentUid));
if (t1 == NULL) {
tscError("0x%"PRIx64" no table data block in hash list, uid:%" PRId64 , pSql->self, stmt->mtb.currentUid);
return TSDB_CODE_TSC_APP_ERROR;
}
pBlock = *t1;
STableMeta* pTableMeta = pBlock->pTableMeta;
pBlock->size = sizeof(SSubmitBlk) + pCmd->batchSize * pBlock->rowSize;
SSubmitBlk* pBlk = (SSubmitBlk*) pBlock->pData;
pBlk->numOfRows = pCmd->batchSize;
pBlk->dataLen = 0;
pBlk->uid = pTableMeta->id.uid;
pBlk->tid = pTableMeta->id.tid;
return TSDB_CODE_SUCCESS;
}
static int insertStmtAddBatch(STscStmt* stmt) {
SSqlCmd* pCmd = &stmt->pSql->cmd;
++pCmd->batchSize;
if (stmt->multiTbInsert) {
return insertStmtUpdateBatch(stmt);
}
return TSDB_CODE_SUCCESS;
}
@ -835,6 +1063,83 @@ static int insertStmtExecute(STscStmt* stmt) {
return pSql->res.code;
}
static void insertBatchClean(STscStmt* pStmt) {
SSqlCmd *pCmd = &pStmt->pSql->cmd;
SSqlObj *pSql = pStmt->pSql;
int32_t size = taosHashGetSize(pCmd->pTableBlockHashList);
// data block reset
pCmd->batchSize = 0;
for(int32_t i = 0; i < size; ++i) {
if (pCmd->pTableNameList && pCmd->pTableNameList[i]) {
tfree(pCmd->pTableNameList[i]);
}
}
tfree(pCmd->pTableNameList);
/*
STableDataBlocks** p = taosHashIterate(pCmd->pTableBlockHashList, NULL);
STableDataBlocks* pOneTableBlock = *p;
while (1) {
SSubmitBlk* pBlocks = (SSubmitBlk*) pOneTableBlock->pData;
pOneTableBlock->size = sizeof(SSubmitBlk);
pBlocks->numOfRows = 0;
p = taosHashIterate(pCmd->pTableBlockHashList, p);
if (p == NULL) {
break;
}
pOneTableBlock = *p;
}
*/
pCmd->pDataBlocks = tscDestroyBlockArrayList(pCmd->pDataBlocks);
pCmd->numOfTables = 0;
taosHashEmpty(pCmd->pTableBlockHashList);
tscFreeSqlResult(pSql);
tscFreeSubobj(pSql);
tfree(pSql->pSubs);
pSql->subState.numOfSub = 0;
}
static int insertBatchStmtExecute(STscStmt* pStmt) {
int32_t code = 0;
if(pStmt->mtb.nameSet == false) {
tscError("0x%"PRIx64" no table name set", pStmt->pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
pStmt->pSql->retry = pStmt->pSql->maxRetry + 1; //no retry
if (taosHashGetSize(pStmt->pSql->cmd.pTableBlockHashList) > 0) { // merge according to vgId
if ((code = tscMergeTableDataBlocks(pStmt->pSql, false)) != TSDB_CODE_SUCCESS) {
return code;
}
}
code = tscHandleMultivnodeInsert(pStmt->pSql);
if (code != TSDB_CODE_SUCCESS) {
return code;
}
// wait for the callback function to post the semaphore
tsem_wait(&pStmt->pSql->rspSem);
insertBatchClean(pStmt);
return pStmt->pSql->res.code;
}
////////////////////////////////////////////////////////////////////////////////
// interface functions
@ -866,7 +1171,9 @@ TAOS_STMT* taos_stmt_init(TAOS* taos) {
pSql->signature = pSql;
pSql->pTscObj = pObj;
pSql->maxRetry = TSDB_MAX_REPLICA;
pSql->isBind = true;
pStmt->pSql = pSql;
pStmt->last = STMT_INIT;
return pStmt;
}
@ -879,6 +1186,13 @@ int taos_stmt_prepare(TAOS_STMT* stmt, const char* sql, unsigned long length) {
return TSDB_CODE_TSC_DISCONNECTED;
}
if (pStmt->last != STMT_INIT) {
tscError("prepare status error, last:%d", pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
pStmt->last = STMT_PREPARE;
SSqlObj* pSql = pStmt->pSql;
size_t sqlLen = strlen(sql);
@ -917,6 +1231,36 @@ int taos_stmt_prepare(TAOS_STMT* stmt, const char* sql, unsigned long length) {
registerSqlObj(pSql);
int32_t ret = TSDB_CODE_SUCCESS;
if ((ret = tsInsertInitialCheck(pSql)) != TSDB_CODE_SUCCESS) {
return ret;
}
int32_t index = 0;
SStrToken sToken = tStrGetToken(pCmd->curSql, &index, false);
if (sToken.n == 0) {
return TSDB_CODE_TSC_INVALID_SQL;
}
if (sToken.n == 1 && sToken.type == TK_QUESTION) {
pStmt->multiTbInsert = true;
pStmt->mtb.tbname = sToken;
pStmt->mtb.nameSet = false;
if (pStmt->mtb.pTableHash == NULL) {
pStmt->mtb.pTableHash = taosHashInit(16, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BINARY), true, false);
}
if (pStmt->mtb.pTableBlockHashList == NULL) {
pStmt->mtb.pTableBlockHashList = taosHashInit(16, taosGetDefaultHashFunction(TSDB_DATA_TYPE_BIGINT), true, false);
}
return TSDB_CODE_SUCCESS;
}
pStmt->multiTbInsert = false;
memset(&pStmt->mtb, 0, sizeof(pStmt->mtb));
int32_t code = tsParseSql(pSql, true);
if (code == TSDB_CODE_TSC_ACTION_IN_PROGRESS) {
// wait for the callback function to post the semaphore
@ -931,6 +1275,105 @@ int taos_stmt_prepare(TAOS_STMT* stmt, const char* sql, unsigned long length) {
return normalStmtPrepare(pStmt);
}
int taos_stmt_set_tbname(TAOS_STMT* stmt, const char* name) {
STscStmt* pStmt = (STscStmt*)stmt;
SSqlObj* pSql = pStmt->pSql;
SSqlCmd* pCmd = &pSql->cmd;
if (stmt == NULL || pStmt->pSql == NULL || pStmt->taos == NULL) {
terrno = TSDB_CODE_TSC_DISCONNECTED;
return TSDB_CODE_TSC_DISCONNECTED;
}
if (name == NULL) {
terrno = TSDB_CODE_TSC_APP_ERROR;
tscError("0x%"PRIx64" name is NULL", pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
if (pStmt->multiTbInsert == false || !tscIsInsertData(pSql->sqlstr)) {
terrno = TSDB_CODE_TSC_APP_ERROR;
tscError("0x%"PRIx64" not multi table insert", pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
if (pStmt->last == STMT_INIT || pStmt->last == STMT_BIND || pStmt->last == STMT_BIND_COL) {
tscError("0x%"PRIx64" settbname status error, last:%d", pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
pStmt->last = STMT_SETTBNAME;
uint64_t* uid = (uint64_t*)taosHashGet(pStmt->mtb.pTableHash, name, strlen(name));
if (uid != NULL) {
pStmt->mtb.currentUid = *uid;
STableDataBlocks** t1 = (STableDataBlocks**)taosHashGet(pStmt->mtb.pTableBlockHashList, (const char*)&pStmt->mtb.currentUid, sizeof(pStmt->mtb.currentUid));
if (t1 == NULL) {
tscError("0x%"PRIx64" no table data block in hash list, uid:%" PRId64 , pSql->self, pStmt->mtb.currentUid);
return TSDB_CODE_TSC_APP_ERROR;
}
SSubmitBlk* pBlk = (SSubmitBlk*) (*t1)->pData;
pCmd->batchSize = pBlk->numOfRows;
taosHashPut(pCmd->pTableBlockHashList, (void *)&pStmt->mtb.currentUid, sizeof(pStmt->mtb.currentUid), (void*)t1, POINTER_BYTES);
tscDebug("0x%"PRIx64" table:%s is already prepared, uid:%" PRIu64, pSql->self, name, pStmt->mtb.currentUid);
return TSDB_CODE_SUCCESS;
}
pStmt->mtb.tbname = tscReplaceStrToken(&pSql->sqlstr, &pStmt->mtb.tbname, name);
pStmt->mtb.nameSet = true;
tscDebug("0x%"PRIx64" SQL: %s", pSql->self, pSql->sqlstr);
pSql->cmd.parseFinished = 0;
pSql->cmd.numOfParams = 0;
pSql->cmd.batchSize = 0;
if (taosHashGetSize(pCmd->pTableBlockHashList) > 0) {
SHashObj* hashList = pCmd->pTableBlockHashList;
pCmd->pTableBlockHashList = NULL;
tscResetSqlCmd(pCmd, true);
pCmd->pTableBlockHashList = hashList;
}
int32_t code = tsParseSql(pStmt->pSql, true);
if (code == TSDB_CODE_TSC_ACTION_IN_PROGRESS) {
// wait for the callback function to post the semaphore
tsem_wait(&pStmt->pSql->rspSem);
code = pStmt->pSql->res.code;
}
if (code == TSDB_CODE_SUCCESS) {
STableMetaInfo* pTableMetaInfo = tscGetTableMetaInfoFromCmd(pCmd, 0, 0);
STableMeta* pTableMeta = pTableMetaInfo->pTableMeta;
STableDataBlocks* pBlock = NULL;
code = tscGetDataBlockFromList(pCmd->pTableBlockHashList, pTableMeta->id.uid, TSDB_PAYLOAD_SIZE, sizeof(SSubmitBlk),
pTableMeta->tableInfo.rowSize, &pTableMetaInfo->name, pTableMeta, &pBlock, NULL);
if (code != TSDB_CODE_SUCCESS) {
return code;
}
SSubmitBlk* blk = (SSubmitBlk*)pBlock->pData;
blk->numOfRows = 0;
pStmt->mtb.currentUid = pTableMeta->id.uid;
pStmt->mtb.tbNum++;
taosHashPut(pStmt->mtb.pTableBlockHashList, (void *)&pStmt->mtb.currentUid, sizeof(pStmt->mtb.currentUid), (void*)&pBlock, POINTER_BYTES);
taosHashPut(pStmt->mtb.pTableHash, name, strlen(name), (char*) &pTableMeta->id.uid, sizeof(pTableMeta->id.uid));
tscDebug("0x%"PRIx64" table:%s is prepared, uid:%" PRIx64, pSql->self, name, pStmt->mtb.currentUid);
}
return code;
}
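
Combined with the '?' table-name handling added to taos_stmt_prepare above, taos_stmt_set_tbname() lets one prepared insert be reused for many tables: the first call for a name parses the statement and caches the table's uid and data block, and later calls for the same name reuse them. A hedged usage sketch follows; the column binds are assumed to be populated as in the earlier examples, and error handling is abbreviated.

#include <string.h>
#include "taos.h"

/* Sketch: one statement, several tables; cols points at one populated
 * TAOS_MULTI_BIND per column of the insert. */
static int insert_many_tables(TAOS *conn, const char **tables, int nTables,
                              TAOS_MULTI_BIND *cols) {
  const char *sql = "insert into ? values(?, ?)";
  TAOS_STMT *stmt = taos_stmt_init(conn);
  if (taos_stmt_prepare(stmt, sql, (unsigned long)strlen(sql)) != 0) return -1;

  for (int i = 0; i < nTables; ++i) {
    if (taos_stmt_set_tbname(stmt, tables[i]) != 0) break;   /* parse/cache per table */
    if (taos_stmt_bind_param_batch(stmt, cols) != 0) break;  /* all columns at once */
    if (taos_stmt_add_batch(stmt) != 0) break;
  }

  int code = taos_stmt_execute(stmt);  /* blocks are merged per vgroup and submitted */
  taos_stmt_close(stmt);
  return code;
}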
int taos_stmt_close(TAOS_STMT* stmt) {
STscStmt* pStmt = (STscStmt*)stmt;
if (!pStmt->isInsert) {
@ -943,6 +1386,13 @@ int taos_stmt_close(TAOS_STMT* stmt) {
}
free(normal->parts);
free(normal->sql);
} else {
if (pStmt->multiTbInsert) {
taosHashCleanup(pStmt->mtb.pTableHash);
pStmt->mtb.pTableBlockHashList = tscDestroyBlockHashTable(pStmt->mtb.pTableBlockHashList, true);
taosHashCleanup(pStmt->pSql->cmd.pTableBlockHashList);
pStmt->pSql->cmd.pTableBlockHashList = NULL;
}
}
taos_free_result(pStmt->pSql);
@ -952,18 +1402,122 @@ int taos_stmt_close(TAOS_STMT* stmt) {
int taos_stmt_bind_param(TAOS_STMT* stmt, TAOS_BIND* bind) {
STscStmt* pStmt = (STscStmt*)stmt;
if (stmt == NULL || pStmt->pSql == NULL || pStmt->taos == NULL) {
terrno = TSDB_CODE_TSC_DISCONNECTED;
return TSDB_CODE_TSC_DISCONNECTED;
}
if (pStmt->isInsert) {
if (pStmt->multiTbInsert) {
if (pStmt->last != STMT_SETTBNAME && pStmt->last != STMT_ADD_BATCH) {
tscError("0x%"PRIx64" bind param status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
} else {
if (pStmt->last != STMT_PREPARE && pStmt->last != STMT_ADD_BATCH && pStmt->last != STMT_EXECUTE) {
tscError("0x%"PRIx64" bind param status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
}
pStmt->last = STMT_BIND;
return insertStmtBindParam(pStmt, bind);
} else {
return normalStmtBindParam(pStmt, bind);
}
}
int taos_stmt_bind_param_batch(TAOS_STMT* stmt, TAOS_MULTI_BIND* bind) {
STscStmt* pStmt = (STscStmt*)stmt;
if (stmt == NULL || pStmt->pSql == NULL || pStmt->taos == NULL) {
terrno = TSDB_CODE_TSC_DISCONNECTED;
return TSDB_CODE_TSC_DISCONNECTED;
}
if (bind == NULL || bind->num <= 0 || bind->num > INT16_MAX) {
tscError("0x%"PRIx64" invalid parameter", pStmt->pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
if (!pStmt->isInsert) {
tscError("0x%"PRIx64" not or invalid batch insert", pStmt->pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
if (pStmt->multiTbInsert) {
if (pStmt->last != STMT_SETTBNAME && pStmt->last != STMT_ADD_BATCH) {
tscError("0x%"PRIx64" bind param status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
} else {
if (pStmt->last != STMT_PREPARE && pStmt->last != STMT_ADD_BATCH && pStmt->last != STMT_EXECUTE) {
tscError("0x%"PRIx64" bind param status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
}
pStmt->last = STMT_BIND;
return insertStmtBindParamBatch(pStmt, bind, -1);
}
int taos_stmt_bind_single_param_batch(TAOS_STMT* stmt, TAOS_MULTI_BIND* bind, int colIdx) {
STscStmt* pStmt = (STscStmt*)stmt;
if (stmt == NULL || pStmt->pSql == NULL || pStmt->taos == NULL) {
terrno = TSDB_CODE_TSC_DISCONNECTED;
return TSDB_CODE_TSC_DISCONNECTED;
}
if (bind == NULL || bind->num <= 0 || bind->num > INT16_MAX) {
tscError("0x%"PRIx64" invalid parameter", pStmt->pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
if (!pStmt->isInsert) {
tscError("0x%"PRIx64" not or invalid batch insert", pStmt->pSql->self);
return TSDB_CODE_TSC_APP_ERROR;
}
if (pStmt->multiTbInsert) {
if (pStmt->last != STMT_SETTBNAME && pStmt->last != STMT_ADD_BATCH && pStmt->last != STMT_BIND_COL) {
tscError("0x%"PRIx64" bind param status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
} else {
if (pStmt->last != STMT_PREPARE && pStmt->last != STMT_ADD_BATCH && pStmt->last != STMT_BIND_COL && pStmt->last != STMT_EXECUTE) {
tscError("0x%"PRIx64" bind param status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
}
pStmt->last = STMT_BIND_COL;
return insertStmtBindParamBatch(pStmt, bind, colIdx);
}
int taos_stmt_add_batch(TAOS_STMT* stmt) {
STscStmt* pStmt = (STscStmt*)stmt;
if (stmt == NULL || pStmt->pSql == NULL || pStmt->taos == NULL) {
terrno = TSDB_CODE_TSC_DISCONNECTED;
return TSDB_CODE_TSC_DISCONNECTED;
}
if (pStmt->isInsert) {
if (pStmt->last != STMT_BIND && pStmt->last != STMT_BIND_COL) {
tscError("0x%"PRIx64" add batch status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
pStmt->last = STMT_ADD_BATCH;
return insertStmtAddBatch(pStmt);
}
return TSDB_CODE_COM_OPS_NOT_SUPPORT;
}
@ -978,8 +1532,24 @@ int taos_stmt_reset(TAOS_STMT* stmt) {
int taos_stmt_execute(TAOS_STMT* stmt) {
int ret = 0;
STscStmt* pStmt = (STscStmt*)stmt;
if (stmt == NULL || pStmt->pSql == NULL || pStmt->taos == NULL) {
terrno = TSDB_CODE_TSC_DISCONNECTED;
return TSDB_CODE_TSC_DISCONNECTED;
}
if (pStmt->isInsert) {
if (pStmt->last != STMT_ADD_BATCH) {
tscError("0x%"PRIx64" exec status error, last:%d", pStmt->pSql->self, pStmt->last);
return TSDB_CODE_TSC_APP_ERROR;
}
pStmt->last = STMT_EXECUTE;
if (pStmt->multiTbInsert) {
ret = insertBatchStmtExecute(pStmt);
} else {
ret = insertStmtExecute(pStmt);
}
} else { // normal stmt query
char* sql = normalStmtBuildSql(pStmt);
if (sql == NULL) {
@ -1074,7 +1644,7 @@ int taos_stmt_get_param(TAOS_STMT *stmt, int idx, int *type, int *bytes) {
}
if (idx<0 || idx>=pBlock->numOfParams) {
tscError("param %d: out of range", idx);
tscError("0x%"PRIx64" param %d: out of range", pStmt->pSql->self, idx);
abort();
}

View File

@ -21,19 +21,19 @@
#endif // __APPLE__
#include "os.h"
#include "ttype.h"
#include "texpr.h"
#include "taos.h"
#include "taosmsg.h"
#include "tcompare.h"
#include "texpr.h"
#include "tname.h"
#include "tscLog.h"
#include "tscUtil.h"
#include "tschemautil.h"
#include "tsclient.h"
#include "tstoken.h"
#include "tstrbuild.h"
#include "ttoken.h"
#include "ttokendef.h"
#include "ttype.h"
#include "qUtil.h"
#define DEFAULT_PRIMARY_TIMESTAMP_COL_NAME "_c0"
@ -432,7 +432,6 @@ int32_t tscToSQLCmd(SSqlObj* pSql, struct SSqlInfo* pInfo) {
if (tscValidateName(pToken) != TSDB_CODE_SUCCESS) {
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg1);
}
// additional msg has been attached already
code = tscSetTableFullName(pTableMetaInfo, pToken, pSql);
if (code != TSDB_CODE_SUCCESS) {
@ -441,6 +440,7 @@ int32_t tscToSQLCmd(SSqlObj* pSql, struct SSqlInfo* pInfo) {
return tscGetTableMeta(pSql, pTableMetaInfo);
}
case TSDB_SQL_SHOW_CREATE_STABLE:
case TSDB_SQL_SHOW_CREATE_TABLE: {
const char* msg1 = "invalid table name";
@ -635,18 +635,26 @@ int32_t tscToSQLCmd(SSqlObj* pSql, struct SSqlInfo* pInfo) {
// set the command/global limit parameters from the first subclause to the sqlcmd object
SQueryInfo* pQueryInfo1 = tscGetQueryInfo(pCmd, 0);
pCmd->command = pQueryInfo1->command;
int32_t diffSize = 0;
// if there is only one element, the limit of clause is the limit of global result.
// validate the select node for "UNION ALL" subclause
for (int32_t i = 1; i < pCmd->numOfClause; ++i) {
SQueryInfo* pQueryInfo2 = tscGetQueryInfo(pCmd, i);
int32_t ret = tscFieldInfoCompare(&pQueryInfo1->fieldsInfo, &pQueryInfo2->fieldsInfo);
int32_t ret = tscFieldInfoCompare(&pQueryInfo1->fieldsInfo, &pQueryInfo2->fieldsInfo, &diffSize);
if (ret != 0) {
return invalidSqlErrMsg(tscGetErrorMsgPayload(pCmd), msg1);
}
}
if (diffSize) {
for (int32_t i = 1; i < pCmd->numOfClause; ++i) {
SQueryInfo* pQueryInfo2 = tscGetQueryInfo(pCmd, i);
tscFieldInfoSetSize(&pQueryInfo1->fieldsInfo, &pQueryInfo2->fieldsInfo);
}
}
pCmd->parseFinished = 1;
return TSDB_CODE_SUCCESS; // do not build query message here
}
@ -976,7 +984,6 @@ int32_t tscSetTableFullName(STableMetaInfo* pTableMetaInfo, SStrToken* pTableNam
const char* msg4 = "db name too long";
const char* msg5 = "table name too long";
SSqlCmd* pCmd = &pSql->cmd;
int32_t code = TSDB_CODE_SUCCESS;
int32_t idx = getDelimiterIndex(pTableName);
@ -1606,11 +1613,27 @@ bool isValidDistinctSql(SQueryInfo* pQueryInfo) {
return false;
}
static bool hasNoneUserDefineExpr(SQueryInfo* pQueryInfo) {
size_t numOfExprs = taosArrayGetSize(pQueryInfo->exprList);
for (int32_t i = 0; i < numOfExprs; ++i) {
SSqlExpr* pExpr = taosArrayGetP(pQueryInfo->exprList, i);
if (TSDB_COL_IS_UD_COL(pExpr->colInfo.flag)) {
continue;
}
return true;
}
return false;
}
int32_t validateSelectNodeList(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SArray* pSelNodeList, bool isSTable, bool joinQuery,
bool timeWindowQuery) {
assert(pSelNodeList != NULL && pCmd != NULL);
const char* msg1 = "too many items in selection clause";
const char* msg2 = "functions or others can not be mixed up";
const char* msg3 = "not support query expression";
const char* msg4 = "only support distinct one tag";
@ -1675,7 +1698,7 @@ int32_t validateSelectNodeList(SSqlCmd* pCmd, SQueryInfo* pQueryInfo, SArray* pS
// there is only one user-defined column in the final result field, add the timestamp column.
size_t numOfSrcCols = taosArrayGetSize(pQueryInfo->colList);
if (numOfSrcCols <= 0 && !tscQueryTags(pQueryInfo) && !tscQueryBlockInfo(pQueryInfo)) {
if ((numOfSrcCols <= 0 || !hasNoneUserDefineExpr(pQueryInfo)) && !tscQueryTags(pQueryInfo) && !tscQueryBlockInfo(pQueryInfo)) {
addPrimaryTsColIntoResult(pQueryInfo);
}
@ -4621,7 +4644,7 @@ int32_t getTimeRange(STimeWindow* win, tSqlExpr* pRight, int32_t optr, int16_t t
}
} else {
SStrToken token = {.z = pRight->value.pz, .n = pRight->value.nLen, .type = TK_ID};
int32_t len = tSQLGetToken(pRight->value.pz, &token.type);
int32_t len = tGetToken(pRight->value.pz, &token.type);
if ((token.type != TK_INTEGER && token.type != TK_FLOAT) || len != pRight->value.nLen) {
return TSDB_CODE_TSC_INVALID_SQL;
@ -5508,13 +5531,13 @@ int32_t validateLocalConfig(SMiscInfo* pOptions) {
}
int32_t validateColumnName(char* name) {
bool ret = isKeyWord(name, (int32_t)strlen(name));
bool ret = taosIsKeyWordToken(name, (int32_t)strlen(name));
if (ret) {
return TSDB_CODE_TSC_INVALID_SQL;
}
SStrToken token = {.z = name};
token.n = tSQLGetToken(name, &token.type);
token.n = tGetToken(name, &token.type);
if (token.type != TK_STRING && token.type != TK_ID) {
return TSDB_CODE_TSC_INVALID_SQL;
@ -5525,7 +5548,7 @@ int32_t validateColumnName(char* name) {
strntolower(token.z, token.z, token.n);
token.n = (uint32_t)strtrim(token.z);
int32_t k = tSQLGetToken(token.z, &token.type);
int32_t k = tGetToken(token.z, &token.type);
if (k != token.n) {
return TSDB_CODE_TSC_INVALID_SQL;
}
@ -7502,4 +7525,3 @@ bool hasNormalColumnFilter(SQueryInfo* pQueryInfo) {
return false;
}
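
A recurring change in this file (and in the ones below) is the rename of the tokenizer entry point tSQLGetToken() to tGetToken(), together with the tstoken.h → ttoken.h header move; the call pattern itself is unchanged. A minimal sketch of the pattern as it appears at the call sites above, for reference only:

#include "ttoken.h"   /* replaces the old tstoken.h */

/* tGetToken() returns the token length and writes the token type,
 * exactly as tSQLGetToken() did before the rename. */
static int is_single_identifier(char *name) {
  SStrToken token = {.z = name};
  token.n = tGetToken(name, &token.type);
  return token.type == TK_ID && name[token.n] == '\0';
}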

View File

@ -1881,6 +1881,8 @@ int tscProcessTableMetaRsp(SSqlObj *pSql) {
return TSDB_CODE_TSC_INVALID_VALUE;
}
assert(pTableMeta->tableType == TSDB_SUPER_TABLE || pTableMeta->tableType == TSDB_CHILD_TABLE || pTableMeta->tableType == TSDB_NORMAL_TABLE || pTableMeta->tableType == TSDB_STREAM_TABLE);
if (pTableMeta->tableType == TSDB_CHILD_TABLE) {
// check whether the super table is already in the hash map
int32_t len = (int32_t) strnlen(pTableMeta->sTableName, TSDB_TABLE_FNAME_LEN);
@ -2441,10 +2443,23 @@ static int32_t getTableMetaFromMnode(SSqlObj *pSql, STableMetaInfo *pTableMetaIn
int32_t tscGetTableMeta(SSqlObj *pSql, STableMetaInfo *pTableMetaInfo) {
assert(tIsValidName(&pTableMetaInfo->name));
tfree(pTableMetaInfo->pTableMeta);
uint32_t size = tscGetTableMetaMaxSize();
if (pTableMetaInfo->pTableMeta == NULL) {
pTableMetaInfo->pTableMeta = calloc(1, size);
pTableMetaInfo->tableMetaSize = size;
} else if (pTableMetaInfo->tableMetaSize < size) {
char *tmp = realloc(pTableMetaInfo->pTableMeta, size);
if (tmp == NULL) {
return TSDB_CODE_TSC_OUT_OF_MEMORY;
}
pTableMetaInfo->pTableMeta = (STableMeta *)tmp;
memset(pTableMetaInfo->pTableMeta, 0, size);
pTableMetaInfo->tableMetaSize = size;
} else {
//uint32_t s = tscGetTableMetaSize(pTableMetaInfo->pTableMeta);
memset(pTableMetaInfo->pTableMeta, 0, size);
pTableMetaInfo->tableMetaSize = size;
}
pTableMetaInfo->pTableMeta->tableType = -1;
pTableMetaInfo->pTableMeta->tableInfo.numOfColumns = -1;
@ -2456,10 +2471,13 @@ int32_t tscGetTableMeta(SSqlObj *pSql, STableMetaInfo *pTableMetaInfo) {
taosHashGetClone(tscTableMetaInfo, name, len, NULL, pTableMetaInfo->pTableMeta, -1);
// TODO resize the tableMeta
char buf[80*1024] = {0};
assert(size < 80*1024);
STableMeta* pMeta = pTableMetaInfo->pTableMeta;
if (pMeta->id.uid > 0) {
if (pMeta->tableType == TSDB_CHILD_TABLE) {
int32_t code = tscCreateTableMetaFromCChildMeta(pTableMetaInfo->pTableMeta, name);
int32_t code = tscCreateTableMetaFromCChildMeta(pTableMetaInfo->pTableMeta, name, buf);
if (code != TSDB_CODE_SUCCESS) {
return getTableMetaFromMnode(pSql, pTableMetaInfo);
}
@ -2641,6 +2659,7 @@ void tscInitMsgsFp() {
tscProcessMsgRsp[TSDB_SQL_ALTER_DB] = tscProcessAlterDbMsgRsp;
tscProcessMsgRsp[TSDB_SQL_SHOW_CREATE_TABLE] = tscProcessShowCreateRsp;
tscProcessMsgRsp[TSDB_SQL_SHOW_CREATE_STABLE] = tscProcessShowCreateRsp;
tscProcessMsgRsp[TSDB_SQL_SHOW_CREATE_DATABASE] = tscProcessShowCreateRsp;
tscKeepConn[TSDB_SQL_SHOW] = 1;

View File

@ -457,6 +457,7 @@ static bool needToFetchNewBlock(SSqlObj* pSql) {
pCmd->command == TSDB_SQL_FETCH ||
pCmd->command == TSDB_SQL_SHOW ||
pCmd->command == TSDB_SQL_SHOW_CREATE_TABLE ||
pCmd->command == TSDB_SQL_SHOW_CREATE_STABLE ||
pCmd->command == TSDB_SQL_SHOW_CREATE_DATABASE ||
pCmd->command == TSDB_SQL_SELECT ||
pCmd->command == TSDB_SQL_DESCRIBE_TABLE ||
@ -962,7 +963,7 @@ static int tscParseTblNameList(SSqlObj *pSql, const char *tblNameList, int32_t t
len = (int32_t)strtrim(tblName);
SStrToken sToken = {.n = len, .type = TK_ID, .z = tblName};
tSQLGetToken(tblName, &sToken.type);
tGetToken(tblName, &sToken.type);
// Check whether the table name is available
if (tscValidateName(&sToken) != TSDB_CODE_SUCCESS) {

View File

@ -637,7 +637,13 @@ static int32_t tscLaunchRealSubqueries(SSqlObj* pSql) {
int16_t colId = tscGetJoinTagColIdByUid(&pQueryInfo->tagCond, pTableMetaInfo->pTableMeta->id.uid);
// set the tag column id for executor to extract correct tag value
#ifndef _TD_NINGSI_60
pExpr->base.param[0] = (tVariant) {.i64 = colId, .nType = TSDB_DATA_TYPE_BIGINT, .nLen = sizeof(int64_t)};
#else
pExpr->base.param[0].i64 = colId;
pExpr->base.param[0].nType = TSDB_DATA_TYPE_BIGINT;
pExpr->base.param[0].nLen = sizeof(int64_t);
#endif
pExpr->base.numOfParams = 1;
}
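
The #ifndef _TD_NINGSI_60 split above exists only because that toolchain apparently cannot compile an assignment from a compound literal; both branches set the same three tVariant fields. Illustrated on a simplified stand-in struct (the struct and parameters here are hypothetical):

#include <stdint.h>

typedef struct { int64_t i64; int32_t nType; int32_t nLen; } Variant;  /* simplified stand-in for tVariant */

static void set_param(Variant *p, int64_t colId, int32_t bigintType) {
  /* compound-literal assignment (the #ifndef branch above) ... */
  *p = (Variant){.i64 = colId, .nType = bigintType, .nLen = sizeof(int64_t)};
  /* ... is equivalent to the member-by-member form in the #else branch */
  p->i64   = colId;
  p->nType = bigintType;
  p->nLen  = sizeof(int64_t);
}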

View File

@ -1039,6 +1039,7 @@ int32_t tscCopyDataBlockToPayload(SSqlObj* pSql, STableDataBlocks* pDataBlock) {
}
pTableMetaInfo->pTableMeta = tscTableMetaDup(pDataBlock->pTableMeta);
pTableMetaInfo->tableMetaSize = tscGetTableMetaSize(pDataBlock->pTableMeta);
}
/*
@ -1255,6 +1256,8 @@ int32_t tscMergeTableDataBlocks(SSqlObj* pSql, bool freeBlockMap) {
STableDataBlocks* pOneTableBlock = *p;
while(pOneTableBlock) {
SSubmitBlk* pBlocks = (SSubmitBlk*) pOneTableBlock->pData;
if (pBlocks->numOfRows > 0) {
// the maximum expanded size in bytes when row-wise data is converted to the SDataRow format
int32_t expandSize = getRowExpandSize(pOneTableBlock->pTableMeta);
STableDataBlocks* dataBuf = NULL;
@ -1268,7 +1271,6 @@ int32_t tscMergeTableDataBlocks(SSqlObj* pSql, bool freeBlockMap) {
return ret;
}
SSubmitBlk* pBlocks = (SSubmitBlk*) pOneTableBlock->pData;
int64_t destSize = dataBuf->size + pOneTableBlock->size + pBlocks->numOfRows * expandSize + sizeof(STColumn) * tscGetNumOfColumns(pOneTableBlock->pTableMeta);
if (dataBuf->nAllocSize < destSize) {
@ -1316,6 +1318,11 @@ int32_t tscMergeTableDataBlocks(SSqlObj* pSql, bool freeBlockMap) {
pBlocks->dataLen = htonl(finalLen);
dataBuf->numOfTables += 1;
pBlocks->numOfRows = 0;
}else {
tscDebug("0x%"PRIx64" table %s data block is empty", pSql->self, pOneTableBlock->tableName.tname);
}
p = taosHashIterate(pCmd->pTableBlockHashList, p);
if (p == NULL) {
break;
@ -1437,7 +1444,7 @@ int16_t tscFieldInfoGetOffset(SQueryInfo* pQueryInfo, int32_t index) {
return pInfo->pExpr->base.offset;
}
int32_t tscFieldInfoCompare(const SFieldInfo* pFieldInfo1, const SFieldInfo* pFieldInfo2) {
int32_t tscFieldInfoCompare(const SFieldInfo* pFieldInfo1, const SFieldInfo* pFieldInfo2, int32_t *diffSize) {
assert(pFieldInfo1 != NULL && pFieldInfo2 != NULL);
if (pFieldInfo1->numOfOutput != pFieldInfo2->numOfOutput) {
@ -1449,15 +1456,36 @@ int32_t tscFieldInfoCompare(const SFieldInfo* pFieldInfo1, const SFieldInfo* pFi
TAOS_FIELD* pField2 = tscFieldInfoGetField((SFieldInfo*) pFieldInfo2, i);
if (pField1->type != pField2->type ||
pField1->bytes != pField2->bytes ||
strcasecmp(pField1->name, pField2->name) != 0) {
return 1;
}
if (pField1->bytes != pField2->bytes) {
*diffSize = 1;
if (pField2->bytes > pField1->bytes) {
pField1->bytes = pField2->bytes;
}
}
}
return 0;
}
int32_t tscFieldInfoSetSize(const SFieldInfo* pFieldInfo1, const SFieldInfo* pFieldInfo2) {
assert(pFieldInfo1 != NULL && pFieldInfo2 != NULL);
for (int32_t i = 0; i < pFieldInfo1->numOfOutput; ++i) {
TAOS_FIELD* pField1 = tscFieldInfoGetField((SFieldInfo*) pFieldInfo1, i);
TAOS_FIELD* pField2 = tscFieldInfoGetField((SFieldInfo*) pFieldInfo2, i);
pField2->bytes = pField1->bytes;
}
return 0;
}
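
The pair of functions above implements a two-pass width reconciliation for UNION ALL: tscFieldInfoCompare() now records, via diffSize, that two subclauses project the same column with different byte widths and keeps the larger width in the first clause; tscFieldInfoSetSize() then copies those widths to every other clause. A simplified sketch of the same idea on plain arrays (the Field struct here is a stand-in, not the real TAOS_FIELD):

#include <string.h>
#include <strings.h>
#include <stdint.h>

typedef struct { char name[64]; int8_t type; int16_t bytes; } Field;  /* simplified stand-in */

/* Pass 1: verify the clauses are union-compatible and widen the first clause. */
static int compare_and_widen(Field *first, Field *other, int n, int *diffSize) {
  for (int i = 0; i < n; ++i) {
    if (first[i].type != other[i].type || strcasecmp(first[i].name, other[i].name) != 0) {
      return 1;   /* incompatible select lists */
    }
    if (first[i].bytes != other[i].bytes) {
      *diffSize = 1;
      if (other[i].bytes > first[i].bytes) first[i].bytes = other[i].bytes;
    }
  }
  return 0;
}

/* Pass 2: propagate the (possibly widened) sizes of the first clause to another clause. */
static void set_sizes(const Field *first, Field *other, int n) {
  for (int i = 0; i < n; ++i) other[i].bytes = first[i].bytes;
}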
int32_t tscGetResRowLength(SArray* pExprList) {
size_t num = taosArrayGetSize(pExprList);
if (num == 0) {
@ -1867,7 +1895,7 @@ void tscColumnListDestroy(SArray* pColumnList) {
static int32_t validateQuoteToken(SStrToken* pToken) {
tscDequoteAndTrimToken(pToken);
int32_t k = tSQLGetToken(pToken->z, &pToken->type);
int32_t k = tGetToken(pToken->z, &pToken->type);
if (pToken->type == TK_STRING) {
return tscValidateName(pToken);
@ -1935,7 +1963,7 @@ int32_t tscValidateName(SStrToken* pToken) {
tscStrToLower(pToken->z, pToken->n);
//pToken->n = (uint32_t)strtrim(pToken->z);
int len = tSQLGetToken(pToken->z, &pToken->type);
int len = tGetToken(pToken->z, &pToken->type);
// single token, validate it
if (len == pToken->n) {
@ -1961,7 +1989,7 @@ int32_t tscValidateName(SStrToken* pToken) {
pToken->n = (uint32_t)strtrim(pToken->z);
}
pToken->n = tSQLGetToken(pToken->z, &pToken->type);
pToken->n = tGetToken(pToken->z, &pToken->type);
if (pToken->z[pToken->n] != TS_PATH_DELIMITER[0]) {
return TSDB_CODE_TSC_INVALID_SQL;
}
@ -1978,7 +2006,7 @@ int32_t tscValidateName(SStrToken* pToken) {
pToken->z = sep + 1;
pToken->n = (uint32_t)(oldLen - (sep - pStr) - 1);
int32_t len = tSQLGetToken(pToken->z, &pToken->type);
int32_t len = tGetToken(pToken->z, &pToken->type);
if (len != pToken->n || (pToken->type != TK_STRING && pToken->type != TK_ID)) {
return TSDB_CODE_TSC_INVALID_SQL;
}
@ -2431,6 +2459,11 @@ STableMetaInfo* tscAddTableMetaInfo(SQueryInfo* pQueryInfo, SName* name, STableM
}
pTableMetaInfo->pTableMeta = pTableMeta;
if (pTableMetaInfo->pTableMeta == NULL) {
pTableMetaInfo->tableMetaSize = 0;
} else {
pTableMetaInfo->tableMetaSize = tscGetTableMetaSize(pTableMeta);
}
if (vgroupList != NULL) {
pTableMetaInfo->vgroupList = tscVgroupInfoClone(vgroupList);
@ -2706,6 +2739,7 @@ SSqlObj* createSubqueryObj(SSqlObj* pSql, int16_t tableIndex, __async_cb_func_t
pFinalInfo = tscAddTableMetaInfo(pNewQueryInfo, &pTableMetaInfo->name, pTableMeta, pTableMetaInfo->vgroupList,
pTableMetaInfo->tagColList, pTableMetaInfo->pVgroupTables);
} else { // transfer the ownership of pTableMeta to the newly created sql object.
STableMetaInfo* pPrevInfo = tscGetTableMetaInfoFromCmd(&pPrevSql->cmd, pPrevSql->cmd.clauseIndex, 0);
if (pPrevInfo->pTableMeta && pPrevInfo->pTableMeta->tableType < 0) {
@ -3100,7 +3134,13 @@ void tscTryQueryNextClause(SSqlObj* pSql, __async_cb_func_t fp) {
//backup the total number of result first
int64_t num = pRes->numOfTotal + pRes->numOfClauseTotal;
// Don't free 'final' since it may be recorded and used later by the application
TAOS_FIELD* finalBk = pRes->final;
pRes->final = NULL;
tscFreeSqlResult(pSql);
pRes->final = finalBk;
pRes->numOfTotal = num;
@ -3333,11 +3373,11 @@ CChildTableMeta* tscCreateChildMeta(STableMeta* pTableMeta) {
return cMeta;
}
int32_t tscCreateTableMetaFromCChildMeta(STableMeta* pChild, const char* name) {
assert(pChild != NULL);
int32_t tscCreateTableMetaFromCChildMeta(STableMeta* pChild, const char* name, void* buf) {
assert(pChild != NULL && buf != NULL);
uint32_t size = tscGetTableMetaMaxSize();
STableMeta* p = calloc(1, size);
// uint32_t size = tscGetTableMetaMaxSize();
STableMeta* p = buf;//calloc(1, size);
taosHashGetClone(tscTableMetaInfo, pChild->sTableName, strnlen(pChild->sTableName, TSDB_TABLE_FNAME_LEN), NULL, p, -1);
if (p->id.uid > 0) { // tableMeta exists, build child table meta and return
@ -3349,12 +3389,12 @@ int32_t tscCreateTableMetaFromCChildMeta(STableMeta* pChild, const char* name) {
memcpy(pChild->schema, p->schema, sizeof(SSchema) *total);
tfree(p);
// tfree(p);
return TSDB_CODE_SUCCESS;
} else { // super table has been removed, current tableMeta is also expired. remove it here
taosHashRemove(tscTableMetaInfo, name, strnlen(name, TSDB_TABLE_FNAME_LEN));
tfree(p);
// tfree(p);
return -1;
}
}

View File

@ -4,7 +4,7 @@
#include <iostream>
#include "taos.h"
#include "tstoken.h"
#include "ttoken.h"
#include "tutil.h"
int main(int argc, char** argv) {

View File

@ -80,6 +80,7 @@ enum {
TSDB_DEFINE_SQL_TYPE( TSDB_SQL_TABLE_JOIN_RETRIEVE, "join-retrieve" )
TSDB_DEFINE_SQL_TYPE( TSDB_SQL_SHOW_CREATE_TABLE, "show-create-table")
TSDB_DEFINE_SQL_TYPE( TSDB_SQL_SHOW_CREATE_STABLE, "show-create-stable")
TSDB_DEFINE_SQL_TYPE( TSDB_SQL_SHOW_CREATE_DATABASE, "show-create-database")
/*

View File

@ -89,9 +89,6 @@ tExprNode* exprdup(tExprNode* pTree);
bool exprTreeApplyFilter(tExprNode *pExpr, const void *pItem, SExprTraverseSupp *param);
typedef void (*_arithmetic_operator_fn_t)(void *left, int32_t numLeft, int32_t leftType, void *right, int32_t numRight,
int32_t rightType, void *output, int32_t order);
void arithmeticTreeTraverse(tExprNode *pExprs, int32_t numOfRows, char *pOutput, void *param, int32_t order,
char *(*cb)(void *, const char*, int32_t));

View File

@ -18,7 +18,7 @@
#include "os.h"
#include "taosmsg.h"
#include "tstoken.h"
#include "ttoken.h"
#include "tvariant.h"
typedef struct SDataStatis {

View File

@ -16,8 +16,8 @@
#ifndef TDENGINE_TVARIANT_H
#define TDENGINE_TVARIANT_H
#include "tstoken.h"
#include "tarray.h"
#include "ttoken.h"
#ifdef __cplusplus
extern "C" {

View File

@ -2,7 +2,7 @@
#include "tutil.h"
#include "tname.h"
#include "tstoken.h"
#include "ttoken.h"
#include "tvariant.h"
#define VALIDNUMOFCOLS(x) ((x) >= TSDB_MIN_COLUMNS && (x) <= TSDB_MAX_COLUMNS)

View File

@ -14,14 +14,14 @@
*/
#include "os.h"
#include "tvariant.h"
#include "hash.h"
#include "taos.h"
#include "taosdef.h"
#include "tstoken.h"
#include "ttoken.h"
#include "ttokendef.h"
#include "tutil.h"
#include "ttype.h"
#include "tutil.h"
#include "tvariant.h"
void tVariantCreate(tVariant *pVar, SStrToken *token) {
int32_t ret = 0;
@ -49,7 +49,7 @@ void tVariantCreate(tVariant *pVar, SStrToken *token) {
ret = tStrToInteger(token->z, token->type, token->n, &pVar->i64, true);
if (ret != 0) {
SStrToken t = {0};
tSQLGetToken(token->z, &t.type);
tGetToken(token->z, &t.type);
if (t.type == TK_MINUS) { // it is a signed number which is greater than INT64_MAX or less than INT64_MIN
pVar->nType = -1; // -1 means error type
return;
@ -460,7 +460,7 @@ static FORCE_INLINE int32_t convertToInteger(tVariant *pVariant, int64_t *result
*result = (int64_t) pVariant->dKey;
} else if (pVariant->nType == TSDB_DATA_TYPE_BINARY) {
SStrToken token = {.z = pVariant->pz, .n = pVariant->nLen};
/*int32_t n = */tSQLGetToken(pVariant->pz, &token.type);
/*int32_t n = */tGetToken(pVariant->pz, &token.type);
if (token.type == TK_NULL) {
if (releaseVariantPtr) {
@ -495,10 +495,10 @@ static FORCE_INLINE int32_t convertToInteger(tVariant *pVariant, int64_t *result
wchar_t *endPtr = NULL;
SStrToken token = {0};
token.n = tSQLGetToken(pVariant->pz, &token.type);
token.n = tGetToken(pVariant->pz, &token.type);
if (token.type == TK_MINUS || token.type == TK_PLUS) {
token.n = tSQLGetToken(pVariant->pz + token.n, &token.type);
token.n = tGetToken(pVariant->pz + token.n, &token.type);
}
if (token.type == TK_FLOAT) {

View File

@ -8,7 +8,7 @@ IF (TD_MVN_INSTALLED)
ADD_CUSTOM_COMMAND(OUTPUT ${JDBC_CMD_NAME}
POST_BUILD
COMMAND mvn -Dmaven.test.skip=true install -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.28-dist.jar ${LIBRARY_OUTPUT_PATH}
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_SOURCE_DIR}/target/taos-jdbcdriver-2.0.29.jar ${LIBRARY_OUTPUT_PATH}
COMMAND mvn -Dmaven.test.skip=true clean -f ${CMAKE_CURRENT_SOURCE_DIR}/pom.xml
COMMENT "build jdbc driver")
ADD_CUSTOM_TARGET(${JDBC_TARGET_NAME} ALL WORKING_DIRECTORY ${EXECUTABLE_OUTPUT_PATH} DEPENDS ${JDBC_CMD_NAME})

View File

@ -5,7 +5,7 @@
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.28</version>
<version>2.0.29</version>
<packaging>jar</packaging>
<name>JDBCDriver</name>

View File

@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>com.taosdata.jdbc</groupId>
<artifactId>taos-jdbcdriver</artifactId>
<version>2.0.28</version>
<version>2.0.29</version>
<packaging>jar</packaging>
<name>JDBCDriver</name>
<url>https://github.com/taosdata/TDengine/tree/master/src/connector/jdbc</url>

View File

@ -84,9 +84,11 @@ public abstract class AbstractResultSet extends WrapperImpl implements ResultSet
}
@Override
@Deprecated
public InputStream getUnicodeStream(int columnIndex) throws SQLException {
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_RESULTSET_CLOSED);
}
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
}
@ -171,6 +173,7 @@ public abstract class AbstractResultSet extends WrapperImpl implements ResultSet
}
@Override
@Deprecated
public InputStream getUnicodeStream(String columnLabel) throws SQLException {
return getUnicodeStream(findColumn(columnLabel));
}

View File

@ -49,7 +49,7 @@ public class TSDBConnection extends AbstractConnection {
this.databaseMetaData.setConnection(this);
}
public TSDBJNIConnector getConnection() {
public TSDBJNIConnector getConnector() {
return this.connector;
}
@ -58,7 +58,7 @@ public class TSDBConnection extends AbstractConnection {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_CONNECTION_CLOSED);
}
return new TSDBStatement(this, this.connector);
return new TSDBStatement(this);
}
public TSDBSubscribe subscribe(String topic, String sql, boolean restart) throws SQLException {
@ -74,14 +74,18 @@ public class TSDBConnection extends AbstractConnection {
}
public PreparedStatement prepareStatement(String sql) throws SQLException {
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_CONNECTION_CLOSED);
return new TSDBPreparedStatement(this, this.connector, sql);
}
return new TSDBPreparedStatement(this, sql);
}
public void close() throws SQLException {
if (isClosed)
if (isClosed) {
return;
}
this.connector.closeConnection();
this.isClosed = true;
}

View File

@ -104,7 +104,7 @@ public class TSDBDriver extends AbstractDriver {
static {
try {
java.sql.DriverManager.registerDriver(new TSDBDriver());
DriverManager.registerDriver(new TSDBDriver());
} catch (SQLException e) {
throw TSDBError.createRuntimeException(TSDBErrorNumbers.ERROR_CANNOT_REGISTER_JNI_DRIVER, e);
}

View File

@ -18,6 +18,7 @@ package com.taosdata.jdbc;
import com.taosdata.jdbc.utils.TaosInfo;
import java.nio.ByteBuffer;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.util.List;
@ -29,10 +30,13 @@ public class TSDBJNIConnector {
private static volatile Boolean isInitialized = false;
private TaosInfo taosInfo = TaosInfo.getInstance();
// Connection pointer used in C
private long taos = TSDBConstants.JNI_NULL_POINTER;
// result set status in current connection
private boolean isResultsetClosed = true;
private boolean isResultsetClosed;
private int affectedRows = -1;
static {
@ -75,7 +79,6 @@ public class TSDBJNIConnector {
public boolean connect(String host, int port, String dbName, String user, String password) throws SQLException {
if (this.taos != TSDBConstants.JNI_NULL_POINTER) {
// this.closeConnectionImp(this.taos);
closeConnection();
this.taos = TSDBConstants.JNI_NULL_POINTER;
}
@ -97,12 +100,6 @@ public class TSDBJNIConnector {
* @throws SQLException
*/
public long executeQuery(String sql) throws SQLException {
// close previous result set if the user forgets to invoke the
// free method to close previous result set.
// if (!this.isResultsetClosed) {
// freeResultSet(taosResultSetPointer);
// }
Long pSql = 0l;
try {
pSql = this.executeQueryImp(sql.getBytes(TaosGlobalConfig.getCharset()), this.taos);
@ -135,6 +132,7 @@ public class TSDBJNIConnector {
// Try retrieving result set for the executed SQL using the current connection pointer.
pSql = this.getResultSetImp(this.taos, pSql);
// if pSql == 0L that means resultset is closed
isResultsetClosed = (pSql == TSDBConstants.JNI_NULL_POINTER);
return pSql;
@ -172,34 +170,11 @@ public class TSDBJNIConnector {
* Free result set operation from C to release result set pointer by JNI
*/
public int freeResultSet(long pSql) {
int res = TSDBConstants.JNI_SUCCESS;
// if (result != taosResultSetPointer && taosResultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
// throw new RuntimeException("Invalid result set pointer");
// }
// if (taosResultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
res = this.freeResultSetImp(this.taos, pSql);
// taosResultSetPointer = TSDBConstants.JNI_NULL_POINTER;
// }
int res = this.freeResultSetImp(this.taos, pSql);
isResultsetClosed = true;
return res;
}
/**
* Close the open result set which is associated to the current connection. If the result set is already
* closed, return 0 for success.
*/
// public int freeResultSet() {
// int resCode = TSDBConstants.JNI_SUCCESS;
// if (!isResultsetClosed) {
// resCode = this.freeResultSetImp(this.taos, this.taosResultSetPointer);
// taosResultSetPointer = TSDBConstants.JNI_NULL_POINTER;
// isResultsetClosed = true;
// }
// return resCode;
// }
private native int freeResultSetImp(long connection, long result);
/**
@ -246,6 +221,7 @@ public class TSDBJNIConnector {
*/
public void closeConnection() throws SQLException {
int code = this.closeConnectionImp(this.taos);
if (code < 0) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_JNI_CONNECTION_NULL);
} else if (code == 0) {
@ -253,6 +229,7 @@ public class TSDBJNIConnector {
} else {
throw new SQLException("Undefined error code returned by TDengine when closing a connection");
}
// invoke closeConnectionImpl only here
taosInfo.connect_close_increment();
}
@ -289,7 +266,7 @@ public class TSDBJNIConnector {
private native void unsubscribeImp(long subscription, boolean isKeep);
/**
* Validate if a <I>create table</I> sql statement is correct without actually creating that table
* Validate if a <I>create table</I> SQL statement is correct without actually creating that table
*/
public boolean validateCreateTableSql(String sql) {
int res = validateCreateTableSqlImp(taos, sql.getBytes());
@ -297,4 +274,66 @@ public class TSDBJNIConnector {
}
private native int validateCreateTableSqlImp(long connection, byte[] sqlBytes);
public long prepareStmt(String sql) throws SQLException {
Long stmt = 0L;
try {
stmt = prepareStmtImp(sql.getBytes(), this.taos);
} catch (Exception e) {
e.printStackTrace();
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_ENCODING);
}
if (stmt == TSDBConstants.JNI_CONNECTION_NULL) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_JNI_CONNECTION_NULL);
}
if (stmt == TSDBConstants.JNI_SQL_NULL) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_JNI_SQL_NULL);
}
if (stmt == TSDBConstants.JNI_OUT_OF_MEMORY) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_JNI_OUT_OF_MEMORY);
}
return stmt;
}
private native long prepareStmtImp(byte[] sql, long con);
public void setBindTableName(long stmt, String tableName) throws SQLException {
int code = setBindTableNameImp(stmt, tableName, this.taos);
if (code != TSDBConstants.JNI_SUCCESS) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "failed to set table name");
}
}
private native int setBindTableNameImp(long stmt, String name, long conn);
public void bindColumnDataArray(long stmt, ByteBuffer colDataList, ByteBuffer lengthList, ByteBuffer isNullList, int type, int bytes, int numOfRows,int columnIndex) throws SQLException {
int code = bindColDataImp(stmt, colDataList.array(), lengthList.array(), isNullList.array(), type, bytes, numOfRows, columnIndex, this.taos);
if (code != TSDBConstants.JNI_SUCCESS) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "failed to bind column data");
}
}
private native int bindColDataImp(long stmt, byte[] colDataList, byte[] lengthList, byte[] isNullList, int type, int bytes, int numOfRows, int columnIndex, long conn);
public void executeBatch(long stmt) throws SQLException {
int code = executeBatchImp(stmt, this.taos);
if (code != TSDBConstants.JNI_SUCCESS) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "failed to execute batch bind");
}
}
private native int executeBatchImp(long stmt, long con);
public void closeBatch(long stmt) throws SQLException {
int code = closeStmt(stmt, this.taos);
if (code != TSDBConstants.JNI_SUCCESS) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "failed to close batch bind");
}
}
private native int closeStmt(long stmt, long con);
}
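
For clarity, below is a minimal sketch of how the new statement-level methods above could be driven directly. The already-connected `connector` instance, the SQL shape and the table `weather(ts timestamp, f1 int)` are assumptions for illustration only; in practice these calls are wrapped by TSDBPreparedStatement.

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.sql.SQLException;

import com.taosdata.jdbc.TSDBConstants;
import com.taosdata.jdbc.TSDBJNIConnector;

public class StmtBindSketch {

    // "connector" is assumed to be an already connected TSDBJNIConnector;
    // the table "weather" (ts TIMESTAMP, f1 INT) is a placeholder schema.
    static void insertBatch(TSDBJNIConnector connector) throws SQLException {
        final int rows = 2;
        long stmt = connector.prepareStmt("insert into ? values(?, ?)");
        connector.setBindTableName(stmt, "weather");

        // Buffers follow the layout produced by columnDataAddBatch():
        // column values, per-row lengths and per-row null flags, all little-endian.
        ByteBuffer tsData = ByteBuffer.allocate(rows * Long.BYTES).order(ByteOrder.LITTLE_ENDIAN);
        ByteBuffer tsLen  = ByteBuffer.allocate(rows * Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN);
        ByteBuffer tsNull = ByteBuffer.allocate(rows * Byte.BYTES).order(ByteOrder.LITTLE_ENDIAN);
        ByteBuffer f1Data = ByteBuffer.allocate(rows * Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN);
        ByteBuffer f1Len  = ByteBuffer.allocate(rows * Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN);
        ByteBuffer f1Null = ByteBuffer.allocate(rows * Byte.BYTES).order(ByteOrder.LITTLE_ENDIAN);

        long now = System.currentTimeMillis();
        for (int i = 0; i < rows; i++) {
            tsData.putLong(now + i);  tsLen.putInt(Long.BYTES);    tsNull.put((byte) 0); // 0 = not null
            f1Data.putInt(100 + i);   f1Len.putInt(Integer.BYTES); f1Null.put((byte) 0);
        }

        connector.bindColumnDataArray(stmt, tsData, tsLen, tsNull,
                TSDBConstants.TSDB_DATA_TYPE_TIMESTAMP, Long.BYTES, rows, 0);
        connector.bindColumnDataArray(stmt, f1Data, f1Len, f1Null,
                TSDBConstants.TSDB_DATA_TYPE_INT, Integer.BYTES, rows, 1);

        connector.executeBatch(stmt);  // flush the bound rows
        connector.closeBatch(stmt);    // release the native statement handle
    }
}
```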

View File

@ -18,33 +18,40 @@ import com.taosdata.jdbc.utils.Utils;
import java.io.InputStream;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.sql.*;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/*
* TDengine only supports a subset of the standard SQL, thus this implemetation of the
* TDengine only supports a subset of the standard SQL, thus this implementation of the
* standard JDBC API contains more or less some adjustments customized for certain
* compatibility needs.
*/
public class TSDBPreparedStatement extends TSDBStatement implements PreparedStatement {
private String rawSql;
private Object[] parameters;
private boolean isPrepared;
private ArrayList<ColumnInfo> colData;
private String tableName;
private long nativeStmtHandle = 0;
private volatile TSDBParameterMetaData parameterMetaData;
TSDBPreparedStatement(TSDBConnection connection, TSDBJNIConnector connecter, String sql) {
super(connection, connecter);
TSDBPreparedStatement(TSDBConnection connection, String sql) {
super(connection);
init(sql);
if (sql.contains("?")) {
int parameterCnt = 0;
if (sql.contains("?")) {
for (int i = 0; i < sql.length(); i++) {
if ('?' == sql.charAt(i)) {
parameterCnt++;
@ -53,6 +60,12 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
parameters = new Object[parameterCnt];
this.isPrepared = true;
}
if (parameterCnt > 1) {
// the table name is also a parameter, so ignore it.
this.colData = new ArrayList<ColumnInfo>(parameterCnt - 1);
this.colData.addAll(Collections.nCopies(parameterCnt - 1, null));
}
}
private void init(String sql) {
@ -260,10 +273,14 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
@Override
public void setObject(int parameterIndex, Object x) throws SQLException {
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
if (parameterIndex < 1 && parameterIndex >= parameters.length)
}
if (parameterIndex < 1 && parameterIndex >= parameters.length) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_PARAMETER_INDEX_OUT_RANGE);
}
parameters[parameterIndex - 1] = x;
}
@ -300,8 +317,9 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
@Override
public void setRef(int parameterIndex, Ref x) throws SQLException {
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
}
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
}
@ -515,4 +533,276 @@ public class TSDBPreparedStatement extends TSDBStatement implements PreparedStat
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNSUPPORTED_METHOD);
}
///////////////////////////////////////////////////////////////////////
// NOTE: the following APIs are not JDBC compatible
// set the bind table name
private static class ColumnInfo {
@SuppressWarnings("rawtypes")
private ArrayList data;
private int type;
private int bytes;
private boolean typeIsSet;
public ColumnInfo() {
this.typeIsSet = false;
}
public void setType(int type) throws SQLException {
if (this.isTypeSet()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "column data type has been set");
}
this.typeIsSet = true;
this.type = type;
}
public boolean isTypeSet() {
return this.typeIsSet;
}
};
public void setTableName(String name) {
this.tableName = name;
}
public <T> void setValueImpl(int columnIndex, ArrayList<T> list, int type, int bytes) throws SQLException {
ColumnInfo col = (ColumnInfo) this.colData.get(columnIndex);
if (col == null) {
ColumnInfo p = new ColumnInfo();
p.setType(type);
p.bytes = bytes;
p.data = (ArrayList<?>) list.clone();
this.colData.set(columnIndex, p);
} else {
if (col.type != type) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "column data type mismatch");
}
col.data.addAll(list);
}
}
public void setInt(int columnIndex, ArrayList<Integer> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_INT, Integer.BYTES);
}
public void setFloat(int columnIndex, ArrayList<Float> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_FLOAT, Float.BYTES);
}
public void setTimestamp(int columnIndex, ArrayList<Long> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_TIMESTAMP, Long.BYTES);
}
public void setLong(int columnIndex, ArrayList<Long> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_BIGINT, Long.BYTES);
}
public void setDouble(int columnIndex, ArrayList<Double> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_DOUBLE, Double.BYTES);
}
public void setBoolean(int columnIndex, ArrayList<Boolean> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_BOOL, Byte.BYTES);
}
public void setByte(int columnIndex, ArrayList<Byte> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_TINYINT, Byte.BYTES);
}
public void setShort(int columnIndex, ArrayList<Short> list) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_SMALLINT, Short.BYTES);
}
public void setString(int columnIndex, ArrayList<String> list, int size) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_BINARY, size);
}
// note: expand the required space for each NChar character
public void setNString(int columnIndex, ArrayList<String> list, int size) throws SQLException {
setValueImpl(columnIndex, list, TSDBConstants.TSDB_DATA_TYPE_NCHAR, size * Integer.BYTES);
}
public void columnDataAddBatch() throws SQLException {
// pass the data block to native code
if (rawSql == null) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "sql statement not set yet");
}
// table name is not set yet, abort
if (this.tableName == null) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "table name not set yet");
}
int numOfCols = this.colData.size();
if (numOfCols == 0) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "column data not bind");
}
TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector();
this.nativeStmtHandle = connector.prepareStmt(rawSql);
connector.setBindTableName(this.nativeStmtHandle, this.tableName);
ColumnInfo colInfo = (ColumnInfo) this.colData.get(0);
if (colInfo == null) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "column data not bind");
}
int rows = colInfo.data.size();
for (int i = 0; i < numOfCols; ++i) {
ColumnInfo col1 = this.colData.get(i);
if (col1 == null || !col1.isTypeSet()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "column data not bind");
}
if (rows != col1.data.size()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "the rows in column data not identical");
}
ByteBuffer colDataList = ByteBuffer.allocate(rows * col1.bytes);
colDataList.order(ByteOrder.LITTLE_ENDIAN);
ByteBuffer lengthList = ByteBuffer.allocate(rows * Integer.BYTES);
lengthList.order(ByteOrder.LITTLE_ENDIAN);
ByteBuffer isNullList = ByteBuffer.allocate(rows * Byte.BYTES);
isNullList.order(ByteOrder.LITTLE_ENDIAN);
switch (col1.type) {
case TSDBConstants.TSDB_DATA_TYPE_INT: {
for (int j = 0; j < rows; ++j) {
Integer val = (Integer) col1.data.get(j);
colDataList.putInt(val == null? Integer.MIN_VALUE:val);
isNullList.put((byte) (val == null? 1:0));
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_TINYINT: {
for (int j = 0; j < rows; ++j) {
Byte val = (Byte) col1.data.get(j);
colDataList.put(val == null? 0:val);
isNullList.put((byte) (val == null? 1:0));
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_BOOL: {
for (int j = 0; j < rows; ++j) {
Boolean val = (Boolean) col1.data.get(j);
if (val == null) {
colDataList.put((byte) 0);
} else {
colDataList.put((byte) (val? 1:0));
}
isNullList.put((byte) (val == null? 1:0));
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_SMALLINT: {
for (int j = 0; j < rows; ++j) {
Short val = (Short) col1.data.get(j);
colDataList.putShort(val == null? 0:val);
isNullList.put((byte) (val == null? 1:0));
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_TIMESTAMP:
case TSDBConstants.TSDB_DATA_TYPE_BIGINT: {
for (int j = 0; j < rows; ++j) {
Long val = (Long) col1.data.get(j);
colDataList.putLong(val == null? 0:val);
isNullList.put((byte) (val == null? 1:0));
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_FLOAT: {
for (int j = 0; j < rows; ++j) {
Float val = (Float) col1.data.get(j);
colDataList.putFloat(val == null? 0:val);
isNullList.put((byte) (val == null? 1:0));
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_DOUBLE: {
for (int j = 0; j < rows; ++j) {
Double val = (Double) col1.data.get(j);
colDataList.putDouble(val == null? 0:val);
isNullList.put((byte) (val == null? 1:0));
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_NCHAR:
case TSDBConstants.TSDB_DATA_TYPE_BINARY: {
String charset = TaosGlobalConfig.getCharset();
for (int j = 0; j < rows; ++j) {
String val = (String) col1.data.get(j);
colDataList.position(j * col1.bytes); // seek to the correct position
if (val != null) {
byte[] b = null;
try {
if (col1.type == TSDBConstants.TSDB_DATA_TYPE_BINARY) {
b = val.getBytes();
} else {
b = val.getBytes(charset);
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
if (val.length() > col1.bytes) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "string data too long");
}
colDataList.put(b);
lengthList.putInt(b.length);
isNullList.put((byte) 0);
} else {
lengthList.putInt(0);
isNullList.put((byte) 1);
}
}
break;
}
case TSDBConstants.TSDB_DATA_TYPE_UTINYINT:
case TSDBConstants.TSDB_DATA_TYPE_USMALLINT:
case TSDBConstants.TSDB_DATA_TYPE_UINT:
case TSDBConstants.TSDB_DATA_TYPE_UBIGINT: {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_UNKNOWN, "not support data types");
}
};
connector.bindColumnDataArray(this.nativeStmtHandle, colDataList, lengthList, isNullList, col1.type, col1.bytes, rows, i);
}
}
public void columnDataExecuteBatch() throws SQLException {
TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector();
connector.executeBatch(this.nativeStmtHandle);
this.columnDataClearBatch();
}
public void columnDataClearBatch() {
int size = this.colData.size();
this.colData.clear();
this.colData.addAll(Collections.nCopies(size, null));
this.tableName = null; // clear the table name
}
public void columnDataCloseBatch() throws SQLException {
TSDBJNIConnector connector = ((TSDBConnection) this.getConnection()).getConnector();
connector.closeBatch(this.nativeStmtHandle);
this.nativeStmtHandle = 0L;
this.tableName = null;
}
}
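
As a usage note, here is a minimal sketch of how the non-JDBC batch-bind APIs above might be combined. The JDBC URL, database and the table `weather(ts timestamp, f1 int)` are illustrative assumptions, not part of this change.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;

import com.taosdata.jdbc.TSDBPreparedStatement;

public class BatchBindSketch {
    public static void main(String[] args) throws SQLException {
        // URL, database and table names are placeholders for illustration.
        String url = "jdbc:TAOS://127.0.0.1:6030/test?user=root&password=taosdata";
        try (Connection conn = DriverManager.getConnection(url)) {
            // The table name itself is the first '?' parameter.
            String sql = "insert into ? values(?, ?)";
            TSDBPreparedStatement pstmt = (TSDBPreparedStatement) conn.prepareStatement(sql);

            pstmt.setTableName("weather");

            long now = System.currentTimeMillis();
            ArrayList<Long> tsList = new ArrayList<>(Arrays.asList(now, now + 1));
            ArrayList<Integer> valList = new ArrayList<>(Arrays.asList(100, 101));

            pstmt.setTimestamp(0, tsList); // bind the first value column (timestamp)
            pstmt.setInt(1, valList);      // bind the second value column (int)

            pstmt.columnDataAddBatch();     // pack the bound columns and hand them to the native layer
            pstmt.columnDataExecuteBatch(); // execute and clear the batch
            pstmt.columnDataCloseBatch();   // release the native statement handle
            pstmt.close();
        }
    }
}
```

Note that the column index passed to setInt/setTimestamp and friends is 0-based over the value columns only, because the constructor reserves the first `?` for the table name and sizes colData as parameterCnt - 1.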

View File

@ -109,6 +109,8 @@ public class TSDBResultSet extends AbstractResultSet implements ResultSet {
public void close() throws SQLException {
if (isClosed)
return;
if (this.statement == null)
return;
if (this.jniConnector != null) {
int code = this.jniConnector.freeResultSet(this.resultSetPointer);
if (code == TSDBConstants.JNI_CONNECTION_NULL) {
@ -461,12 +463,13 @@ public class TSDBResultSet extends AbstractResultSet implements ResultSet {
}
public boolean isClosed() throws SQLException {
if (isClosed)
return true;
if (jniConnector != null) {
isClosed = jniConnector.isResultsetClosed();
}
return isClosed;
// if (isClosed)
// return true;
// if (jniConnector != null) {
// isClosed = jniConnector.isResultsetClosed();
// }
// return isClosed;
}
public String getNString(int columnIndex) throws SQLException {

View File

@ -29,6 +29,8 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.taosdata.jdbc.utils.NullType;
public class TSDBResultSetBlockData {
private int numOfRows = 0;
private int rowIndex = 0;
@ -164,59 +166,7 @@ public class TSDBResultSetBlockData {
}
}
private static class NullType {
private static final byte NULL_BOOL_VAL = 0x2;
private static final String NULL_STR = "null";
public String toString() {
return NullType.NULL_STR;
}
public static boolean isBooleanNull(byte val) {
return val == NullType.NULL_BOOL_VAL;
}
private static boolean isTinyIntNull(byte val) {
return val == Byte.MIN_VALUE;
}
private static boolean isSmallIntNull(short val) {
return val == Short.MIN_VALUE;
}
private static boolean isIntNull(int val) {
return val == Integer.MIN_VALUE;
}
private static boolean isBigIntNull(long val) {
return val == Long.MIN_VALUE;
}
private static boolean isFloatNull(float val) {
return Float.isNaN(val);
}
private static boolean isDoubleNull(double val) {
return Double.isNaN(val);
}
private static boolean isBinaryNull(byte[] val, int length) {
if (length != Byte.BYTES) {
return false;
}
return val[0] == 0xFF;
}
private static boolean isNcharNull(byte[] val, int length) {
if (length != Integer.BYTES) {
return false;
}
return (val[0] & val[1] & val[2] & val[3]) == 0xFF;
}
}
/**
* The original type may not be a string type, but will be converted to by
@ -488,8 +438,8 @@ public class TSDBResultSetBlockData {
}
try {
String ss = TaosGlobalConfig.getCharset();
return new String(dest, ss);
String charset = TaosGlobalConfig.getCharset();
return new String(dest, charset);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}

View File

@ -84,6 +84,7 @@ public class TSDBResultSetRowData {
data.set(col, value);
}
@SuppressWarnings("deprecation")
public int getInt(int col, int srcType) throws SQLException {
Object obj = data.get(col);
@ -128,7 +129,7 @@ public class TSDBResultSetRowData {
long value = (long) obj;
if (value < 0)
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_NUMERIC_VALUE_OUT_OF_RANGE);
return new Long(value).intValue();
return Long.valueOf(value).intValue();
}
}

View File

@ -19,8 +19,6 @@ import java.sql.ResultSet;
import java.sql.SQLException;
public class TSDBStatement extends AbstractStatement {
private TSDBJNIConnector connector;
/**
* Status of current statement
*/
@ -29,29 +27,26 @@ public class TSDBStatement extends AbstractStatement {
private TSDBConnection connection;
private TSDBResultSet resultSet;
public void setConnection(TSDBConnection connection) {
TSDBStatement(TSDBConnection connection) {
this.connection = connection;
}
TSDBStatement(TSDBConnection connection, TSDBJNIConnector connector) {
this.connection = connection;
this.connector = connector;
}
public ResultSet executeQuery(String sql) throws SQLException {
// check if closed
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
}
//TODO: if an INSERT statement is passed to executeQuery, the SQL is executed first and only afterwards pSql is checked to see whether it is an INSERT; by that point the INSERT has already succeeded
// execute query
long pSql = this.connector.executeQuery(sql);
long pSql = this.connection.getConnector().executeQuery(sql);
// if pSql is create/insert/update/delete/alter SQL
if (this.connector.isUpdateQuery(pSql)) {
this.connector.freeResultSet(pSql);
if (this.connection.getConnector().isUpdateQuery(pSql)) {
this.connection.getConnector().freeResultSet(pSql);
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_INVALID_WITH_EXECUTEQUERY);
}
TSDBResultSet res = new TSDBResultSet(this, this.connector, pSql);
TSDBResultSet res = new TSDBResultSet(this, this.connection.getConnector(), pSql);
res.setBatchFetch(this.connection.getBatchFetch());
return res;
}
@ -60,14 +55,14 @@ public class TSDBStatement extends AbstractStatement {
if (isClosed())
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
long pSql = this.connector.executeQuery(sql);
long pSql = this.connection.getConnector().executeQuery(sql);
// if pSql is create/insert/update/delete/alter SQL
if (!this.connector.isUpdateQuery(pSql)) {
this.connector.freeResultSet(pSql);
if (!this.connection.getConnector().isUpdateQuery(pSql)) {
this.connection.getConnector().freeResultSet(pSql);
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_INVALID_WITH_EXECUTEUPDATE);
}
int affectedRows = this.connector.getAffectedRows(pSql);
this.connector.freeResultSet(pSql);
int affectedRows = this.connection.getConnector().getAffectedRows(pSql);
this.connection.getConnector().freeResultSet(pSql);
return affectedRows;
}
@ -81,30 +76,29 @@ public class TSDBStatement extends AbstractStatement {
public boolean execute(String sql) throws SQLException {
// check if closed
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
}
// execute query
long pSql = this.connector.executeQuery(sql);
long pSql = this.connection.getConnector().executeQuery(sql);
// if pSql is create/insert/update/delete/alter SQL
if (this.connector.isUpdateQuery(pSql)) {
this.affectedRows = this.connector.getAffectedRows(pSql);
this.connector.freeResultSet(pSql);
if (this.connection.getConnector().isUpdateQuery(pSql)) {
this.affectedRows = this.connection.getConnector().getAffectedRows(pSql);
this.connection.getConnector().freeResultSet(pSql);
return false;
}
this.resultSet = new TSDBResultSet(this, this.connector, pSql);
this.resultSet = new TSDBResultSet(this, this.connection.getConnector(), pSql);
this.resultSet.setBatchFetch(this.connection.getBatchFetch());
return true;
}
public ResultSet getResultSet() throws SQLException {
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
// long resultSetPointer = connector.getResultSet();
// TSDBResultSet resSet = null;
// if (resultSetPointer != TSDBConstants.JNI_NULL_POINTER) {
// resSet = new TSDBResultSet(connector, resultSetPointer);
// }
}
return this.resultSet;
}
@ -115,13 +109,21 @@ public class TSDBStatement extends AbstractStatement {
}
public Connection getConnection() throws SQLException {
if (isClosed())
if (isClosed()) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_STATEMENT_CLOSED);
if (this.connector == null)
}
if (this.connection.getConnector() == null) {
throw TSDBError.createSQLException(TSDBErrorNumbers.ERROR_JNI_CONNECTION_NULL);
}
return this.connection;
}
public void setConnection(TSDBConnection connection) {
this.connection = connection;
}
public boolean isClosed() throws SQLException {
return isClosed;
}

View File

@ -17,7 +17,7 @@ public class RestfulDriver extends AbstractDriver {
static {
try {
java.sql.DriverManager.registerDriver(new RestfulDriver());
DriverManager.registerDriver(new RestfulDriver());
} catch (SQLException e) {
throw TSDBError.createRuntimeException(TSDBErrorNumbers.ERROR_URL_NOT_SET, e);
}

View File

@ -1,26 +1,15 @@
package com.taosdata.jdbc.rs;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import com.taosdata.jdbc.TSDBError;
import com.taosdata.jdbc.TSDBErrorNumbers;
import com.taosdata.jdbc.utils.SqlSyntaxValidator;
import com.taosdata.jdbc.utils.Utils;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.nio.charset.Charset;
import java.sql.*;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class RestfulPreparedStatement extends RestfulStatement implements PreparedStatement {

View File

@ -0,0 +1,91 @@
package com.taosdata.jdbc.utils;
public class NullType {
private static final byte NULL_BOOL_VAL = 0x2;
private static final String NULL_STR = "null";
public String toString() {
return NullType.NULL_STR;
}
public static boolean isBooleanNull(byte val) {
return val == NullType.NULL_BOOL_VAL;
}
public static boolean isTinyIntNull(byte val) {
return val == Byte.MIN_VALUE;
}
public static boolean isSmallIntNull(short val) {
return val == Short.MIN_VALUE;
}
public static boolean isIntNull(int val) {
return val == Integer.MIN_VALUE;
}
public static boolean isBigIntNull(long val) {
return val == Long.MIN_VALUE;
}
public static boolean isFloatNull(float val) {
return Float.isNaN(val);
}
public static boolean isDoubleNull(double val) {
return Double.isNaN(val);
}
public static boolean isBinaryNull(byte[] val, int length) {
if (length != Byte.BYTES) {
return false;
}
return val[0] == 0xFF;
}
public static boolean isNcharNull(byte[] val, int length) {
if (length != Integer.BYTES) {
return false;
}
return (val[0] & val[1] & val[2] & val[3]) == 0xFF;
}
public static byte getBooleanNull() {
return NullType.NULL_BOOL_VAL;
}
public static byte getTinyintNull() {
return Byte.MIN_VALUE;
}
public static int getIntNull() {
return Integer.MIN_VALUE;
}
public static short getSmallIntNull() {
return Short.MIN_VALUE;
}
public static long getBigIntNull() {
return Long.MIN_VALUE;
}
public static int getFloatNull() {
return 0x7FF00000;
}
public static long getDoubleNull() {
return 0x7FFFFF0000000000L;
}
public static byte getBinaryNull() {
return (byte) 0xFF;
}
public static byte[] getNcharNull() {
return new byte[] {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
}
}
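
For illustration, a small hypothetical sketch of how these sentinel helpers might be used when mapping raw column values to nullable Java types (the method and variable names are assumptions):

```java
import com.taosdata.jdbc.utils.NullType;

public class NullTypeSketch {
    // Map a raw INT value read from a result block to a nullable Integer.
    static Integer decodeInt(int raw) {
        return NullType.isIntNull(raw) ? null : raw;
    }

    // Produce the sentinel used to represent a NULL INT when packing data.
    static int encodeNullInt() {
        return NullType.getIntNull();
    }
}
```

Extracting these sentinels into a public utility mirrors what TSDBResultSetBlockData previously kept as a private inner class, so the null checks and the new getters can be shared by other components.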

View File

@ -67,7 +67,7 @@ public class Utils {
findPlaceholderPosition(preparedSql, placeholderPositions);
findClauseRangeSet(preparedSql, clause, clauseRangeSet);
return transformSql(preparedSql, parameters, placeholderPositions, clauseRangeSet);
return transformSql(rawSql, parameters, placeholderPositions, clauseRangeSet);
}
private static void findClauseRangeSet(String preparedSql, String[] regexArr, RangeSet<Integer> clauseRangeSet) {
@ -95,14 +95,15 @@ public class Utils {
/***
*
* @param preparedSql
* @param rawSql
* @param paramArr
* @param placeholderPosition
* @param clauseRangeSet
* @return
*/
private static String transformSql(String preparedSql, Object[] paramArr, Map<Integer, Integer> placeholderPosition, RangeSet<Integer> clauseRangeSet) {
String[] sqlArr = preparedSql.split("\\?");
private static String transformSql(String rawSql, Object[] paramArr, Map<Integer, Integer> placeholderPosition, RangeSet<Integer> clauseRangeSet) {
String[] sqlArr = rawSql.split("\\?");
return IntStream.range(0, sqlArr.length).mapToObj(index -> {
if (index == paramArr.length)
return sqlArr[index];

View File

@ -1,6 +1,7 @@
package com.taosdata.jdbc;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -12,17 +13,61 @@ import java.util.Properties;
import java.util.concurrent.TimeUnit;
public class SubscribeTest {
Connection connection;
Statement statement;
String dbName = "test";
String tName = "t0";
String host = "127.0.0.1";
String topic = "test";
private long ts;
@Test
public void subscribe() {
try {
String rawSql = "select * from " + dbName + "." + tName + ";";
TSDBConnection conn = connection.unwrap(TSDBConnection.class);
TSDBSubscribe subscribe = conn.subscribe(topic, rawSql, false);
for (int j = 0; j < 10; j++) {
TimeUnit.SECONDS.sleep(1);
TSDBResultSet resSet = subscribe.consume();
int rowCnt = 0;
while (resSet.next()) {
if (rowCnt == 0) {
long cur_ts = resSet.getTimestamp(1).getTime();
int k = resSet.getInt(2);
int v = resSet.getInt(3);
Assert.assertEquals(ts, cur_ts);
Assert.assertEquals(100, k);
Assert.assertEquals(1, v);
}
if (rowCnt == 1) {
long cur_ts = resSet.getTimestamp(1).getTime();
int k = resSet.getInt(2);
int v = resSet.getInt(3);
Assert.assertEquals(ts + 1, cur_ts);
Assert.assertEquals(101, k);
Assert.assertEquals(2, v);
}
rowCnt++;
}
if (j == 0)
Assert.assertEquals(2, rowCnt);
resSet.close();
}
subscribe.close(true);
} catch (SQLException | InterruptedException throwables) {
throwables.printStackTrace();
}
}
@Before
public void createDatabase() {
try {
Class.forName("com.taosdata.jdbc.TSDBDriver");
public void createDatabase() throws SQLException {
Properties properties = new Properties();
properties.setProperty(TSDBDriver.PROPERTY_KEY_CHARSET, "UTF-8");
properties.setProperty(TSDBDriver.PROPERTY_KEY_LOCALE, "en_US.UTF-8");
@ -33,46 +78,9 @@ public class SubscribeTest {
statement.execute("drop database if exists " + dbName);
statement.execute("create database if not exists " + dbName);
statement.execute("create table if not exists " + dbName + "." + tName + " (ts timestamp, k int, v int)");
long ts = System.currentTimeMillis();
for (int i = 0; i < 2; i++) {
ts += i;
String sql = "insert into " + dbName + "." + tName + " values (" + ts + ", " + (100 + i) + ", " + i + ")";
statement.executeUpdate(sql);
}
} catch (ClassNotFoundException | SQLException e) {
return;
}
}
@Test
public void subscribe() {
try {
String rawSql = "select * from " + dbName + "." + tName + ";";
System.out.println(rawSql);
// TSDBSubscribe subscribe = ((TSDBConnection) connection).subscribe(topic, rawSql, false);
// int a = 0;
// while (true) {
// TimeUnit.MILLISECONDS.sleep(1000);
// TSDBResultSet resSet = subscribe.consume();
// while (resSet.next()) {
// for (int i = 1; i <= resSet.getMetaData().getColumnCount(); i++) {
// System.out.printf(i + ": " + resSet.getString(i) + "\t");
// }
// System.out.println("\n======" + a + "==========");
// }
// a++;
// if (a >= 2) {
// break;
// }
// resSet.close();
// }
//
// subscribe.close(true);
} catch (Exception e) {
e.printStackTrace();
}
ts = System.currentTimeMillis();
statement.executeUpdate("insert into " + dbName + "." + tName + " values (" + ts + ", 100, 1)");
statement.executeUpdate("insert into " + dbName + "." + tName + " values (" + (ts + 1) + ", 101, 2)");
}
@After
@ -86,6 +94,5 @@ public class SubscribeTest {
} catch (SQLException e) {
e.printStackTrace();
}
}
}

View File

@ -3,7 +3,6 @@ package com.taosdata.jdbc;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import com.google.common.primitives.Shorts;
import com.taosdata.jdbc.rs.RestfulResultSet;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@ -177,6 +176,7 @@ public class TSDBResultSetTest {
rs.getAsciiStream("f1");
}
@SuppressWarnings("deprecation")
@Test(expected = SQLFeatureNotSupportedException.class)
public void getUnicodeStream() throws SQLException {
rs.getUnicodeStream("f1");
@ -326,7 +326,7 @@ public class TSDBResultSetTest {
@Test(expected = SQLFeatureNotSupportedException.class)
public void getRow() throws SQLException {
int row = rs.getRow();
rs.getRow();
}
@Test(expected = SQLFeatureNotSupportedException.class)
@ -405,12 +405,12 @@ public class TSDBResultSetTest {
@Test(expected = SQLFeatureNotSupportedException.class)
public void updateByte() throws SQLException {
rs.updateByte(1, new Byte("0"));
rs.updateByte(1, (byte) 0);
}
@Test(expected = SQLFeatureNotSupportedException.class)
public void updateShort() throws SQLException {
rs.updateShort(1, new Short("0"));
rs.updateShort(1, (short) 0);
}
@Test(expected = SQLFeatureNotSupportedException.class)

View File

@ -12,7 +12,7 @@ public class InsertSpecialCharacterJniTest {
private static String tbname1 = "test";
private static String tbname2 = "weather";
private static String special_character_str_1 = "$asd$$fsfsf$";
private static String special_character_str_2 = "\\asdfsfsf\\\\";
private static String special_character_str_2 = "\\\\asdfsfsf\\\\";
private static String special_character_str_3 = "\\\\asdfsfsf\\";
private static String special_character_str_4 = "?asd??fsf?sf?";
private static String special_character_str_5 = "?#sd@$f(('<(s[P)>\"){]}f?s[]{}%vaew|\"fsfs^a&d*jhg)(j))(f@~!?$";
@ -70,7 +70,7 @@ public class InsertSpecialCharacterJniTest {
String f1 = new String(rs.getBytes(2));
//TODO: bug to be fixed
// Assert.assertEquals(special_character_str_2, f1);
Assert.assertEquals(special_character_str_2.substring(0, special_character_str_1.length() - 2), f1);
Assert.assertEquals(special_character_str_2.substring(1, special_character_str_1.length() - 1), f1);
String f2 = rs.getString(3);
Assert.assertNull(f2);
}
@ -345,6 +345,32 @@ public class InsertSpecialCharacterJniTest {
}
}
@Test
public void testCase12() throws SQLException {
final long now = System.currentTimeMillis();
// insert
final String sql = "insert into " + tbname1 + "(ts, f1, f2) values(?, 'HelloTDengine', ?) ; ";
try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
pstmt.setTimestamp(1, new Timestamp(now));
pstmt.setString(2, special_character_str_4);
int ret = pstmt.executeUpdate();
Assert.assertEquals(1, ret);
}
// query
final String query = "select * from " + tbname1;
try (Statement stmt = conn.createStatement()) {
ResultSet rs = stmt.executeQuery(query);
rs.next();
long timestamp = rs.getTimestamp(1).getTime();
Assert.assertEquals(now, timestamp);
String f1 = new String(rs.getBytes(2));
Assert.assertEquals("HelloTDengine", f1);
String f2 = rs.getString(3);
Assert.assertEquals(special_character_str_4, f2);
}
}
@Before
public void before() throws SQLException {
try (Statement stmt = conn.createStatement()) {

View File

@ -13,7 +13,7 @@ public class InsertSpecialCharacterRestfulTest {
private static String tbname1 = "test";
private static String tbname2 = "weather";
private static String special_character_str_1 = "$asd$$fsfsf$";
private static String special_character_str_2 = "\\asdfsfsf\\\\";
private static String special_character_str_2 = "\\\\asdfsfsf\\\\";
private static String special_character_str_3 = "\\\\asdfsfsf\\";
private static String special_character_str_4 = "?asd??fsf?sf?";
private static String special_character_str_5 = "?#sd@$f(('<(s[P)>\"){]}f?s[]{}%vaew|\"fsfs^a&d*jhg)(j))(f@~!?$";
@ -49,7 +49,7 @@ public class InsertSpecialCharacterRestfulTest {
@Test
public void testCase02() throws SQLException {
//TODO:
// Expected :\asdfsfsf\\
// Expected :\asdfsfsf\
// Actual :\asdfsfsf\
final long now = System.currentTimeMillis();
@ -71,7 +71,7 @@ public class InsertSpecialCharacterRestfulTest {
String f1 = new String(rs.getBytes(2));
//TODO: bug to be fixed
// Assert.assertEquals(special_character_str_2, f1);
Assert.assertEquals(special_character_str_2.substring(0, special_character_str_1.length() - 2), f1);
Assert.assertEquals(special_character_str_2.substring(1, special_character_str_1.length() - 1), f1);
String f2 = rs.getString(3);
Assert.assertNull(f2);
}
@ -346,6 +346,31 @@ public class InsertSpecialCharacterRestfulTest {
}
}
@Test
public void testCase12() throws SQLException {
final long now = System.currentTimeMillis();
// insert
final String sql = "insert into " + tbname1 + "(ts, f1, f2) values(?, 'HelloTDengine', ?) ; ";
try (PreparedStatement pstmt = conn.prepareStatement(sql)) {
pstmt.setTimestamp(1, new Timestamp(now));
pstmt.setString(2, special_character_str_4);
int ret = pstmt.executeUpdate();
Assert.assertEquals(1, ret);
}
// query
final String query = "select * from " + tbname1;
try (Statement stmt = conn.createStatement()) {
ResultSet rs = stmt.executeQuery(query);
rs.next();
long timestamp = rs.getTimestamp(1).getTime();
Assert.assertEquals(now, timestamp);
String f1 = new String(rs.getBytes(2));
Assert.assertEquals("HelloTDengine", f1);
String f2 = rs.getString(3);
Assert.assertEquals(special_character_str_4, f2);
}
}
@Before
public void before() throws SQLException {
try (Statement stmt = conn.createStatement()) {

src/connector/python/.gitignore
View File

@ -0,0 +1,154 @@
# Created by https://www.toptal.com/developers/gitignore/api/python
# Edit at https://www.toptal.com/developers/gitignore?templates=python
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
pytestdebug.log
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
doc/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
#poetry.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
# .env
.env/
.venv/
env/
venv/
ENV/
env.bak/
venv.bak/
pythonenv*
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# operating system-related files
# file properties cache/storage on macOS
*.DS_Store
# thumbnail cache on Windows
Thumbs.db
# profiling data
.prof
# End of https://www.toptal.com/developers/gitignore/api/python

View File

@ -0,0 +1,17 @@
# TDengine Connector for Python
[TDengine] connector for Python enables Python programs to access TDengine through an API compliant with the Python DB API 2.0 (PEP-249). It uses the TDengine C client library for client-server communication.
## Install
```sh
pip install git+https://github.com/taosdata/TDengine-connector-python
```
## Source Code
The source code of the [TDengine] connector for Python is hosted on [GitHub](https://github.com/taosdata/TDengine-connector-python).
## License - AGPL
The license is the same as [TDengine](https://github.com/taosdata/TDengine).

View File

@ -0,0 +1,12 @@
import taos
conn = taos.connect(host='127.0.0.1',
user='root',
password='taosdata',
database='log')
cursor = conn.cursor()
sql = "select * from log.log limit 10"
cursor.execute(sql)
for row in cursor:
print(row)

View File

@ -0,0 +1 @@
../

View File

@ -1 +0,0 @@
# TDengine python client interface

View File

@ -1,20 +0,0 @@
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="taos",
version="2.0.9",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/pypa/sampleproject",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 2",
"Operating System :: Linux",
],
)

View File

@ -1,648 +0,0 @@
import ctypes
from .constants import FieldType
from .error import *
import math
import datetime
def _convert_millisecond_to_datetime(milli):
return datetime.datetime.fromtimestamp(milli / 1000.0)
def _convert_microsecond_to_datetime(micro):
return datetime.datetime.fromtimestamp(micro / 1000000.0)
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
_timestamp_converter = _convert_millisecond_to_datetime
if micro:
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int64))[
:abs(num_of_rows)]]
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_bool))[
:abs(num_of_rows)]]
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
def _crow_tinyint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ubyte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ubyte))[
:abs(num_of_rows)]]
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_smallint_unsigned_to_python(
data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ushort))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ushort))[
:abs(num_of_rows)]]
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint))[
:abs(num_of_rows)]]
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
def _crow_bigint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint64))[
:abs(num_of_rows)]]
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
assert(nbytes is not None)
if num_of_rows > 0:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
res = []
for i in range(abs(num_of_rows)):
try:
if num_of_rows >= 0:
tmpstr = ctypes.c_char_p(data)
res.append(tmpstr.value.decode())
else:
res.append((ctypes.cast(data + nbytes * i,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError:
res.append(None)
return res
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
assert(nbytes is not None)
res = []
if num_of_rows > 0:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
data + nbytes * i,
ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
data + nbytes * i,
ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError:
res.append(None)
return res
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
res = []
if num_of_rows >= 0:
for i in range(abs(num_of_rows)):
try:
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode())
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
res.append((ctypes.cast(data + nbytes * i + 2,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError:
res.append(None)
return res
_CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python,
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR: _crow_nchar_to_python,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}
_CONVERT_FUNC_BLOCK = {
FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python_block,
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR: _crow_nchar_to_python_block,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}
# Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65),
('type', ctypes.c_char),
('bytes', ctypes.c_short)]
# C interface class
class CTaosInterface(object):
libtaos = ctypes.CDLL('libtaos.so')
libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
libtaos.taos_init.restype = None
libtaos.taos_connect.restype = ctypes.c_void_p
#libtaos.taos_use_result.restype = ctypes.c_void_p
libtaos.taos_fetch_row.restype = ctypes.POINTER(ctypes.c_void_p)
libtaos.taos_errstr.restype = ctypes.c_char_p
libtaos.taos_subscribe.restype = ctypes.c_void_p
libtaos.taos_consume.restype = ctypes.c_void_p
libtaos.taos_fetch_lengths.restype = ctypes.c_void_p
libtaos.taos_free_result.restype = None
libtaos.taos_errno.restype = ctypes.c_int
libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
def __init__(self, config=None):
'''
Function to initialize the class
@host : str, hostname to connect
@user : str, username to connect to server
@password : str, password to connect to server
@db : str, default db to use when log in
@config : str, config directory
@rtype : None
'''
if config is None:
self._config = ctypes.c_char_p(None)
else:
try:
self._config = ctypes.c_char_p(config.encode('utf-8'))
except AttributeError:
raise AttributeError("config is expected as a str")
if config is not None:
CTaosInterface.libtaos.taos_options(3, self._config)
CTaosInterface.libtaos.taos_init()
@property
def config(self):
""" Get current config
"""
return self._config
def connect(
self,
host=None,
user="root",
password="taosdata",
db=None,
port=0):
'''
Function to connect to server
@rtype: c_void_p, TDengine handle
'''
# host
try:
_host = ctypes.c_char_p(host.encode(
"utf-8")) if host is not None else ctypes.c_char_p(None)
except AttributeError:
raise AttributeError("host is expected as a str")
# user
try:
_user = ctypes.c_char_p(user.encode("utf-8"))
except AttributeError:
raise AttributeError("user is expected as a str")
# password
try:
_password = ctypes.c_char_p(password.encode("utf-8"))
except AttributeError:
raise AttributeError("password is expected as a str")
# db
try:
_db = ctypes.c_char_p(
db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
except AttributeError:
raise AttributeError("db is expected as a str")
# port
try:
_port = ctypes.c_int(port)
except TypeError:
raise TypeError("port is expected as an int")
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
_host, _user, _password, _db, _port))
if connection.value is None:
print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed")
# sys.exit(1)
# else:
# print('connect to TDengine success')
return connection
@staticmethod
def close(connection):
'''Close the TDengine handle
'''
CTaosInterface.libtaos.taos_close(connection)
#print('connection is closed')
@staticmethod
def query(connection, sql):
'''Run SQL
@sql: str, sql string to run
@rtype: c_void_p, pointer to the result set; use errno()/errStr() on it to check for failure
'''
try:
return CTaosInterface.libtaos.taos_query(
connection, ctypes.c_char_p(sql.encode('utf-8')))
except AttributeError:
raise AttributeError("sql is expected as a string")
# finally:
# CTaosInterface.libtaos.close(connection)
@staticmethod
def affectedRows(result):
"""The affected rows after runing query
"""
return CTaosInterface.libtaos.taos_affected_rows(result)
@staticmethod
def subscribe(connection, restart, topic, sql, interval):
"""Create a subscription
@restart boolean, whether to restart the subscription from the beginning instead of continuing from the last saved progress
@topic string, name of this subscription
@sql string, sql statement for data query, must be a 'select' statement.
@interval int, polling interval in milliseconds
"""
return ctypes.c_void_p(CTaosInterface.libtaos.taos_subscribe(
connection,
1 if restart else 0,
ctypes.c_char_p(topic.encode('utf-8')),
ctypes.c_char_p(sql.encode('utf-8')),
None,
None,
interval))
@staticmethod
def consume(sub):
"""Consume data of a subscription
"""
result = ctypes.c_void_p(CTaosInterface.libtaos.taos_consume(sub))
fields = []
pfields = CTaosInterface.fetchFields(result)
for i in range(CTaosInterface.libtaos.taos_num_fields(result)):
fields.append({'name': pfields[i].name.decode('utf-8'),
'bytes': pfields[i].bytes,
'type': ord(pfields[i].type)})
return result, fields
@staticmethod
def unsubscribe(sub, keepProgress):
"""Cancel a subscription
"""
CTaosInterface.libtaos.taos_unsubscribe(sub, 1 if keepProgress else 0)
@staticmethod
def useResult(result):
'''Use result after calling self.query
'''
fields = []
pfields = CTaosInterface.fetchFields(result)
for i in range(CTaosInterface.fieldsCount(result)):
fields.append({'name': pfields[i].name.decode('utf-8'),
'bytes': pfields[i].bytes,
'type': ord(pfields[i].type)})
return fields
@staticmethod
def fetchBlock(result, fields):
pblock = ctypes.c_void_p(0)
num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
result, ctypes.byref(pblock))
if num_of_rows == 0:
return None, 0
isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
raise DatabaseError("Invalid data type returned from database")
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
return blocks, abs(num_of_rows)
@staticmethod
def fetchRow(result, fields):
pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock:
num_of_rows = 1
isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC:
raise DatabaseError(
"Invalid data type returned from database")
if data is None:
blocks[i] = [None]
else:
blocks[i] = _CONVERT_FUNC[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
else:
return None, 0
return blocks, abs(num_of_rows)
@staticmethod
def freeResult(result):
CTaosInterface.libtaos.taos_free_result(result)
result.value = None
@staticmethod
def fieldsCount(result):
return CTaosInterface.libtaos.taos_field_count(result)
@staticmethod
def fetchFields(result):
return CTaosInterface.libtaos.taos_fetch_fields(result)
# @staticmethod
# def fetchRow(result, fields):
# l = []
# row = CTaosInterface.libtaos.taos_fetch_row(result)
# if not row:
# return None
# for i in range(len(fields)):
# l.append(CTaosInterface.getDataValue(
# row[i], fields[i]['type'], fields[i]['bytes']))
# return tuple(l)
# @staticmethod
# def getDataValue(data, dtype, byte):
# '''
# '''
# if not data:
# return None
# if (dtype == CTaosInterface.TSDB_DATA_TYPE_BOOL):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TINYINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_SMALLINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_INT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BIGINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_FLOAT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_DOUBLE):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BINARY):
# return (ctypes.cast(data, ctypes.POINTER(ctypes.c_char))[0:byte]).rstrip('\x00')
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TIMESTAMP):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_NCHAR):
# return (ctypes.cast(data, ctypes.c_char_p).value).rstrip('\x00')
@staticmethod
def errno(result):
"""Return the error number.
"""
return CTaosInterface.libtaos.taos_errno(result)
@staticmethod
def errStr(result):
"""Return the error styring
"""
return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
if __name__ == '__main__':
cinter = CTaosInterface()
conn = cinter.connect()
result = cinter.query(conn, 'show databases')
print('Query Affected rows: {}'.format(cinter.affectedRows(result)))
fields = CTaosInterface.useResult(result)
data, num_of_rows = CTaosInterface.fetchBlock(result, fields)
print(data)
cinter.freeResult(result)
cinter.close(conn)
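# Illustrative sketch: a standalone decoder for one BINARY/NCHAR cell, assuming the
# length-prefixed layout that _crow_binary_to_python_block (see _CONVERT_FUNC_BLOCK
# above) relies on: each cell occupies a fixed slot of `nbytes`, and the first two
# bytes of the slot hold the real payload length as a C short.
def _decode_binary_cell(data, row, nbytes):
    import ctypes
    # read the 2-byte length prefix at the start of this row's slot
    rbyte = ctypes.cast(data + nbytes * row, ctypes.POINTER(ctypes.c_short))[0]
    # the payload starts right after the prefix and is rbyte bytes long
    return ctypes.string_at(data + nbytes * row + 2, rbyte).decode('utf-8')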

View File

@ -1,278 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
from .constants import FieldType
class TDengineCursor(object):
"""Database cursor which is used to manage the context of a fetch operation.
Attributes:
.description: Read-only attribute consists of 7-item sequences:
> name (mandatory)
> type_code (mandatory)
> display_size
> internal_size
> precision
> scale
> null_ok
This attribute will be None for operations that do not return rows or
if the cursor has not had an operation invoked via the .execute*() method yet.
.rowcount: This read-only attribute specifies the number of rows that the last
.execute*() produced (for DQL statements like SELECT) or affected (for DML statements like INSERT).
"""
def __init__(self, connection=None):
self._description = []
self._rowcount = -1
self._connection = None
self._result = None
self._fields = None
self._block = None
self._block_rows = -1
self._block_iter = 0
self._affected_rows = 0
self._logfile = ""
if connection is not None:
self._connection = connection
def __iter__(self):
return self
def next(self):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetch iterator")
if self._block_rows <= self._block_iter:
block, self._block_rows = CTaosInterface.fetchRow(
self._result, self._fields)
if self._block_rows == 0:
raise StopIteration
self._block = list(map(tuple, zip(*block)))
self._block_iter = 0
data = self._block[self._block_iter]
self._block_iter += 1
return data
@property
def description(self):
"""Return the description of the object.
"""
return self._description
@property
def rowcount(self):
"""Return the rowcount of the object
"""
return self._rowcount
@property
def affected_rows(self):
"""Return the affected_rows of the object
"""
return self._affected_rows
def callproc(self, procname, *args):
"""Call a stored database procedure with the given name.
Void functionality since no stored procedures.
"""
pass
def log(self, logfile):
self._logfile = logfile
def close(self):
"""Close the cursor.
"""
if self._connection is None:
return False
self._reset_result()
self._connection = None
return True
def execute(self, operation, params=None):
"""Prepare and execute a database operation (query or command).
"""
if not operation:
return None
if not self._connection:
# TODO : change the exception raised here
raise ProgrammingError("Cursor is not connected")
self._reset_result()
stmt = operation
if params is not None:
pass
# global querySeqNum
# querySeqNum += 1
# localSeqNum = querySeqNum # avoid race condition
# print(" >> Exec Query ({}): {}".format(localSeqNum, str(stmt)))
self._result = CTaosInterface.query(self._connection._conn, stmt)
# print(" << Query ({}) Exec Done".format(localSeqNum))
if (self._logfile):
with open(self._logfile, "a") as logfile:
logfile.write("%s;\n" % operation)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno == 0:
if CTaosInterface.fieldsCount(self._result) == 0:
self._affected_rows += CTaosInterface.affectedRows(
self._result)
return CTaosInterface.affectedRows(self._result)
else:
self._fields = CTaosInterface.useResult(
self._result)
return self._handle_result()
else:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
def executemany(self, operation, seq_of_parameters):
"""Prepare a database operation (query or command) and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters.
"""
pass
def fetchone(self):
"""Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
"""
pass
def fetchmany(self):
pass
def istype(self, col, dataType):
if (dataType.upper() == "BOOL"):
if (self._description[col][1] == FieldType.C_BOOL):
return True
if (dataType.upper() == "TINYINT"):
if (self._description[col][1] == FieldType.C_TINYINT):
return True
if (dataType.upper() == "TINYINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
return True
if (dataType.upper() == "SMALLINT"):
if (self._description[col][1] == FieldType.C_SMALLINT):
return True
if (dataType.upper() == "SMALLINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
return True
if (dataType.upper() == "INT"):
if (self._description[col][1] == FieldType.C_INT):
return True
if (dataType.upper() == "INT UNSIGNED"):
if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
return True
if (dataType.upper() == "BIGINT"):
if (self._description[col][1] == FieldType.C_BIGINT):
return True
if (dataType.upper() == "BIGINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
return True
if (dataType.upper() == "FLOAT"):
if (self._description[col][1] == FieldType.C_FLOAT):
return True
if (dataType.upper() == "DOUBLE"):
if (self._description[col][1] == FieldType.C_DOUBLE):
return True
if (dataType.upper() == "BINARY"):
if (self._description[col][1] == FieldType.C_BINARY):
return True
if (dataType.upper() == "TIMESTAMP"):
if (self._description[col][1] == FieldType.C_TIMESTAMP):
return True
if (dataType.upper() == "NCHAR"):
if (self._description[col][1] == FieldType.C_NCHAR):
return True
return False
def fetchall_row(self):
"""Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
"""
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
block, num_of_fields = CTaosInterface.fetchRow(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields
for i in range(len(self._fields)):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
def fetchall(self):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
block, num_of_fields = CTaosInterface.fetchBlock(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields
for i in range(len(self._fields)):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
def nextset(self):
"""
"""
pass
def setinputsize(self, sizes):
pass
def setutputsize(self, size, column=None):
pass
def _reset_result(self):
"""Reset the result to unused version.
"""
self._description = []
self._rowcount = -1
if self._result is not None:
CTaosInterface.freeResult(self._result)
self._result = None
self._fields = None
self._block = None
self._block_rows = -1
self._block_iter = 0
self._affected_rows = 0
def _handle_result(self):
"""Handle the return result from query.
"""
self._description = []
for ele in self._fields:
self._description.append(
(ele['name'], ele['type'], None, None, None, None, False))
return self._result
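# Illustrative usage sketch of the cursor above; the database and table names are
# hypothetical, and a TDengine server reachable with the default root/taosdata account
# is assumed.
def _example_cursor_usage():
    import taos
    conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
    cursor = conn.cursor()
    cursor.execute("create database if not exists demo")
    cursor.execute("use demo")
    cursor.execute("create table if not exists t (ts timestamp, v int)")
    cursor.execute("insert into t values (now, 1)")
    cursor.execute("select * from t")
    for row in cursor.fetchall():  # fetchall() drains the result set block by block
        print(row)
    cursor.close()
    conn.close()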

View File

@ -0,0 +1 @@
../

View File

@ -1,12 +0,0 @@
Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
This program is free software: you can use, redistribute, and/or modify
it under the terms of the GNU Affero General Public License, version 3
or later ("AGPL"), as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@ -1 +0,0 @@
# TDengine python client interface

View File

@ -1,20 +0,0 @@
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="taos",
version="2.0.9",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/pypa/sampleproject",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: Linux",
],
)

View File

@ -1,24 +0,0 @@
from .connection import TDengineConnection
from .cursor import TDengineCursor
# Globals
threadsafety = 0
paramstyle = 'pyformat'
__all__ = ['connection', 'cursor']
def connect(*args, **kwargs):
""" Function to return a TDengine connector object
Current supporting keyword parameters:
@dsn: Data source name as string
@user: Username as string(optional)
@password: Password as string(optional)
@host: Hostname(optional)
@database: Database name(optional)
@rtype: TDengineConnector
"""
return TDengineConnection(*args, **kwargs)
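# A minimal sketch of the keyword parameters connect() accepts; every value below is
# hypothetical, and `config` is the client configuration directory that is passed down
# to taos_options before taos_init.
def _example_connect():
    conn = connect(host="192.168.1.10",
                   user="root",
                   password="taosdata",
                   database="log",
                   port=0,
                   config="/etc/taos")  # directory containing taos.cfg
    conn.close()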

View File

@ -1,95 +0,0 @@
from .cursor import TDengineCursor
from .subscription import TDengineSubscription
from .cinterface import CTaosInterface
class TDengineConnection(object):
""" TDengine connection object
"""
def __init__(self, *args, **kwargs):
self._conn = None
self._host = None
self._user = "root"
self._password = "taosdata"
self._database = None
self._port = 0
self._config = None
self._chandle = None
self.config(**kwargs)
def config(self, **kwargs):
# host
if 'host' in kwargs:
self._host = kwargs['host']
# user
if 'user' in kwargs:
self._user = kwargs['user']
# password
if 'password' in kwargs:
self._password = kwargs['password']
# database
if 'database' in kwargs:
self._database = kwargs['database']
# port
if 'port' in kwargs:
self._port = kwargs['port']
# config
if 'config' in kwargs:
self._config = kwargs['config']
self._chandle = CTaosInterface(self._config)
self._conn = self._chandle.connect(
self._host,
self._user,
self._password,
self._database,
self._port)
def close(self):
"""Close current connection.
"""
return CTaosInterface.close(self._conn)
def subscribe(self, restart, topic, sql, interval):
"""Create a subscription.
"""
if self._conn is None:
return None
sub = CTaosInterface.subscribe(
self._conn, restart, topic, sql, interval)
return TDengineSubscription(sub)
def cursor(self):
"""Return a new Cursor object using the connection.
"""
return TDengineCursor(self)
def commit(self):
"""Commit any pending transaction to the database.
Since TDengine does not support transactions, this is a no-op.
"""
pass
def rollback(self):
"""Void functionality
"""
pass
def clear_result_set(self):
"""Clear unused result set on this connection.
"""
pass
if __name__ == "__main__":
conn = TDengineConnection(host='192.168.1.107')
conn.close()
print("Hello world")

View File

@ -1,42 +0,0 @@
"""Constants in TDengine python
"""
from .dbapi import *
class FieldType(object):
"""TDengine Field Types
"""
# type_code
C_NULL = 0
C_BOOL = 1
C_TINYINT = 2
C_SMALLINT = 3
C_INT = 4
C_BIGINT = 5
C_FLOAT = 6
C_DOUBLE = 7
C_BINARY = 8
C_TIMESTAMP = 9
C_NCHAR = 10
C_TINYINT_UNSIGNED = 11
C_SMALLINT_UNSIGNED = 12
C_INT_UNSIGNED = 13
C_BIGINT_UNSIGNED = 14
# NULL value definition
# NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL = 0x02
C_TINYINT_NULL = -128
C_TINYINT_UNSIGNED_NULL = 255
C_SMALLINT_NULL = -32768
C_SMALLINT_UNSIGNED_NULL = 65535
C_INT_NULL = -2147483648
C_INT_UNSIGNED_NULL = 4294967295
C_BIGINT_NULL = -9223372036854775808
C_BIGINT_UNSIGNED_NULL = 18446744073709551615
C_FLOAT_NULL = float('nan')
C_DOUBLE_NULL = float('nan')
C_BINARY_NULL = bytearray([int('0xff', 16)])
# Timestamp precision definition
C_TIMESTAMP_MILLI = 0
C_TIMESTAMP_MICRO = 1
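# Illustrative sketch of how these sentinel values are meant to be used: a column
# converter compares each raw C value with the matching *_NULL constant and yields
# None for SQL NULL. Shown for a signed INT column; `data` is assumed to be the
# address of a C int array as produced by taos_fetch_block.
def _ints_to_python(data, num_of_rows):
    import ctypes
    raw = ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]
    return [None if v == FieldType.C_INT_NULL else v for v in raw]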

View File

@ -1,44 +0,0 @@
"""Type Objects and Constructors.
"""
import time
import datetime
class DBAPITypeObject(object):
def __init__(self, *values):
self.values = values
def __com__(self, other):
if other in self.values:
return 0
if other < self.values:
return 1
else:
return -1
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def DataFromTicks(ticks):
return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6])
Binary = bytes
# STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
# NUMBER = DBAPITypeObject(*constants.FieldType.get_number_types())
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# ROWID = DBAPITypeObject()
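# A short usage sketch of the constructors above; the tick value is arbitrary and the
# conversion goes through time.localtime, so results depend on the local timezone.
def _example_ticks():
    ts = 1609459200                  # 2021-01-01 00:00:00 UTC
    d = DataFromTicks(ts)            # datetime.date of that local day
    t = TimeFromTicks(ts)            # datetime.time of that local moment
    dt = TimestampFromTicks(ts)      # full datetime.datetime
    return d, t, dt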

View File

@ -1,66 +0,0 @@
"""Python exceptions
"""
class Error(Exception):
def __init__(self, msg=None, errno=None):
self.msg = msg
self._full_msg = self.msg
self.errno = errno
def __str__(self):
return self._full_msg
class Warning(Exception):
"""Exception raised for important warnings like data truncations while inserting.
"""
pass
class InterfaceError(Error):
"""Exception raised for errors that are related to the database interface rather than the database itself.
"""
pass
class DatabaseError(Error):
"""Exception raised for errors that are related to the database.
"""
pass
class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
"""
pass
class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
"""
pass
class IntegrityError(DatabaseError):
"""Exception raised when the relational integrity of the database is affected.
"""
pass
class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal error.
"""
pass
class ProgrammingError(DatabaseError):
"""Exception raised for programming errors.
"""
pass
class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used which is not supported by the database,.
"""
pass
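# Illustrative sketch of how callers are expected to handle these exceptions; the SQL
# and the cursor object are hypothetical. ProgrammingError carries the message and
# errno reported by the C client library.
def _example_error_handling(cursor):
    try:
        cursor.execute("select * from no_such_table")
    except ProgrammingError as e:
        print("query failed:", e.msg, e.errno)
    except OperationalError as e:
        print("cursor or connection problem:", e)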

View File

@ -1,57 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
class TDengineSubscription(object):
"""TDengine subscription object
"""
def __init__(self, sub):
self._sub = sub
def consume(self):
"""Consume rows of a subscription
"""
if self._sub is None:
raise OperationalError("Invalid use of consume")
result, fields = CTaosInterface.consume(self._sub)
buffer = [[] for i in range(len(fields))]
while True:
block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
if num_of_fields == 0:
break
for i in range(len(fields)):
buffer[i].extend(block[i])
self.fields = fields
return list(map(tuple, zip(*buffer)))
def close(self, keepProgress=True):
"""Close the Subscription.
"""
if self._sub is None:
return False
CTaosInterface.unsubscribe(self._sub, keepProgress)
return True
if __name__ == '__main__':
from .connection import TDengineConnection
conn = TDengineConnection(
host="127.0.0.1",
user="root",
password="taosdata",
database="test")
# Generate a cursor object to run SQL commands
sub = conn.subscribe(True, "test", "select * from meters;", 1000)
for i in range(0, 10):
data = sub.consume()
for d in data:
print(d)
sub.close()
conn.close()

View File

@ -0,0 +1 @@
../

View File

@ -1,12 +0,0 @@
Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
This program is free software: you can use, redistribute, and/or modify
it under the terms of the GNU Affero General Public License, version 3
or later ("AGPL"), as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@ -1 +0,0 @@
# TDengine python client interface

View File

@ -1,20 +0,0 @@
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="taos",
version="2.0.9",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/pypa/sampleproject",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: MacOS X",
],
)

View File

@ -1,24 +0,0 @@
from .connection import TDengineConnection
from .cursor import TDengineCursor
# Globals
threadsafety = 0
paramstyle = 'pyformat'
__all__ = ['connection', 'cursor']
def connect(*args, **kwargs):
""" Function to return a TDengine connector object
Current supporting keyword parameters:
@dsn: Data source name as string
@user: Username as string(optional)
@password: Password as string(optional)
@host: Hostname(optional)
@database: Database name(optional)
@rtype: TDengineConnector
"""
return TDengineConnection(*args, **kwargs)

View File

@ -1,648 +0,0 @@
import ctypes
from .constants import FieldType
from .error import *
import math
import datetime
def _convert_millisecond_to_datetime(milli):
return datetime.datetime.fromtimestamp(milli / 1000.0)
def _convert_microsecond_to_datetime(micro):
return datetime.datetime.fromtimestamp(micro / 1000000.0)
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
_timestamp_converter = _convert_millisecond_to_datetime
if micro:
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int64))[
:abs(num_of_rows)]]
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_bool))[
:abs(num_of_rows)]]
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
def _crow_tinyint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ubyte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ubyte))[
:abs(num_of_rows)]]
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_smallint_unsigned_to_python(
data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ushort))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ushort))[
:abs(num_of_rows)]]
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint))[
:abs(num_of_rows)]]
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
def _crow_bigint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint64))[
:abs(num_of_rows)]]
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
assert(nbytes is not None)
if num_of_rows > 0:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
res = []
for i in range(abs(num_of_rows)):
try:
if num_of_rows >= 0:
tmpstr = ctypes.c_char_p(data)
res.append(tmpstr.value.decode())
else:
res.append((ctypes.cast(data + nbytes * i,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError:
res.append(None)
return res
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
assert(nbytes is not None)
res = []
if num_of_rows > 0:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
data + nbytes * i,
ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
data + nbytes * i,
ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError:
res.append(None)
return res
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
res = []
if num_of_rows >= 0:
for i in range(abs(num_of_rows)):
try:
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode())
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
res.append((ctypes.cast(data + nbytes * i + 2,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError:
res.append(None)
return res
_CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python,
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR: _crow_nchar_to_python,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}
_CONVERT_FUNC_BLOCK = {
FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python_block,
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR: _crow_nchar_to_python_block,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}
# Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65),
('type', ctypes.c_char),
('bytes', ctypes.c_short)]
# C interface class
class CTaosInterface(object):
libtaos = ctypes.CDLL('libtaos.dylib')
libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
libtaos.taos_init.restype = None
libtaos.taos_connect.restype = ctypes.c_void_p
#libtaos.taos_use_result.restype = ctypes.c_void_p
libtaos.taos_fetch_row.restype = ctypes.POINTER(ctypes.c_void_p)
libtaos.taos_errstr.restype = ctypes.c_char_p
libtaos.taos_subscribe.restype = ctypes.c_void_p
libtaos.taos_consume.restype = ctypes.c_void_p
libtaos.taos_fetch_lengths.restype = ctypes.c_void_p
libtaos.taos_free_result.restype = None
libtaos.taos_errno.restype = ctypes.c_int
libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
def __init__(self, config=None):
'''
Function to initialize the class; connection parameters such as host, user,
password and db are supplied later through connect()
@config : str, config directory
@rtype : None
'''
if config is None:
self._config = ctypes.c_char_p(None)
else:
try:
self._config = ctypes.c_char_p(config.encode('utf-8'))
except AttributeError:
raise AttributeError("config is expected as a str")
if config is not None:
CTaosInterface.libtaos.taos_options(3, self._config)
CTaosInterface.libtaos.taos_init()
@property
def config(self):
""" Get current config
"""
return self._config
def connect(
self,
host=None,
user="root",
password="taosdata",
db=None,
port=0):
'''
Function to connect to server
@rtype: c_void_p, TDengine handle
'''
# host
try:
_host = ctypes.c_char_p(host.encode(
"utf-8")) if host is not None else ctypes.c_char_p(None)
except AttributeError:
raise AttributeError("host is expected as a str")
# user
try:
_user = ctypes.c_char_p(user.encode("utf-8"))
except AttributeError:
raise AttributeError("user is expected as a str")
# password
try:
_password = ctypes.c_char_p(password.encode("utf-8"))
except AttributeError:
raise AttributeError("password is expected as a str")
# db
try:
_db = ctypes.c_char_p(
db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
except AttributeError:
raise AttributeError("db is expected as a str")
# port
try:
_port = ctypes.c_int(port)
except TypeError:
raise TypeError("port is expected as an int")
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
_host, _user, _password, _db, _port))
if connection.value is None:
print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed")
# sys.exit(1)
# else:
# print('connect to TDengine success')
return connection
@staticmethod
def close(connection):
'''Close the TDengine handle
'''
CTaosInterface.libtaos.taos_close(connection)
#print('connection is closed')
@staticmethod
def query(connection, sql):
'''Run SQL
@sql: str, sql string to run
@rtype: c_void_p, pointer to the result set; use errno()/errStr() on it to check for failure
'''
try:
return CTaosInterface.libtaos.taos_query(
connection, ctypes.c_char_p(sql.encode('utf-8')))
except AttributeError:
raise AttributeError("sql is expected as a string")
# finally:
# CTaosInterface.libtaos.close(connection)
@staticmethod
def affectedRows(result):
"""The affected rows after runing query
"""
return CTaosInterface.libtaos.taos_affected_rows(result)
@staticmethod
def subscribe(connection, restart, topic, sql, interval):
"""Create a subscription
@restart boolean, whether to restart the subscription from the beginning instead of continuing from the last saved progress
@topic string, name of this subscription
@sql string, sql statement for data query, must be a 'select' statement.
@interval int, polling interval in milliseconds
"""
return ctypes.c_void_p(CTaosInterface.libtaos.taos_subscribe(
connection,
1 if restart else 0,
ctypes.c_char_p(topic.encode('utf-8')),
ctypes.c_char_p(sql.encode('utf-8')),
None,
None,
interval))
@staticmethod
def consume(sub):
"""Consume data of a subscription
"""
result = ctypes.c_void_p(CTaosInterface.libtaos.taos_consume(sub))
fields = []
pfields = CTaosInterface.fetchFields(result)
for i in range(CTaosInterface.libtaos.taos_num_fields(result)):
fields.append({'name': pfields[i].name.decode('utf-8'),
'bytes': pfields[i].bytes,
'type': ord(pfields[i].type)})
return result, fields
@staticmethod
def unsubscribe(sub, keepProgress):
"""Cancel a subscription
"""
CTaosInterface.libtaos.taos_unsubscribe(sub, 1 if keepProgress else 0)
@staticmethod
def useResult(result):
'''Use result after calling self.query
'''
fields = []
pfields = CTaosInterface.fetchFields(result)
for i in range(CTaosInterface.fieldsCount(result)):
fields.append({'name': pfields[i].name.decode('utf-8'),
'bytes': pfields[i].bytes,
'type': ord(pfields[i].type)})
return fields
@staticmethod
def fetchBlock(result, fields):
pblock = ctypes.c_void_p(0)
num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
result, ctypes.byref(pblock))
if num_of_rows == 0:
return None, 0
isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
raise DatabaseError("Invalid data type returned from database")
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
return blocks, abs(num_of_rows)
@staticmethod
def fetchRow(result, fields):
pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock:
num_of_rows = 1
isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC:
raise DatabaseError(
"Invalid data type returned from database")
if data is None:
blocks[i] = [None]
else:
blocks[i] = _CONVERT_FUNC[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
else:
return None, 0
return blocks, abs(num_of_rows)
@staticmethod
def freeResult(result):
CTaosInterface.libtaos.taos_free_result(result)
result.value = None
@staticmethod
def fieldsCount(result):
return CTaosInterface.libtaos.taos_field_count(result)
@staticmethod
def fetchFields(result):
return CTaosInterface.libtaos.taos_fetch_fields(result)
# @staticmethod
# def fetchRow(result, fields):
# l = []
# row = CTaosInterface.libtaos.taos_fetch_row(result)
# if not row:
# return None
# for i in range(len(fields)):
# l.append(CTaosInterface.getDataValue(
# row[i], fields[i]['type'], fields[i]['bytes']))
# return tuple(l)
# @staticmethod
# def getDataValue(data, dtype, byte):
# '''
# '''
# if not data:
# return None
# if (dtype == CTaosInterface.TSDB_DATA_TYPE_BOOL):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TINYINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_SMALLINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_INT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BIGINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_FLOAT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_DOUBLE):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BINARY):
# return (ctypes.cast(data, ctypes.POINTER(ctypes.c_char))[0:byte]).rstrip('\x00')
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TIMESTAMP):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_NCHAR):
# return (ctypes.cast(data, ctypes.c_char_p).value).rstrip('\x00')
@staticmethod
def errno(result):
"""Return the error number.
"""
return CTaosInterface.libtaos.taos_errno(result)
@staticmethod
def errStr(result):
"""Return the error styring
"""
return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
if __name__ == '__main__':
cinter = CTaosInterface()
conn = cinter.connect()
result = cinter.query(conn, 'show databases')
print('Query Affected rows: {}'.format(cinter.affectedRows(result)))
fields = CTaosInterface.useResult(result)
data, num_of_rows = CTaosInterface.fetchBlock(result, fields)
print(data)
cinter.freeResult(result)
cinter.close(conn)

View File

@ -1,95 +0,0 @@
from .cursor import TDengineCursor
from .subscription import TDengineSubscription
from .cinterface import CTaosInterface
class TDengineConnection(object):
""" TDengine connection object
"""
def __init__(self, *args, **kwargs):
self._conn = None
self._host = None
self._user = "root"
self._password = "taosdata"
self._database = None
self._port = 0
self._config = None
self._chandle = None
self.config(**kwargs)
def config(self, **kwargs):
# host
if 'host' in kwargs:
self._host = kwargs['host']
# user
if 'user' in kwargs:
self._user = kwargs['user']
# password
if 'password' in kwargs:
self._password = kwargs['password']
# database
if 'database' in kwargs:
self._database = kwargs['database']
# port
if 'port' in kwargs:
self._port = kwargs['port']
# config
if 'config' in kwargs:
self._config = kwargs['config']
self._chandle = CTaosInterface(self._config)
self._conn = self._chandle.connect(
self._host,
self._user,
self._password,
self._database,
self._port)
def close(self):
"""Close current connection.
"""
return CTaosInterface.close(self._conn)
def subscribe(self, restart, topic, sql, interval):
"""Create a subscription.
"""
if self._conn is None:
return None
sub = CTaosInterface.subscribe(
self._conn, restart, topic, sql, interval)
return TDengineSubscription(sub)
def cursor(self):
"""Return a new Cursor object using the connection.
"""
return TDengineCursor(self)
def commit(self):
"""Commit any pending transaction to the database.
Since TDengine does not support transactions, this is a no-op.
"""
pass
def rollback(self):
"""Void functionality
"""
pass
def clear_result_set(self):
"""Clear unused result set on this connection.
"""
pass
if __name__ == "__main__":
conn = TDengineConnection(host='192.168.1.107')
conn.close()
print("Hello world")

View File

@ -1,42 +0,0 @@
"""Constants in TDengine python
"""
from .dbapi import *
class FieldType(object):
"""TDengine Field Types
"""
# type_code
C_NULL = 0
C_BOOL = 1
C_TINYINT = 2
C_SMALLINT = 3
C_INT = 4
C_BIGINT = 5
C_FLOAT = 6
C_DOUBLE = 7
C_BINARY = 8
C_TIMESTAMP = 9
C_NCHAR = 10
C_TINYINT_UNSIGNED = 11
C_SMALLINT_UNSIGNED = 12
C_INT_UNSIGNED = 13
C_BIGINT_UNSIGNED = 14
# NULL value definition
# NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL = 0x02
C_TINYINT_NULL = -128
C_TINYINT_UNSIGNED_NULL = 255
C_SMALLINT_NULL = -32768
C_SMALLINT_UNSIGNED_NULL = 65535
C_INT_NULL = -2147483648
C_INT_UNSIGNED_NULL = 4294967295
C_BIGINT_NULL = -9223372036854775808
C_BIGINT_UNSIGNED_NULL = 18446744073709551615
C_FLOAT_NULL = float('nan')
C_DOUBLE_NULL = float('nan')
C_BINARY_NULL = bytearray([int('0xff', 16)])
# Timestamp precision definition
C_TIMESTAMP_MILLI = 0
C_TIMESTAMP_MICRO = 1

View File

@ -1,280 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
from .constants import FieldType
# querySeqNum = 0
class TDengineCursor(object):
"""Database cursor which is used to manage the context of a fetch operation.
Attributes:
.description: Read-only attribute consists of 7-item sequences:
> name (mandatory)
> type_code (mandatory)
> display_size
> internal_size
> precision
> scale
> null_ok
This attribute will be None for operations that do not return rows or
if the cursor has not had an operation invoked via the .execute*() method yet.
.rowcount: This read-only attribute specifies the number of rows that the last
.execute*() produced (for DQL statements like SELECT) or affected (for DML statements like INSERT).
"""
def __init__(self, connection=None):
self._description = []
self._rowcount = -1
self._connection = None
self._result = None
self._fields = None
self._block = None
self._block_rows = -1
self._block_iter = 0
self._affected_rows = 0
self._logfile = ""
if connection is not None:
self._connection = connection
def __iter__(self):
return self
def __next__(self):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetch iterator")
if self._block_rows <= self._block_iter:
block, self._block_rows = CTaosInterface.fetchRow(
self._result, self._fields)
if self._block_rows == 0:
raise StopIteration
self._block = list(map(tuple, zip(*block)))
self._block_iter = 0
data = self._block[self._block_iter]
self._block_iter += 1
return data
@property
def description(self):
"""Return the description of the object.
"""
return self._description
@property
def rowcount(self):
"""Return the rowcount of the object
"""
return self._rowcount
@property
def affected_rows(self):
"""Return the rowcount of insertion
"""
return self._affected_rows
def callproc(self, procname, *args):
"""Call a stored database procedure with the given name.
Void functionality since no stored procedures.
"""
pass
def log(self, logfile):
self._logfile = logfile
def close(self):
"""Close the cursor.
"""
if self._connection is None:
return False
self._reset_result()
self._connection = None
return True
def execute(self, operation, params=None):
"""Prepare and execute a database operation (query or command).
"""
if not operation:
return None
if not self._connection:
# TODO : change the exception raised here
raise ProgrammingError("Cursor is not connected")
self._reset_result()
stmt = operation
if params is not None:
pass
# global querySeqNum
# querySeqNum += 1
# localSeqNum = querySeqNum # avoid race condition
# print(" >> Exec Query ({}): {}".format(localSeqNum, str(stmt)))
self._result = CTaosInterface.query(self._connection._conn, stmt)
# print(" << Query ({}) Exec Done".format(localSeqNum))
if (self._logfile):
with open(self._logfile, "a") as logfile:
logfile.write("%s;\n" % operation)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno == 0:
if CTaosInterface.fieldsCount(self._result) == 0:
self._affected_rows += CTaosInterface.affectedRows(
self._result)
return CTaosInterface.affectedRows(self._result)
else:
self._fields = CTaosInterface.useResult(
self._result)
return self._handle_result()
else:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
def executemany(self, operation, seq_of_parameters):
"""Prepare a database operation (query or command) and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters.
"""
pass
def fetchone(self):
"""Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
"""
pass
def fetchmany(self):
pass
def istype(self, col, dataType):
if (dataType.upper() == "BOOL"):
if (self._description[col][1] == FieldType.C_BOOL):
return True
if (dataType.upper() == "TINYINT"):
if (self._description[col][1] == FieldType.C_TINYINT):
return True
if (dataType.upper() == "TINYINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_TINYINT_UNSIGNED):
return True
if (dataType.upper() == "SMALLINT"):
if (self._description[col][1] == FieldType.C_SMALLINT):
return True
if (dataType.upper() == "SMALLINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_SMALLINT_UNSIGNED):
return True
if (dataType.upper() == "INT"):
if (self._description[col][1] == FieldType.C_INT):
return True
if (dataType.upper() == "INT UNSIGNED"):
if (self._description[col][1] == FieldType.C_INT_UNSIGNED):
return True
if (dataType.upper() == "BIGINT"):
if (self._description[col][1] == FieldType.C_BIGINT):
return True
if (dataType.upper() == "BIGINT UNSIGNED"):
if (self._description[col][1] == FieldType.C_BIGINT_UNSIGNED):
return True
if (dataType.upper() == "FLOAT"):
if (self._description[col][1] == FieldType.C_FLOAT):
return True
if (dataType.upper() == "DOUBLE"):
if (self._description[col][1] == FieldType.C_DOUBLE):
return True
if (dataType.upper() == "BINARY"):
if (self._description[col][1] == FieldType.C_BINARY):
return True
if (dataType.upper() == "TIMESTAMP"):
if (self._description[col][1] == FieldType.C_TIMESTAMP):
return True
if (dataType.upper() == "NCHAR"):
if (self._description[col][1] == FieldType.C_NCHAR):
return True
return False
def fetchall_row(self):
"""Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
"""
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
block, num_of_fields = CTaosInterface.fetchRow(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields
for i in range(len(self._fields)):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
def fetchall(self):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
block, num_of_fields = CTaosInterface.fetchBlock(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields
for i in range(len(self._fields)):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
def nextset(self):
"""
"""
pass
def setinputsize(self, sizes):
pass
def setutputsize(self, size, column=None):
pass
def _reset_result(self):
"""Reset the result to unused version.
"""
self._description = []
self._rowcount = -1
if self._result is not None:
CTaosInterface.freeResult(self._result)
self._result = None
self._fields = None
self._block = None
self._block_rows = -1
self._block_iter = 0
self._affected_rows = 0
def _handle_result(self):
"""Handle the return result from query.
"""
self._description = []
for ele in self._fields:
self._description.append(
(ele['name'], ele['type'], None, None, None, None, False))
return self._result

View File

@ -1,44 +0,0 @@
"""Type Objects and Constructors.
"""
import time
import datetime
class DBAPITypeObject(object):
def __init__(self, *values):
self.values = values
def __com__(self, other):
if other in self.values:
return 0
if other < self.values:
return 1
else:
return -1
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def DataFromTicks(ticks):
return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6])
Binary = bytes
# STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
# NUMBER = DBAPITypeObject(*constants.FieldType.get_number_types())
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# ROWID = DBAPITypeObject()

View File

@ -1,66 +0,0 @@
"""Python exceptions
"""
class Error(Exception):
def __init__(self, msg=None, errno=None):
self.msg = msg
self._full_msg = self.msg
self.errno = errno
def __str__(self):
return self._full_msg
class Warning(Exception):
"""Exception raised for important warnings like data truncations while inserting.
"""
pass
class InterfaceError(Error):
"""Exception raised for errors that are related to the database interface rather than the database itself.
"""
pass
class DatabaseError(Error):
"""Exception raised for errors that are related to the database.
"""
pass
class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
"""
pass
class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
"""
pass
class IntegrityError(DatabaseError):
"""Exception raised when the relational integrity of the database is affected.
"""
pass
class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal error.
"""
pass
class ProgrammingError(DatabaseError):
"""Exception raised for programming errors.
"""
pass
class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used which is not supported by the database,.
"""
pass

View File

@ -1,57 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
class TDengineSubscription(object):
"""TDengine subscription object
"""
def __init__(self, sub):
self._sub = sub
def consume(self):
"""Consume rows of a subscription
"""
if self._sub is None:
raise OperationalError("Invalid use of consume")
result, fields = CTaosInterface.consume(self._sub)
buffer = [[] for i in range(len(fields))]
while True:
block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
if num_of_fields == 0:
break
for i in range(len(fields)):
buffer[i].extend(block[i])
self.fields = fields
return list(map(tuple, zip(*buffer)))
def close(self, keepProgress=True):
"""Close the Subscription.
"""
if self._sub is None:
return False
CTaosInterface.unsubscribe(self._sub, keepProgress)
return True
if __name__ == '__main__':
from .connection import TDengineConnection
conn = TDengineConnection(
host="127.0.0.1",
user="root",
password="taosdata",
database="test")
# Generate a cursor object to run SQL commands
sub = conn.subscribe(True, "test", "select * from meters;", 1000)
for i in range(0, 10):
data = sub.consume()
for d in data:
print(d)
sub.close()
conn.close()

View File

@ -0,0 +1,34 @@
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="taos",
version="2.0.10",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/taosdata/TDengine/tree/develop/src/connector/python",
packages=setuptools.find_packages(),
classifiers=[
"Environment :: Console",
"Environment :: MacOS X",
"Environment :: Win32 (MS Windows)",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: MacOS",
"Programming Language :: Python :: 2.7",
"Operating System :: Linux",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: Microsoft :: Windows :: Windows 10",
],
)

View File

@ -3,6 +3,7 @@ from .constants import FieldType
from .error import *
import math
import datetime
import platform
def _convert_millisecond_to_datetime(milli):
@ -20,13 +21,6 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
if micro:
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
@ -37,27 +31,16 @@ def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_bool))[
:abs(num_of_rows)]]
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
@ -69,13 +52,6 @@ def _crow_tinyint_unsigned_to_python(
micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ubyte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
@ -86,13 +62,6 @@ def _crow_tinyint_unsigned_to_python(
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
@ -104,13 +73,6 @@ def _crow_smallint_unsigned_to_python(
data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ushort))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
@ -121,10 +83,6 @@ def _crow_smallint_unsigned_to_python(
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
@ -132,13 +90,6 @@ def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
@ -149,10 +100,6 @@ def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
@ -164,13 +111,6 @@ def _crow_bigint_unsigned_to_python(
micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
@ -181,10 +121,6 @@ def _crow_bigint_unsigned_to_python(
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
@ -192,10 +128,6 @@ def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
@ -204,10 +136,6 @@ def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
assert(nbytes is not None)
if num_of_rows > 0:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
@ -236,19 +164,6 @@ def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""
assert(nbytes is not None)
res = []
if num_of_rows > 0:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
data + nbytes * i,
ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
@ -268,20 +183,12 @@ def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""
assert(nbytes is not None)
res = []
if num_of_rows >= 0:
for i in range(abs(num_of_rows)):
try:
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode())
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
res.append((ctypes.cast(data + nbytes * i + 2,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError:
res.append(None)
return res
@ -330,9 +237,33 @@ class TaosField(ctypes.Structure):
# C interface class
def _load_taos_linux():
return ctypes.CDLL('libtaos.so')
def _load_taos_darwin():
return ctypes.CDLL('libtaos.dylib')
def _load_taos_windows():
return ctypes.windll.LoadLibrary('taos')
def _load_taos():
load_func = {
'Linux': _load_taos_linux,
'Darwin': _load_taos_darwin,
'Windows': _load_taos_windows,
}
try:
return load_func[platform.system()]()
except:
sys.exit('unsupported platform to TDengine connector')
class CTaosInterface(object):
libtaos = ctypes.CDLL('libtaos.so')
libtaos = _load_taos()
libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
libtaos.taos_init.restype = None
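The dispatch added above picks the shared library by platform; note that the import hunk earlier in this file adds platform but not sys, and calling sys.exit() during a library import is heavy-handed. A hedged alternative sketch, not the shipped code, that raises an InterfaceError-style exception instead (the exception class below is a stand-in, not the connector's own):

import ctypes
import platform


class InterfaceError(Exception):
    """Stand-in for taos.error.InterfaceError, used only to keep this sketch self-contained."""
    pass


def _load_taos():
    # Map platform.system() to a loader for the matching shared library.
    loaders = {
        'Linux': lambda: ctypes.CDLL('libtaos.so'),
        'Darwin': lambda: ctypes.CDLL('libtaos.dylib'),
        'Windows': lambda: ctypes.windll.LoadLibrary('taos'),
    }
    system = platform.system()
    try:
        return loaders[system]()
    except KeyError:
        raise InterfaceError("unsupported platform for the TDengine connector: %s" % system)
    except OSError as err:
        raise InterfaceError("failed to load the TDengine client library: %s" % err)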

View File

@ -45,6 +45,12 @@ class TDengineCursor(object):
return self
def __next__(self):
return self._taos_next()
def next(self):
return self._taos_next()
def _taos_next(self):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetch iterator")

View File

@ -0,0 +1 @@
../

View File

@ -1,12 +0,0 @@
Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
This program is free software: you can use, redistribute, and/or modify
it under the terms of the GNU Affero General Public License, version 3
or later ("AGPL"), as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@ -1 +0,0 @@
# TDengine python client interface

View File

@ -1,20 +0,0 @@
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="taos",
version="2.0.9",
author="Taosdata Inc.",
author_email="support@taosdata.com",
description="TDengine python client package",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/pypa/sampleproject",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 2",
"Operating System :: Windows",
],
)

View File

@ -1,24 +0,0 @@
from .connection import TDengineConnection
from .cursor import TDengineCursor
# Globals
threadsafety = 0
paramstyle = 'pyformat'
__all__ = ['connection', 'cursor']
def connect(*args, **kwargs):
""" Function to return a TDengine connector object
Current supporting keyword parameters:
@dsn: Data source name as string
@user: Username as string(optional)
@password: Password as string(optional)
@host: Hostname(optional)
@database: Database name(optional)
@rtype: TDengineConnector
"""
return TDengineConnection(*args, **kwargs)
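A minimal sketch of the module-level connect() documented above; all keyword values are placeholders for a local TDengine instance:

import taos

conn = taos.connect(host="127.0.0.1", user="root", password="taosdata")
cursor = conn.cursor()
cursor.execute("show databases")
for row in cursor.fetchall():
    print(row)
print("rows fetched:", cursor.rowcount)
cursor.close()
conn.close()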

View File

@ -1,648 +0,0 @@
import ctypes
from .constants import FieldType
from .error import *
import math
import datetime
def _convert_millisecond_to_datetime(milli):
return datetime.datetime.fromtimestamp(milli / 1000.0)
def _convert_microsecond_to_datetime(micro):
return datetime.datetime.fromtimestamp(micro / 1000000.0)
def _crow_timestamp_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
_timestamp_converter = _convert_millisecond_to_datetime
if micro:
_timestamp_converter = _convert_microsecond_to_datetime
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_NULL else _timestamp_converter(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_int64))[
:abs(num_of_rows)]]
def _crow_bool_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bool row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_byte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BOOL_NULL else bool(ele) for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_bool))[
:abs(num_of_rows)]]
def _crow_tinyint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_TINYINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_byte))[:abs(num_of_rows)]]
def _crow_tinyint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C tinyint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ubyte))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_TINYINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ubyte))[
:abs(num_of_rows)]]
def _crow_smallint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_short))[
:abs(num_of_rows)]]
def _crow_smallint_unsigned_to_python(
data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C smallint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ushort))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_SMALLINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_ushort))[
:abs(num_of_rows)]]
def _crow_int_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_INT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int))[:abs(num_of_rows)]]
def _crow_int_unsigned_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C int row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_INT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint))[
:abs(num_of_rows)]]
def _crow_bigint_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
else:
return [None if ele == FieldType.C_BIGINT_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_int64))[:abs(num_of_rows)]]
def _crow_bigint_unsigned_to_python(
data,
num_of_rows,
nbytes=None,
micro=False):
"""Function to convert C bigint row to python row
"""
if num_of_rows > 0:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint64))[
:abs(num_of_rows)]]
else:
return [
None if ele == FieldType.C_BIGINT_UNSIGNED_NULL else ele for ele in ctypes.cast(
data, ctypes.POINTER(
ctypes.c_uint64))[
:abs(num_of_rows)]]
def _crow_float_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C float row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_float))[:abs(num_of_rows)]]
def _crow_double_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C double row to python row
"""
if num_of_rows > 0:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
else:
return [None if math.isnan(ele) else ele for ele in ctypes.cast(
data, ctypes.POINTER(ctypes.c_double))[:abs(num_of_rows)]]
def _crow_binary_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
assert(nbytes is not None)
if num_of_rows > 0:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
else:
return [None if ele.value[0:1] == FieldType.C_BINARY_NULL else ele.value.decode(
'utf-8') for ele in (ctypes.cast(data, ctypes.POINTER(ctypes.c_char * nbytes)))[:abs(num_of_rows)]]
def _crow_nchar_to_python(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
res = []
for i in range(abs(num_of_rows)):
try:
if num_of_rows >= 0:
tmpstr = ctypes.c_char_p(data)
res.append(tmpstr.value.decode())
else:
res.append((ctypes.cast(data + nbytes * i,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError:
res.append(None)
return res
def _crow_binary_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C binary row to python row
"""
assert(nbytes is not None)
res = []
if num_of_rows > 0:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
data + nbytes * i,
ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
rbyte = ctypes.cast(
data + nbytes * i,
ctypes.POINTER(
ctypes.c_short))[
:1].pop()
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode()[0:rbyte])
except ValueError:
res.append(None)
return res
def _crow_nchar_to_python_block(data, num_of_rows, nbytes=None, micro=False):
"""Function to convert C nchar row to python row
"""
assert(nbytes is not None)
res = []
if num_of_rows >= 0:
for i in range(abs(num_of_rows)):
try:
tmpstr = ctypes.c_char_p(data + nbytes * i + 2)
res.append(tmpstr.value.decode())
except ValueError:
res.append(None)
else:
for i in range(abs(num_of_rows)):
try:
res.append((ctypes.cast(data + nbytes * i + 2,
ctypes.POINTER(ctypes.c_wchar * (nbytes // 4))))[0].value)
except ValueError:
res.append(None)
return res
_CONVERT_FUNC = {
FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python,
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR: _crow_nchar_to_python,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}
_CONVERT_FUNC_BLOCK = {
FieldType.C_BOOL: _crow_bool_to_python,
FieldType.C_TINYINT: _crow_tinyint_to_python,
FieldType.C_SMALLINT: _crow_smallint_to_python,
FieldType.C_INT: _crow_int_to_python,
FieldType.C_BIGINT: _crow_bigint_to_python,
FieldType.C_FLOAT: _crow_float_to_python,
FieldType.C_DOUBLE: _crow_double_to_python,
FieldType.C_BINARY: _crow_binary_to_python_block,
FieldType.C_TIMESTAMP: _crow_timestamp_to_python,
FieldType.C_NCHAR: _crow_nchar_to_python_block,
FieldType.C_TINYINT_UNSIGNED: _crow_tinyint_unsigned_to_python,
FieldType.C_SMALLINT_UNSIGNED: _crow_smallint_unsigned_to_python,
FieldType.C_INT_UNSIGNED: _crow_int_unsigned_to_python,
FieldType.C_BIGINT_UNSIGNED: _crow_bigint_unsigned_to_python
}
# Corresponding TAOS_FIELD structure in C
class TaosField(ctypes.Structure):
_fields_ = [('name', ctypes.c_char * 65),
('type', ctypes.c_char),
('bytes', ctypes.c_short)]
# C interface class
class CTaosInterface(object):
libtaos = ctypes.windll.LoadLibrary('taos')
libtaos.taos_fetch_fields.restype = ctypes.POINTER(TaosField)
libtaos.taos_init.restype = None
libtaos.taos_connect.restype = ctypes.c_void_p
#libtaos.taos_use_result.restype = ctypes.c_void_p
libtaos.taos_fetch_row.restype = ctypes.POINTER(ctypes.c_void_p)
libtaos.taos_errstr.restype = ctypes.c_char_p
libtaos.taos_subscribe.restype = ctypes.c_void_p
libtaos.taos_consume.restype = ctypes.c_void_p
libtaos.taos_fetch_lengths.restype = ctypes.c_void_p
libtaos.taos_free_result.restype = None
libtaos.taos_errno.restype = ctypes.c_int
libtaos.taos_query.restype = ctypes.POINTER(ctypes.c_void_p)
def __init__(self, config=None):
'''
Function to initialize the class
@host : str, hostname to connect
@user : str, username to connect to server
@password : str, password to connect to server
@db : str, default db to use when log in
@config : str, config directory
@rtype : None
'''
if config is None:
self._config = ctypes.c_char_p(None)
else:
try:
self._config = ctypes.c_char_p(config.encode('utf-8'))
except AttributeError:
raise AttributeError("config is expected as a str")
if config is not None:
CTaosInterface.libtaos.taos_options(3, self._config)
CTaosInterface.libtaos.taos_init()
@property
def config(self):
""" Get current config
"""
return self._config
def connect(
self,
host=None,
user="root",
password="taosdata",
db=None,
port=0):
'''
Function to connect to server
@rtype: c_void_p, TDengine handle
'''
# host
try:
_host = ctypes.c_char_p(host.encode(
"utf-8")) if host is not None else ctypes.c_char_p(None)
except AttributeError:
raise AttributeError("host is expected as a str")
# user
try:
_user = ctypes.c_char_p(user.encode("utf-8"))
except AttributeError:
raise AttributeError("user is expected as a str")
# password
try:
_password = ctypes.c_char_p(password.encode("utf-8"))
except AttributeError:
raise AttributeError("password is expected as a str")
# db
try:
_db = ctypes.c_char_p(
db.encode("utf-8")) if db is not None else ctypes.c_char_p(None)
except AttributeError:
raise AttributeError("db is expected as a str")
# port
try:
_port = ctypes.c_int(port)
except TypeError:
raise TypeError("port is expected as an int")
connection = ctypes.c_void_p(CTaosInterface.libtaos.taos_connect(
_host, _user, _password, _db, _port))
if connection.value is None:
print('connect to TDengine failed')
raise ConnectionError("connect to TDengine failed")
# sys.exit(1)
# else:
# print('connect to TDengine success')
return connection
@staticmethod
def close(connection):
'''Close the TDengine handle
'''
CTaosInterface.libtaos.taos_close(connection)
#print('connection is closed')
@staticmethod
def query(connection, sql):
'''Run SQL
@sql: str, sql string to run
@rtype: 0 on success and -1 on failure
'''
try:
return CTaosInterface.libtaos.taos_query(
connection, ctypes.c_char_p(sql.encode('utf-8')))
except AttributeError:
raise AttributeError("sql is expected as a string")
# finally:
# CTaosInterface.libtaos.close(connection)
@staticmethod
def affectedRows(result):
"""The affected rows after runing query
"""
return CTaosInterface.libtaos.taos_affected_rows(result)
@staticmethod
def subscribe(connection, restart, topic, sql, interval):
"""Create a subscription
@restart boolean,
@sql string, sql statement for data query, must be a 'select' statement.
@topic string, name of this subscription
"""
return ctypes.c_void_p(CTaosInterface.libtaos.taos_subscribe(
connection,
1 if restart else 0,
ctypes.c_char_p(topic.encode('utf-8')),
ctypes.c_char_p(sql.encode('utf-8')),
None,
None,
interval))
@staticmethod
def consume(sub):
"""Consume data of a subscription
"""
result = ctypes.c_void_p(CTaosInterface.libtaos.taos_consume(sub))
fields = []
pfields = CTaosInterface.fetchFields(result)
for i in range(CTaosInterface.libtaos.taos_num_fields(result)):
fields.append({'name': pfields[i].name.decode('utf-8'),
'bytes': pfields[i].bytes,
'type': ord(pfields[i].type)})
return result, fields
@staticmethod
def unsubscribe(sub, keepProgress):
"""Cancel a subscription
"""
CTaosInterface.libtaos.taos_unsubscribe(sub, 1 if keepProgress else 0)
@staticmethod
def useResult(result):
'''Use result after calling self.query
'''
fields = []
pfields = CTaosInterface.fetchFields(result)
for i in range(CTaosInterface.fieldsCount(result)):
fields.append({'name': pfields[i].name.decode('utf-8'),
'bytes': pfields[i].bytes,
'type': ord(pfields[i].type)})
return fields
@staticmethod
def fetchBlock(result, fields):
pblock = ctypes.c_void_p(0)
num_of_rows = CTaosInterface.libtaos.taos_fetch_block(
result, ctypes.byref(pblock))
if num_of_rows == 0:
return None, 0
isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC_BLOCK:
raise DatabaseError("Invalid data type returned from database")
blocks[i] = _CONVERT_FUNC_BLOCK[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
return blocks, abs(num_of_rows)
@staticmethod
def fetchRow(result, fields):
pblock = ctypes.c_void_p(0)
pblock = CTaosInterface.libtaos.taos_fetch_row(result)
if pblock:
num_of_rows = 1
isMicro = (CTaosInterface.libtaos.taos_result_precision(
result) == FieldType.C_TIMESTAMP_MICRO)
blocks = [None] * len(fields)
fieldL = CTaosInterface.libtaos.taos_fetch_lengths(result)
fieldLen = [
ele for ele in ctypes.cast(
fieldL, ctypes.POINTER(
ctypes.c_int))[
:len(fields)]]
for i in range(len(fields)):
data = ctypes.cast(pblock, ctypes.POINTER(ctypes.c_void_p))[i]
if fields[i]['type'] not in _CONVERT_FUNC:
raise DatabaseError(
"Invalid data type returned from database")
if data is None:
blocks[i] = [None]
else:
blocks[i] = _CONVERT_FUNC[fields[i]['type']](
data, num_of_rows, fieldLen[i], isMicro)
else:
return None, 0
return blocks, abs(num_of_rows)
@staticmethod
def freeResult(result):
CTaosInterface.libtaos.taos_free_result(result)
result.value = None
@staticmethod
def fieldsCount(result):
return CTaosInterface.libtaos.taos_field_count(result)
@staticmethod
def fetchFields(result):
return CTaosInterface.libtaos.taos_fetch_fields(result)
# @staticmethod
# def fetchRow(result, fields):
# l = []
# row = CTaosInterface.libtaos.taos_fetch_row(result)
# if not row:
# return None
# for i in range(len(fields)):
# l.append(CTaosInterface.getDataValue(
# row[i], fields[i]['type'], fields[i]['bytes']))
# return tuple(l)
# @staticmethod
# def getDataValue(data, dtype, byte):
# '''
# '''
# if not data:
# return None
# if (dtype == CTaosInterface.TSDB_DATA_TYPE_BOOL):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_bool))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TINYINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_byte))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_SMALLINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_short))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_INT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BIGINT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_FLOAT):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_float))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_DOUBLE):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_double))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_BINARY):
# return (ctypes.cast(data, ctypes.POINTER(ctypes.c_char))[0:byte]).rstrip('\x00')
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_TIMESTAMP):
# return ctypes.cast(data, ctypes.POINTER(ctypes.c_int64))[0]
# elif (dtype == CTaosInterface.TSDB_DATA_TYPE_NCHAR):
# return (ctypes.cast(data, ctypes.c_char_p).value).rstrip('\x00')
@staticmethod
def errno(result):
"""Return the error number.
"""
return CTaosInterface.libtaos.taos_errno(result)
@staticmethod
def errStr(result):
"""Return the error styring
"""
return CTaosInterface.libtaos.taos_errstr(result).decode('utf-8')
if __name__ == '__main__':
cinter = CTaosInterface()
conn = cinter.connect()
result = cinter.query(conn, 'show databases')
print('Query Affected rows: {}'.format(cinter.affectedRows(result)))
fields = CTaosInterface.useResult(result)
data, num_of_rows = CTaosInterface.fetchBlock(result, fields)
print(data)
cinter.freeResult(result)
cinter.close(conn)
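The __main__ block above skips error checking; a hedged sketch of the same flow with the errno/errStr helpers wired in, mirroring what TDengineCursor.execute() does. The import path taos.cinterface is assumed from the package layout:

from taos.cinterface import CTaosInterface
from taos.error import ProgrammingError

cinter = CTaosInterface()
conn = cinter.connect()  # same defaults as the __main__ block above
result = CTaosInterface.query(conn, "show databases")
errno = CTaosInterface.errno(result)
if errno != 0:
    # Surface the native error text the same way TDengineCursor.execute() does.
    raise ProgrammingError(CTaosInterface.errStr(result), errno)
fields = CTaosInterface.useResult(result)
data, num_of_rows = CTaosInterface.fetchBlock(result, fields)
print(num_of_rows, data)
CTaosInterface.freeResult(result)
CTaosInterface.close(conn)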

View File

@ -1,96 +0,0 @@
from .cursor import TDengineCursor
from .subscription import TDengineSubscription
from .cinterface import CTaosInterface
class TDengineConnection(object):
""" TDengine connection object
"""
def __init__(self, *args, **kwargs):
self._conn = None
self._host = None
self._user = "root"
self._password = "taosdata"
self._database = None
self._port = 0
self._config = None
self._chandle = None
if len(kwargs) > 0:
self.config(**kwargs)
def config(self, **kwargs):
# host
if 'host' in kwargs:
self._host = kwargs['host']
# user
if 'user' in kwargs:
self._user = kwargs['user']
# password
if 'password' in kwargs:
self._password = kwargs['password']
# database
if 'database' in kwargs:
self._database = kwargs['database']
# port
if 'port' in kwargs:
self._port = kwargs['port']
# config
if 'config' in kwargs:
self._config = kwargs['config']
self._chandle = CTaosInterface(self._config)
self._conn = self._chandle.connect(
self._host,
self._user,
self._password,
self._database,
self._port)
def close(self):
"""Close current connection.
"""
return CTaosInterface.close(self._conn)
def subscribe(self, restart, topic, sql, interval):
"""Create a subscription.
"""
if self._conn is None:
return None
sub = CTaosInterface.subscribe(
self._conn, restart, topic, sql, interval)
return TDengineSubscription(sub)
def cursor(self):
"""Return a new Cursor object using the connection.
"""
return TDengineCursor(self)
def commit(self):
"""Commit any pending transaction to the database.
Since TDengine does not support transactions, this is implemented as a no-op.
"""
pass
def rollback(self):
"""Void functionality
"""
pass
def clear_result_set(self):
"""Clear unused result set on this connection.
"""
pass
if __name__ == "__main__":
conn = TDengineConnection(host='192.168.1.107')
conn.close()
print("Hello world")

View File

@ -1,42 +0,0 @@
"""Constants in TDengine python
"""
from .dbapi import *
class FieldType(object):
"""TDengine Field Types
"""
# type_code
C_NULL = 0
C_BOOL = 1
C_TINYINT = 2
C_SMALLINT = 3
C_INT = 4
C_BIGINT = 5
C_FLOAT = 6
C_DOUBLE = 7
C_BINARY = 8
C_TIMESTAMP = 9
C_NCHAR = 10
C_TINYINT_UNSIGNED = 11
C_SMALLINT_UNSIGNED = 12
C_INT_UNSIGNED = 13
C_BIGINT_UNSIGNED = 14
# NULL value definition
# NOTE: These values should change according to C definition in tsdb.h
C_BOOL_NULL = 0x02
C_TINYINT_NULL = -128
C_TINYINT_UNSIGNED_NULL = 255
C_SMALLINT_NULL = -32768
C_SMALLINT_UNSIGNED_NULL = 65535
C_INT_NULL = -2147483648
C_INT_UNSIGNED_NULL = 4294967295
C_BIGINT_NULL = -9223372036854775808
C_BIGINT_UNSIGNED_NULL = 18446744073709551615
C_FLOAT_NULL = float('nan')
C_DOUBLE_NULL = float('nan')
C_BINARY_NULL = bytearray([int('0xff', 16)])
# Time precision definition
C_TIMESTAMP_MILLI = 0
C_TIMESTAMP_MICRO = 1
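These sentinel values are how NULLs are encoded inside fixed-width columns, and the converters in cinterface.py map them to Python None. A standalone sketch of that mapping for a signed int column; it mirrors _crow_int_to_python rather than importing the private function:

import ctypes

C_INT_NULL = -2147483648  # same sentinel as FieldType.C_INT_NULL

# Simulate a block of three C ints returned by the native library,
# with the middle value carrying the NULL sentinel.
raw = (ctypes.c_int * 3)(1, C_INT_NULL, 3)
data = ctypes.cast(raw, ctypes.c_void_p)

rows = [None if ele == C_INT_NULL else ele
        for ele in ctypes.cast(data, ctypes.POINTER(ctypes.c_int))[:3]]
print(rows)  # [1, None, 3]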

View File

@ -1,220 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
from .constants import FieldType
# querySeqNum = 0
class TDengineCursor(object):
"""Database cursor which is used to manage the context of a fetch operation.
Attributes:
.description: Read-only attribute consisting of 7-item sequences:
> name (mandatory)
> type_code (mandatory)
> display_size
> internal_size
> precision
> scale
> null_ok
This attribute will be None for operations that do not return rows or
if the cursor has not had an operation invoked via the .execute*() method yet.
.rowcount: This read-only attribute specifies the number of rows that the last
.execute*() produced (for DQL statements like SELECT) or affected
(for DML statements like UPDATE or INSERT).
"""
def __init__(self, connection=None):
self._description = []
self._rowcount = -1
self._connection = None
self._result = None
self._fields = None
self._block = None
self._block_rows = -1
self._block_iter = 0
self._affected_rows = 0
self._logfile = ""
if connection is not None:
self._connection = connection
def __iter__(self):
return self
def __next__(self):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetch iterator")
if self._block_rows <= self._block_iter:
block, self._block_rows = CTaosInterface.fetchRow(
self._result, self._fields)
if self._block_rows == 0:
raise StopIteration
self._block = list(map(tuple, zip(*block)))
self._block_iter = 0
data = self._block[self._block_iter]
self._block_iter += 1
return data
@property
def description(self):
"""Return the description of the object.
"""
return self._description
@property
def rowcount(self):
"""Return the rowcount of the object
"""
return self._rowcount
@property
def affected_rows(self):
"""Return the affected_rows of the object
"""
return self._affected_rows
def callproc(self, procname, *args):
"""Call a stored database procedure with the given name.
Void functionality since no stored procedures.
"""
pass
def close(self):
"""Close the cursor.
"""
if self._connection is None:
return False
self._reset_result()
self._connection = None
return True
def execute(self, operation, params=None):
"""Prepare and execute a database operation (query or command).
"""
if not operation:
return None
if not self._connection:
# TODO : change the exception raised here
raise ProgrammingError("Cursor is not connected")
self._reset_result()
stmt = operation
if params is not None:
pass
self._result = CTaosInterface.query(self._connection._conn, stmt)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno == 0:
if CTaosInterface.fieldsCount(self._result) == 0:
self._affected_rows += CTaosInterface.affectedRows(
self._result)
return CTaosInterface.affectedRows(self._result)
else:
self._fields = CTaosInterface.useResult(self._result)
return self._handle_result()
else:
raise ProgrammingError(CTaosInterface.errStr(self._result), errno)
def executemany(self, operation, seq_of_parameters):
"""Prepare a database operation (query or command) and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters.
"""
pass
def fetchone(self):
"""Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
"""
pass
def fetchmany(self):
pass
def fetchall_row(self):
"""Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). Note that the cursor's arraysize attribute can affect the performance of this operation.
"""
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
block, num_of_fields = CTaosInterface.fetchRow(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields
for i in range(len(self._fields)):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
def fetchall(self):
if self._result is None or self._fields is None:
raise OperationalError("Invalid use of fetchall")
buffer = [[] for i in range(len(self._fields))]
self._rowcount = 0
while True:
block, num_of_fields = CTaosInterface.fetchBlock(
self._result, self._fields)
errno = CTaosInterface.libtaos.taos_errno(self._result)
if errno != 0:
raise ProgrammingError(
CTaosInterface.errStr(
self._result), errno)
if num_of_fields == 0:
break
self._rowcount += num_of_fields
for i in range(len(self._fields)):
buffer[i].extend(block[i])
return list(map(tuple, zip(*buffer)))
def nextset(self):
"""
"""
pass
def setinputsize(self, sizes):
pass
def setutputsize(self, size, column=None):
pass
def _reset_result(self):
"""Reset the result to unused version.
"""
self._description = []
self._rowcount = -1
if self._result is not None:
CTaosInterface.freeResult(self._result)
self._result = None
self._fields = None
self._block = None
self._block_rows = -1
self._block_iter = 0
self._affected_rows = 0
def _handle_result(self):
"""Handle the return result from query.
"""
self._description = []
for ele in self._fields:
self._description.append(
(ele['name'], ele['type'], None, None, None, None, False))
return self._result
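After execute(), _handle_result() fills description with 7-item tuples (name, type_code, display_size, internal_size, precision, scale, null_ok), and fetchall() updates rowcount. A short sketch of reading that metadata; the connection values and the meters table are placeholders:

import taos

conn = taos.connect(host="127.0.0.1", user="root",
                    password="taosdata", database="test")
cursor = conn.cursor()
cursor.execute("select * from meters")
for col in cursor.description:
    # col is (name, type_code, display_size, internal_size, precision, scale, null_ok)
    print(col[0], col[1])
rows = cursor.fetchall()
print("fetched %d rows (rowcount=%d)" % (len(rows), cursor.rowcount))
conn.close()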

View File

@ -1,44 +0,0 @@
"""Type Objects and Constructors.
"""
import time
import datetime
class DBAPITypeObject(object):
def __init__(self, *values):
self.values = values
def __com__(self, other):
if other in self.values:
return 0
if other < self.values:
return 1
else:
return -1
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def DataFromTicks(ticks):
return Date(*time.localtime(ticks)[:3])
def TimeFromTicks(ticks):
return Time(*time.localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
return Timestamp(*time.localtime(ticks)[:6])
Binary = bytes
# STRING = DBAPITypeObject(*constants.FieldType.get_string_types())
# BINARY = DBAPITypeObject(*constants.FieldType.get_binary_types())
# NUMBER = BAPITypeObject(*constants.FieldType.get_number_types())
# DATETIME = DBAPITypeObject(*constants.FieldType.get_timestamp_types())
# ROWID = DBAPITypeObject()
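These are the PEP 249 type constructors: Date, Time and Timestamp alias the standard datetime classes, and the *FromTicks helpers build them from a Unix timestamp. A brief standalone sketch mirroring those definitions (it does not import the module being removed here):

import time
import datetime

# Local mirrors of the aliases above, kept standalone for the sketch.
Timestamp = datetime.datetime

def TimestampFromTicks(ticks):
    return Timestamp(*time.localtime(ticks)[:6])

print(Timestamp(2021, 1, 1, 0, 0, 0))   # constructed directly
print(TimestampFromTicks(time.time()))  # built from a Unix timestamp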

View File

@ -1,66 +0,0 @@
"""Python exceptions
"""
class Error(Exception):
def __init__(self, msg=None, errno=None):
self.msg = msg
self._full_msg = self.msg
self.errno = errno
def __str__(self):
return self._full_msg
class Warning(Exception):
"""Exception raised for important warnings like data truncations while inserting.
"""
pass
class InterfaceError(Error):
"""Exception raised for errors that are related to the database interface rather than the database itself.
"""
pass
class DatabaseError(Error):
"""Exception raised for errors that are related to the database.
"""
pass
class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the processed data like division by zero, numeric value out of range.
"""
pass
class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's operation and not necessarily under the control of the programmer
"""
pass
class IntegrityError(DatabaseError):
"""Exception raised when the relational integrity of the database is affected.
"""
pass
class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal error.
"""
pass
class ProgrammingError(DatabaseError):
"""Exception raised for programming errors.
"""
pass
class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used which is not supported by the database,.
"""
pass

View File

@ -1,57 +0,0 @@
from .cinterface import CTaosInterface
from .error import *
class TDengineSubscription(object):
"""TDengine subscription object
"""
def __init__(self, sub):
self._sub = sub
def consume(self):
"""Consume rows of a subscription
"""
if self._sub is None:
raise OperationalError("Invalid use of consume")
result, fields = CTaosInterface.consume(self._sub)
buffer = [[] for i in range(len(fields))]
while True:
block, num_of_fields = CTaosInterface.fetchBlock(result, fields)
if num_of_fields == 0:
break
for i in range(len(fields)):
buffer[i].extend(block[i])
self.fields = fields
return list(map(tuple, zip(*buffer)))
def close(self, keepProgress=True):
"""Close the Subscription.
"""
if self._sub is None:
return False
CTaosInterface.unsubscribe(self._sub, keepProgress)
return True
if __name__ == '__main__':
from .connection import TDengineConnection
conn = TDengineConnection(
host="127.0.0.1",
user="root",
password="taosdata",
database="test")
# Generate a cursor object to run SQL commands
sub = conn.subscribe(True, "test", "select * from meters;", 1000)
for i in range(0, 10):
data = sub.consume()
for d in data:
print(d)
sub.close()
conn.close()

View File

@ -0,0 +1 @@
../

View File

@ -1,12 +0,0 @@
Copyright (c) 2019 TAOS Data, Inc. <jhtao@taosdata.com>
This program is free software: you can use, redistribute, and/or modify
it under the terms of the GNU Affero General Public License, version 3
or later ("AGPL"), as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@ -1 +0,0 @@
# TDengine python client interface

Some files were not shown because too many files have changed in this diff.