Merge branch '3.0' into feature/TD-13066-3.0

Cary Xu 2022-04-29 13:53:43 +08:00
commit 4a926cd8e0
30 changed files with 2043 additions and 569 deletions

View File

@@ -65,10 +65,8 @@ typedef struct SDataBlockInfo {
STimeWindow window;
int32_t rows;
int32_t rowSize;
union {
int64_t uid; // from which table of uid, comes from this data block
int64_t blockId;
};
int64_t uid; // uid of the table this data block comes from
int64_t blockId; // block id, generated by physical planner
uint64_t groupId; // no need to serialize
int16_t numOfCols;
int16_t hasVarCol;
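Splitting the union into two named fields lets a block carry its table uid and its planner-assigned block id at the same time; in the old layout they aliased the same eight bytes, so writing one clobbered the other. A minimal sketch of the difference (struct names here are illustrative, not from the source):

#include <stdint.h>

// before: one 8-byte slot with two meanings -- setting blockId destroyed uid
typedef struct {
  union {
    int64_t uid;
    int64_t blockId;
  };
} SOldIdSketch;

// after: independent fields, both populated for the same block
typedef struct {
  int64_t uid;
  int64_t blockId;
} SNewIdSketch;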

View File

@@ -165,7 +165,7 @@ typedef struct SInputColumnInfoData {
SColumnInfoData *pPTS; // primary timestamp column
SColumnInfoData **pData;
SColumnDataAgg **pColumnDataAgg;
uint64_t uid; // table uid
uint64_t uid; // table uid, used to set the tag value when building the final query result for selectivity functions.
} SInputColumnInfoData;
// sql function runtime context

View File

@@ -40,6 +40,7 @@ typedef enum EFunctionType {
FUNCTION_TYPE_STDDEV,
FUNCTION_TYPE_SUM,
FUNCTION_TYPE_TWA,
FUNCTION_TYPE_HISTOGRAM,
// nonstandard SQL function
FUNCTION_TYPE_BOTTOM = 500,

View File

@@ -388,15 +388,12 @@ typedef struct {
int8_t type;
int8_t replica;
int16_t numOfColumns;
int32_t rowSize;
int32_t numOfRows;
void* pIter;
SMnode* pMnode;
STableMetaRsp* pMeta;
bool sysDbRsp;
char db[TSDB_DB_FNAME_LEN];
int16_t offset[TSDB_MAX_COLUMNS];
int32_t bytes[TSDB_MAX_COLUMNS];
} SShowObj;
typedef struct {

View File

@@ -453,7 +453,7 @@ static int32_t mndRetrieveBnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p
colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false);
char buf[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(buf, pObj->pDnode->ep, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(buf, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, buf, false);

View File

@@ -194,7 +194,7 @@ static int32_t mndRetrieveClusters(SNodeMsg *pMsg, SShowObj *pShow, SSDataBlock
colDataAppend(pColInfo, numOfRows, (const char*) &pCluster->id, false);
char buf[tListLen(pCluster->name) + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(buf, pCluster->name, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(buf, pCluster->name, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, buf, false);

View File

@@ -1387,12 +1387,13 @@ static void dumpDbInfoData(SSDataBlock *pBlock, SDbObj *pDb, SShowObj *pShow, in
bool sysDb) {
int32_t cols = 0;
char *buf = taosMemoryMalloc(pShow->bytes[cols]);
int32_t bytes = pShow->pMeta->pSchemas[cols].bytes;
char *buf = taosMemoryMalloc(bytes);
const char *name = mndGetDbStr(pDb->name);
if (name != NULL) {
STR_WITH_MAXSIZE_TO_VARSTR(buf, name, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(buf, name, bytes);
} else {
STR_WITH_MAXSIZE_TO_VARSTR(buf, "NULL", pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(buf, "NULL", bytes);
}
char *status = "ready";

View File

@@ -705,7 +705,7 @@ static int32_t mndRetrieveDnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p
colDataAppend(pColInfo, numOfRows, (const char *)&pDnode->id, false);
char buf[tListLen(pDnode->ep) + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(buf, pDnode->ep, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(buf, pDnode->ep, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, buf, false);

View File

@@ -517,14 +517,14 @@ static int32_t mndRetrieveFuncs(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB
cols = 0;
char b1[tListLen(pFunc->name) + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(b1, pFunc->name, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(b1, pFunc->name, pShow->pMeta->pSchemas[cols].bytes);
SColumnInfoData *pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)b1, false);
if (pFunc->pComment) {
char *b2 = taosMemoryCalloc(1, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(b2, pFunc->pComment, pShow->bytes[cols]);
char *b2 = taosMemoryCalloc(1, pShow->pMeta->pSchemas[cols].bytes);
STR_WITH_MAXSIZE_TO_VARSTR(b2, pFunc->pComment, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)b2, false);
@@ -540,7 +540,7 @@ static int32_t mndRetrieveFuncs(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB
char b3[TSDB_TYPE_STR_MAX_LEN] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(b3, mnodeGenTypeStr(buf, TSDB_TYPE_STR_MAX_LEN, pFunc->outputType, pFunc->outputLen),
pShow->bytes[cols]);
pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)b3, false);

View File

@@ -619,14 +619,14 @@ static int32_t mndRetrieveMnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p
colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false);
char b1[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(b1, pObj->pDnode->ep, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(b1, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, b1, false);
const char *roles = syncStr(pObj->role);
char *b2 = taosMemoryCalloc(1, strlen(roles) + VARSTR_HEADER_SIZE);
STR_WITH_MAXSIZE_TO_VARSTR(b2, roles, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(b2, roles, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)b2, false);

View File

@@ -570,38 +570,39 @@ static int32_t mndRetrieveConns(SNodeMsg *pReq, SShowObj *pShow, char *data, int
if (pConn == NULL) break;
cols = 0;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
#if 0
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(uint32_t *)pWrite = pConn->id;
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->bytes[cols]);
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->pMeta->pSchemas[cols].bytes);
cols++;
// app name
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->app, pShow->bytes[cols]);
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->app, pShow->pMeta->pSchemas[cols].bytes);
cols++;
// app pid
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int32_t *)pWrite = pConn->pid;
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
taosIpPort2String(pConn->ip, pConn->port, ipStr);
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, ipStr, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, ipStr, pShow->pMeta->pSchemas[cols].bytes);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int64_t *)pWrite = pConn->loginTimeMs;
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
if (pConn->lastAccessTimeMs < pConn->loginTimeMs) pConn->lastAccessTimeMs = pConn->loginTimeMs;
*(int64_t *)pWrite = pConn->lastAccessTimeMs;
cols++;
#endif
numOfRows++;
}
@@ -643,67 +644,67 @@ static int32_t mndRetrieveQueries(SNodeMsg *pReq, SShowObj *pShow, char *data, i
SQueryDesc *pDesc = pConn->pQueries + i;
cols = 0;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int64_t *)pWrite = htobe64(pDesc->queryId);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int64_t *)pWrite = htobe64(pConn->id);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->bytes[cols]);
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pConn->user, pShow->pMeta->pSchemas[cols].bytes);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
snprintf(str, tListLen(str), "%s:%u", taosIpStr(pConn->ip), pConn->port);
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->pMeta->pSchemas[cols].bytes);
cols++;
char handleBuf[24] = {0};
snprintf(handleBuf, tListLen(handleBuf), "%" PRIu64, htobe64(pDesc->qId));
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, handleBuf, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, handleBuf, pShow->pMeta->pSchemas[cols].bytes);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int64_t *)pWrite = htobe64(pDesc->stime);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int64_t *)pWrite = htobe64(pDesc->useconds);
cols++;
snprintf(str, tListLen(str), "0x%" PRIx64, htobe64(pDesc->sqlObjId));
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->bytes[cols]);
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, str, pShow->pMeta->pSchemas[cols].bytes);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int32_t *)pWrite = htonl(pDesc->pid);
cols++;
char epBuf[TSDB_EP_LEN + 1] = {0};
snprintf(epBuf, tListLen(epBuf), "%s:%u", pDesc->fqdn, pConn->port);
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, epBuf, pShow->bytes[cols]);
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, epBuf, pShow->pMeta->pSchemas[cols].bytes);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(bool *)pWrite = pDesc->stableQuery;
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
*(int32_t *)pWrite = htonl(pDesc->numOfSub);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->subSqlInfo, pShow->bytes[cols]);
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->subSqlInfo, pShow->pMeta->pSchemas[cols].bytes);
cols++;
pWrite = data + pShow->offset[cols] * rows + pShow->bytes[cols] * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->sql, pShow->bytes[cols]);
pWrite = data + pShow->offset[cols] * rows + pShow->pMeta->pSchemas[cols].bytes * numOfRows;
STR_WITH_MAXSIZE_TO_VARSTR(pWrite, pDesc->sql, pShow->pMeta->pSchemas[cols].bytes);
cols++;
numOfRows++;
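The repeated pWrite arithmetic in the two retrieve functions above encodes a column-major result buffer: each column occupies a contiguous region starting at offset[col] * rows, and within that region each row cell is one column-width apart. A hedged sketch of the addressing, with the helper name hypothetical and the offset/width arrays standing in for the SShowObj fields this commit touches:

#include <stdint.h>

// address of cell (col, row) in the legacy column-major show buffer
static char *cellAddr(char *data, const int32_t *offset, const int32_t *bytes,
                      int32_t rows, int32_t col, int32_t row) {
  return data + offset[col] * rows  // start of this column's region
         + bytes[col] * row;        // plus row index * per-cell width
}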

View File

@@ -517,7 +517,7 @@ static int32_t mndRetrieveQnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p
colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false);
char ep[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)ep, false);

View File

@@ -209,16 +209,6 @@ static int32_t mndProcessRetrieveSysTableReq(SNodeMsg *pReq) {
pShow->pMeta = pMeta;
pShow->numOfColumns = pShow->pMeta->numOfColumns;
int32_t offset = 0;
for (int32_t i = 0; i < pShow->pMeta->numOfColumns; ++i) {
pShow->offset[i] = offset;
int32_t bytes = pShow->pMeta->pSchemas[i].bytes;
pShow->rowSize += bytes;
pShow->bytes[i] = bytes;
offset += bytes;
}
} else {
pShow = mndAcquireShowObj(pMnode, retrieveReq.showId);
if (pShow == NULL) {
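The deleted loop had been caching per-column offsets and widths on the SShowObj; with it gone, every call site reads the width straight from the schema attached to the show object, which is what all the pShow->bytes[cols] to pShow->pMeta->pSchemas[cols].bytes substitutions in this commit amount to. A minimal sketch of the new lookup, with the structs mocked down to just the fields involved:

#include <stdint.h>

// minimal mocks -- the real SShowObj and STableMetaRsp live in the mnode headers
typedef struct { int32_t bytes; } SSchemaSketch;
typedef struct { SSchemaSketch *pSchemas; } SMetaSketch;
typedef struct { SMetaSketch *pMeta; } SShowSketch;

static int32_t showColBytes(const SShowSketch *pShow, int32_t col) {
  return pShow->pMeta->pSchemas[col].bytes;  // the schema is the single source of truth
}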

View File

@@ -463,7 +463,7 @@ static int32_t mndRetrieveSnodes(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *p
colDataAppend(pColInfo, numOfRows, (const char *)&pObj->id, false);
char ep[TSDB_EP_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(ep, pObj->pDnode->ep, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)ep, false);

View File

@@ -1351,17 +1351,17 @@ static int32_t mndRetrieveTrans(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB
colDataAppend(pColInfo, numOfRows, (const char *)&pTrans->createdTime, false);
char stage[TSDB_TRANS_STAGE_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(stage, mndTransStr(pTrans->stage), pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(stage, mndTransStr(pTrans->stage), pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)stage, false);
char dbname[TSDB_DB_NAME_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(dbname, mndGetDbStr(pTrans->dbname), pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(dbname, mndGetDbStr(pTrans->dbname), pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)dbname, false);
char transType[TSDB_TRANS_TYPE_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(dbname, mndTransType(pTrans->transType), pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(transType, mndTransType(pTrans->transType), pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)transType, false);
@@ -1369,7 +1369,7 @@ static int32_t mndRetrieveTrans(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB
colDataAppend(pColInfo, numOfRows, (const char *)&pTrans->lastExecTime, false);
char lastError[TSDB_TRANS_ERROR_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(dbname, pTrans->lastError, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(lastError, pTrans->lastError, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)lastError, false);

View File

@@ -657,7 +657,7 @@ static int32_t mndRetrieveUsers(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock *pB
SColumnInfoData *pColInfo = taosArrayGet(pBlock->pDataBlock, cols);
char name[TSDB_USER_LEN + VARSTR_HEADER_SIZE] = {0};
STR_WITH_MAXSIZE_TO_VARSTR(name, pUser->user, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(name, pUser->user, pShow->pMeta->pSchemas[cols].bytes);
colDataAppend(pColInfo, numOfRows, (const char *)name, false);

View File

@@ -540,7 +540,7 @@ static int32_t mndRetrieveVgroups(SNodeMsg *pReq, SShowObj *pShow, SSDataBlock*
char buf1[20] = {0};
const char *role = syncStr(pVgroup->vnodeGid[i].role);
STR_WITH_MAXSIZE_TO_VARSTR(buf1, role, pShow->bytes[cols]);
STR_WITH_MAXSIZE_TO_VARSTR(buf1, role, pShow->pMeta->pSchemas[cols].bytes);
pColInfo = taosArrayGet(pBlock->pDataBlock, cols++);
colDataAppend(pColInfo, numOfRows, (const char *)buf1, false);

View File

@@ -54,7 +54,9 @@ typedef struct SIFParam {
typedef int32_t (*sif_func_t)(SNode *left, SNode *rigth, SIFParam *output);
// construct tag filter operator later
static void destroyTagFilterOperatorInfo(void *param) { STagFilterOperatorInfo *pInfo = (STagFilterOperatorInfo *)param; }
static void destroyTagFilterOperatorInfo(void *param) {
STagFilterOperatorInfo *pInfo = (STagFilterOperatorInfo *)param;
}
static void sifFreeParam(SIFParam *param) {
if (param == NULL) return;
@@ -62,8 +64,9 @@ }
}
static int32_t sifGetOperParamNum(EOperatorType ty) {
if (OP_TYPE_IS_NULL == ty || OP_TYPE_IS_NOT_NULL == ty || OP_TYPE_IS_TRUE == ty || OP_TYPE_IS_NOT_TRUE == ty || OP_TYPE_IS_FALSE == ty ||
OP_TYPE_IS_NOT_FALSE == ty || OP_TYPE_IS_UNKNOWN == ty || OP_TYPE_IS_NOT_UNKNOWN == ty || OP_TYPE_MINUS == ty) {
if (OP_TYPE_IS_NULL == ty || OP_TYPE_IS_NOT_NULL == ty || OP_TYPE_IS_TRUE == ty || OP_TYPE_IS_NOT_TRUE == ty ||
OP_TYPE_IS_FALSE == ty || OP_TYPE_IS_NOT_FALSE == ty || OP_TYPE_IS_UNKNOWN == ty ||
OP_TYPE_IS_NOT_UNKNOWN == ty || OP_TYPE_MINUS == ty) {
return 1;
}
return 2;
@@ -267,7 +270,8 @@ _return:
static int32_t sifExecLogic(SLogicConditionNode *node, SIFCtx *ctx, SIFParam *output) {
if (NULL == node->pParameterList || node->pParameterList->length <= 0) {
qError("invalid logic parameter list, list:%p, paramNum:%d", node->pParameterList, node->pParameterList ? node->pParameterList->length : 0);
qError("invalid logic parameter list, list:%p, paramNum:%d", node->pParameterList,
node->pParameterList ? node->pParameterList->length : 0);
return TSDB_CODE_QRY_INVALID_INPUT;
}
@@ -341,7 +345,8 @@ static EDealRes sifWalkOper(SNode *pNode, void *context) {
}
EDealRes sifCalcWalker(SNode *node, void *context) {
if (QUERY_NODE_VALUE == nodeType(node) || QUERY_NODE_NODE_LIST == nodeType(node) || QUERY_NODE_COLUMN == nodeType(node)) {
if (QUERY_NODE_VALUE == nodeType(node) || QUERY_NODE_NODE_LIST == nodeType(node) ||
QUERY_NODE_COLUMN == nodeType(node)) {
return DEAL_RES_CONTINUE;
}
SIFCtx *ctx = (SIFCtx *)context;
@@ -383,21 +388,20 @@ static int32_t sifCalculate(SNode *pNode, SIFParam *pDst) {
return TSDB_CODE_QRY_OUT_OF_MEMORY;
}
nodesWalkExprPostOrder(pNode, sifCalcWalker, &ctx);
if (ctx.code != TSDB_CODE_SUCCESS) {
return ctx.code;
}
SIF_ERR_RET(ctx.code);
if (pDst) {
SIFParam *res = (SIFParam *)taosHashGet(ctx.pRes, (void *)&pNode, POINTER_BYTES);
if (res == NULL) {
qError("no valid res in hash, node:(%p), type(%d)", (void *)&pNode, nodeType(pNode));
return TSDB_CODE_QRY_APP_ERROR;
SIF_ERR_RET(TSDB_CODE_QRY_APP_ERROR);
}
taosArrayAddAll(pDst->result, res->result);
sifFreeParam(res);
taosHashRemove(ctx.pRes, (void *)&pNode, POINTER_BYTES);
}
return TSDB_CODE_SUCCESS;
SIF_RET(code);
}
int32_t doFilterTag(const SNode *pFilterNode, SArray *result) {
@@ -407,22 +411,14 @@ int32_t doFilterTag(const SNode *pFilterNode, SArray *result) {
SFilterInfo *filter = NULL;
// todo move to the initialization function
int32_t code = filterInitFromNode((SNode *)pFilterNode, &filter, 0);
if (code != TSDB_CODE_SUCCESS) {
return code;
}
SIF_ERR_RET(filterInitFromNode((SNode *)pFilterNode, &filter, 0));
SIFParam param = {0};
code = sifCalculate((SNode *)pFilterNode, &param);
if (code != TSDB_CODE_SUCCESS) {
return code;
}
SIF_ERR_RET(sifCalculate((SNode *)pFilterNode, &param));
taosArrayAddAll(result, param.result);
sifFreeParam(&param);
return code;
SIF_RET(TSDB_CODE_SUCCESS);
}
SIdxFltStatus idxGetFltStatus(SNode *pFilterNode) {
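SIF_ERR_RET and SIF_RET are not defined in this diff; a plausible reading, following the ERR_RET-style early-return macros used elsewhere in the codebase (an assumption, not the actual header -- the real macros may also record the code in terrno):

#define SIF_ERR_RET(c)                \
  do {                                \
    int32_t _code = (c);              \
    if (_code != TSDB_CODE_SUCCESS) { \
      return _code;                   \
    }                                 \
  } while (0)

#define SIF_RET(c) return (c)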

View File

@@ -184,8 +184,6 @@ int32_t loadDataBlock(SOperatorInfo* pOperator, STableScanInfo* pTableScanInfo,
qDebug("%s data block skipped, brange:%" PRId64 "-%" PRId64 ", rows:%d", GET_TASKID(pTaskInfo),
pBlockInfo->window.skey, pBlockInfo->window.ekey, pBlockInfo->rows);
pCost->skipBlocks += 1;
pBlock->info.blockId = 0;
return TSDB_CODE_SUCCESS;
} else if (*status == FUNC_DATA_REQUIRED_STATIS_LOAD) {
pCost->loadBlockStatis += 1;
@@ -206,7 +204,6 @@ int32_t loadDataBlock(SOperatorInfo* pOperator, STableScanInfo* pTableScanInfo,
pBlock->pBlockAgg[pColMatchInfo->targetSlotId] = pColAgg[i];
}
pBlock->info.blockId = 0;
return TSDB_CODE_SUCCESS;
} else { // failed to load the block sma data, data block statistics does not exist, load data block instead
*status = FUNC_DATA_REQUIRED_DATA_LOAD;
@@ -235,11 +232,6 @@ int32_t loadDataBlock(SOperatorInfo* pOperator, STableScanInfo* pTableScanInfo,
}
relocateColumnData(pBlock, pTableScanInfo->pColMatchInfo, pCols);
// reset the block to be 0 by default, this blockId is assigned by physical plan and is used by direct upstream
// operator.
pBlock->info.blockId = 0;
doFilter(pTableScanInfo->pFilterNode, pBlock);
if (pBlock->info.rows == 0) {
pCost->filterOutBlocks += 1;

View File

@@ -68,6 +68,11 @@ bool getTopBotFuncEnv(SFunctionNode* UNUSED_PARAM(pFunc), SFuncExecEnv* pEnv);
int32_t topFunction(SqlFunctionCtx *pCtx);
int32_t topBotFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock);
bool getSpreadFuncEnv(struct SFunctionNode* pFunc, SFuncExecEnv* pEnv);
bool spreadFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo);
int32_t spreadFunction(SqlFunctionCtx* pCtx);
int32_t spreadFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock);
#ifdef __cplusplus
}
#endif

View File

@@ -212,7 +212,16 @@ static int32_t translateBottom(SFunctionNode* pFunc, char* pErrBuf, int32_t len)
}
static int32_t translateSpread(SFunctionNode* pFunc, char* pErrBuf, int32_t len) {
// todo
if (1 != LIST_LENGTH(pFunc->pParameterList)) {
return invaildFuncParaNumErrMsg(pErrBuf, len, pFunc->functionName);
}
uint8_t paraType = ((SExprNode*)nodesListGetNode(pFunc->pParameterList, 0))->resType.type;
if (!IS_NUMERIC_TYPE(paraType) && TSDB_DATA_TYPE_TIMESTAMP != paraType) {
return invaildFuncParaTypeErrMsg(pErrBuf, len, pFunc->functionName);
}
pFunc->node.resType = (SDataType) { .bytes = tDataTypes[TSDB_DATA_TYPE_DOUBLE].bytes, .type = TSDB_DATA_TYPE_DOUBLE };
return TSDB_CODE_SUCCESS;
}
@@ -431,7 +440,8 @@ static int32_t translateToJson(SFunctionNode* pFunc, char* pErrBuf, int32_t len)
}
const SBuiltinFuncDefinition funcMgtBuiltins[] = {
{.name = "count",
{
.name = "count",
.type = FUNCTION_TYPE_COUNT,
.classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED,
.translateFunc = translateCount,
@@ -439,8 +449,10 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = {
.getEnvFunc = getCountFuncEnv,
.initFunc = functionSetup,
.processFunc = countFunction,
.finalizeFunc = functionFinalize},
{.name = "sum",
.finalizeFunc = functionFinalize
},
{
.name = "sum",
.type = FUNCTION_TYPE_SUM,
.classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED,
.translateFunc = translateSum,
@@ -448,8 +460,10 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = {
.getEnvFunc = getSumFuncEnv,
.initFunc = functionSetup,
.processFunc = sumFunction,
.finalizeFunc = functionFinalize},
{.name = "min",
.finalizeFunc = functionFinalize
},
{
.name = "min",
.type = FUNCTION_TYPE_MIN,
.classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED,
.translateFunc = translateInOutNum,
@@ -457,8 +471,10 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = {
.getEnvFunc = getMinmaxFuncEnv,
.initFunc = minFunctionSetup,
.processFunc = minFunction,
.finalizeFunc = functionFinalize},
{.name = "max",
.finalizeFunc = functionFinalize
},
{
.name = "max",
.type = FUNCTION_TYPE_MAX,
.classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_SPECIAL_DATA_REQUIRED,
.translateFunc = translateInOutNum,
@@ -466,39 +482,48 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = {
.getEnvFunc = getMinmaxFuncEnv,
.initFunc = maxFunctionSetup,
.processFunc = maxFunction,
.finalizeFunc = functionFinalize},
{.name = "stddev",
.finalizeFunc = functionFinalize
},
{
.name = "stddev",
.type = FUNCTION_TYPE_STDDEV,
.classification = FUNC_MGT_AGG_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = getStddevFuncEnv,
.initFunc = stddevFunctionSetup,
.processFunc = stddevFunction,
.finalizeFunc = stddevFinalize},
{.name = "avg",
.finalizeFunc = stddevFinalize
},
{
.name = "avg",
.type = FUNCTION_TYPE_AVG,
.classification = FUNC_MGT_AGG_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = getAvgFuncEnv,
.initFunc = avgFunctionSetup,
.processFunc = avgFunction,
.finalizeFunc = avgFinalize},
{.name = "percentile",
.finalizeFunc = avgFinalize
},
{
.name = "percentile",
.type = FUNCTION_TYPE_PERCENTILE,
.classification = FUNC_MGT_AGG_FUNC,
.translateFunc = translatePercentile,
.getEnvFunc = getPercentileFuncEnv,
.initFunc = percentileFunctionSetup,
.processFunc = percentileFunction,
.finalizeFunc = percentileFinalize},
{.name = "apercentile",
.finalizeFunc = percentileFinalize
},
{
.name = "apercentile",
.type = FUNCTION_TYPE_APERCENTILE,
.classification = FUNC_MGT_AGG_FUNC,
.translateFunc = translateApercentile,
.getEnvFunc = getMinmaxFuncEnv,
.initFunc = maxFunctionSetup,
.processFunc = maxFunction,
.finalizeFunc = functionFinalize},
.finalizeFunc = functionFinalize
},
{
.name = "top",
.type = FUNCTION_TYPE_TOP,
@@ -509,357 +534,447 @@ const SBuiltinFuncDefinition funcMgtBuiltins[] = {
.processFunc = topFunction,
.finalizeFunc = topBotFinalize,
},
{.name = "bottom",
{
.name = "bottom",
.type = FUNCTION_TYPE_BOTTOM,
.classification = FUNC_MGT_AGG_FUNC,
.translateFunc = translateBottom,
.getEnvFunc = getMinmaxFuncEnv,
.initFunc = maxFunctionSetup,
.processFunc = maxFunction,
.finalizeFunc = functionFinalize},
{.name = "spread",
.finalizeFunc = functionFinalize
},
{
.name = "spread",
.type = FUNCTION_TYPE_SPREAD,
.classification = FUNC_MGT_AGG_FUNC,
.translateFunc = translateSpread,
.getEnvFunc = getMinmaxFuncEnv,
.initFunc = maxFunctionSetup,
.processFunc = maxFunction,
.finalizeFunc = functionFinalize},
{.name = "last_row",
.dataRequiredFunc = statisDataRequired,
.getEnvFunc = getSpreadFuncEnv,
.initFunc = spreadFunctionSetup,
.processFunc = spreadFunction,
.finalizeFunc = spreadFinalize
},
{
.name = "last_row",
.type = FUNCTION_TYPE_LAST_ROW,
.classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC,
.translateFunc = translateLastRow,
.getEnvFunc = getMinmaxFuncEnv,
.initFunc = maxFunctionSetup,
.processFunc = maxFunction,
.finalizeFunc = functionFinalize},
{.name = "first",
.finalizeFunc = functionFinalize
},
{
.name = "first",
.type = FUNCTION_TYPE_FIRST,
.classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC,
.translateFunc = translateFirstLast,
.getEnvFunc = getFirstLastFuncEnv,
.initFunc = functionSetup,
.processFunc = firstFunction,
.finalizeFunc = functionFinalize},
{.name = "last",
.finalizeFunc = functionFinalize
},
{
.name = "last",
.type = FUNCTION_TYPE_LAST,
.classification = FUNC_MGT_AGG_FUNC | FUNC_MGT_MULTI_RES_FUNC,
.translateFunc = translateFirstLast,
.getEnvFunc = getFirstLastFuncEnv,
.initFunc = functionSetup,
.processFunc = lastFunction,
.finalizeFunc = functionFinalize},
{.name = "diff",
.finalizeFunc = functionFinalize
},
{
.name = "diff",
.type = FUNCTION_TYPE_DIFF,
.classification = FUNC_MGT_NONSTANDARD_SQL_FUNC,
.translateFunc = translateInOutNum,
.getEnvFunc = getDiffFuncEnv,
.initFunc = diffFunctionSetup,
.processFunc = diffFunction,
.finalizeFunc = functionFinalize},
{.name = "abs",
.finalizeFunc = functionFinalize
},
{
.name = "abs",
.type = FUNCTION_TYPE_ABS,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInOutNum,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = absFunction,
.finalizeFunc = NULL},
{.name = "log",
.finalizeFunc = NULL
},
{
.name = "log",
.type = FUNCTION_TYPE_LOG,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateIn2NumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = logFunction,
.finalizeFunc = NULL},
{.name = "pow",
.finalizeFunc = NULL
},
{
.name = "pow",
.type = FUNCTION_TYPE_POW,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateIn2NumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = powFunction,
.finalizeFunc = NULL},
{.name = "sqrt",
.finalizeFunc = NULL
},
{
.name = "sqrt",
.type = FUNCTION_TYPE_SQRT,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = sqrtFunction,
.finalizeFunc = NULL},
{.name = "ceil",
.finalizeFunc = NULL
},
{
.name = "ceil",
.type = FUNCTION_TYPE_CEIL,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInOutNum,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = ceilFunction,
.finalizeFunc = NULL},
{.name = "floor",
.finalizeFunc = NULL
},
{
.name = "floor",
.type = FUNCTION_TYPE_FLOOR,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInOutNum,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = floorFunction,
.finalizeFunc = NULL},
{.name = "round",
.finalizeFunc = NULL
},
{
.name = "round",
.type = FUNCTION_TYPE_ROUND,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInOutNum,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = roundFunction,
.finalizeFunc = NULL},
{.name = "sin",
.finalizeFunc = NULL
},
{
.name = "sin",
.type = FUNCTION_TYPE_SIN,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = sinFunction,
.finalizeFunc = NULL},
{.name = "cos",
.finalizeFunc = NULL
},
{
.name = "cos",
.type = FUNCTION_TYPE_COS,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = cosFunction,
.finalizeFunc = NULL},
{.name = "tan",
.finalizeFunc = NULL
},
{
.name = "tan",
.type = FUNCTION_TYPE_TAN,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = tanFunction,
.finalizeFunc = NULL},
{.name = "asin",
.finalizeFunc = NULL
},
{
.name = "asin",
.type = FUNCTION_TYPE_ASIN,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = asinFunction,
.finalizeFunc = NULL},
{.name = "acos",
.finalizeFunc = NULL
},
{
.name = "acos",
.type = FUNCTION_TYPE_ACOS,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = acosFunction,
.finalizeFunc = NULL},
{.name = "atan",
.finalizeFunc = NULL
},
{
.name = "atan",
.type = FUNCTION_TYPE_ATAN,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateInNumOutDou,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = atanFunction,
.finalizeFunc = NULL},
{.name = "length",
.finalizeFunc = NULL
},
{
.name = "length",
.type = FUNCTION_TYPE_LENGTH,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateLength,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = lengthFunction,
.finalizeFunc = NULL},
{.name = "char_length",
.finalizeFunc = NULL
},
{
.name = "char_length",
.type = FUNCTION_TYPE_CHAR_LENGTH,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateLength,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = charLengthFunction,
.finalizeFunc = NULL},
{.name = "concat",
.finalizeFunc = NULL
},
{
.name = "concat",
.type = FUNCTION_TYPE_CONCAT,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateConcat,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = concatFunction,
.finalizeFunc = NULL},
{.name = "concat_ws",
.finalizeFunc = NULL
},
{
.name = "concat_ws",
.type = FUNCTION_TYPE_CONCAT_WS,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateConcatWs,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = concatWsFunction,
.finalizeFunc = NULL},
{.name = "lower",
.finalizeFunc = NULL
},
{
.name = "lower",
.type = FUNCTION_TYPE_LOWER,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateInOutStr,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = lowerFunction,
.finalizeFunc = NULL},
{.name = "upper",
.finalizeFunc = NULL
},
{
.name = "upper",
.type = FUNCTION_TYPE_UPPER,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateInOutStr,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = upperFunction,
.finalizeFunc = NULL},
{.name = "ltrim",
.finalizeFunc = NULL
},
{
.name = "ltrim",
.type = FUNCTION_TYPE_LTRIM,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateInOutStr,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = ltrimFunction,
.finalizeFunc = NULL},
{.name = "rtrim",
.finalizeFunc = NULL
},
{
.name = "rtrim",
.type = FUNCTION_TYPE_RTRIM,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateInOutStr,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = rtrimFunction,
.finalizeFunc = NULL},
{.name = "substr",
.finalizeFunc = NULL
},
{
.name = "substr",
.type = FUNCTION_TYPE_SUBSTR,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_STRING_FUNC,
.translateFunc = translateSubstr,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = substrFunction,
.finalizeFunc = NULL},
{.name = "cast",
.finalizeFunc = NULL
},
{
.name = "cast",
.type = FUNCTION_TYPE_CAST,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateCast,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = castFunction,
.finalizeFunc = NULL},
{.name = "to_iso8601",
.finalizeFunc = NULL
},
{
.name = "to_iso8601",
.type = FUNCTION_TYPE_TO_ISO8601,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateToIso8601,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = toISO8601Function,
.finalizeFunc = NULL},
{.name = "to_unixtimestamp",
.finalizeFunc = NULL
},
{
.name = "to_unixtimestamp",
.type = FUNCTION_TYPE_TO_UNIXTIMESTAMP,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateToUnixtimestamp,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = toUnixtimestampFunction,
.finalizeFunc = NULL},
{.name = "timetruncate",
.finalizeFunc = NULL
},
{
.name = "timetruncate",
.type = FUNCTION_TYPE_TIMETRUNCATE,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateTimeTruncate,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = timeTruncateFunction,
.finalizeFunc = NULL},
{.name = "timediff",
.finalizeFunc = NULL
},
{
.name = "timediff",
.type = FUNCTION_TYPE_TIMEDIFF,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateTimeDiff,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = timeDiffFunction,
.finalizeFunc = NULL},
{.name = "now",
.finalizeFunc = NULL
},
{
.name = "now",
.type = FUNCTION_TYPE_NOW,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC,
.translateFunc = translateTimePseudoColumn,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = nowFunction,
.finalizeFunc = NULL},
{.name = "today",
.finalizeFunc = NULL
},
{
.name = "today",
.type = FUNCTION_TYPE_TODAY,
.classification = FUNC_MGT_SCALAR_FUNC | FUNC_MGT_DATETIME_FUNC,
.translateFunc = translateTimePseudoColumn,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = todayFunction,
.finalizeFunc = NULL},
{.name = "timezone",
.finalizeFunc = NULL
},
{
.name = "timezone",
.type = FUNCTION_TYPE_TIMEZONE,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateTimezone,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = timezoneFunction,
.finalizeFunc = NULL},
{.name = "_rowts",
.finalizeFunc = NULL
},
{
.name = "_rowts",
.type = FUNCTION_TYPE_ROWTS,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC,
.translateFunc = translateTimePseudoColumn,
.getEnvFunc = getTimePseudoFuncEnv,
.initFunc = NULL,
.sprocessFunc = NULL,
.finalizeFunc = NULL},
{.name = "tbname",
.finalizeFunc = NULL
},
{
.name = "tbname",
.type = FUNCTION_TYPE_TBNAME,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_SCAN_PC_FUNC,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC,
.translateFunc = translateTbnameColumn,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = NULL,
.finalizeFunc = NULL},
{.name = "_qstartts",
.finalizeFunc = NULL
},
{
.name = "_qstartts",
.type = FUNCTION_TYPE_QSTARTTS,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC,
.translateFunc = translateTimePseudoColumn,
.getEnvFunc = getTimePseudoFuncEnv,
.initFunc = NULL,
.sprocessFunc = qStartTsFunction,
.finalizeFunc = NULL},
{.name = "_qendts",
.finalizeFunc = NULL
},
{
.name = "_qendts",
.type = FUNCTION_TYPE_QENDTS,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC,
.translateFunc = translateTimePseudoColumn,
.getEnvFunc = getTimePseudoFuncEnv,
.initFunc = NULL,
.sprocessFunc = qEndTsFunction,
.finalizeFunc = NULL},
{.name = "_wstartts",
.finalizeFunc = NULL
},
{
.name = "_wstartts",
.type = FUNCTION_TYPE_WSTARTTS,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC,
.translateFunc = translateTimePseudoColumn,
.getEnvFunc = getTimePseudoFuncEnv,
.initFunc = NULL,
.sprocessFunc = winStartTsFunction,
.finalizeFunc = NULL},
{.name = "_wendts",
.finalizeFunc = NULL
},
{
.name = "_wendts",
.type = FUNCTION_TYPE_WENDTS,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC,
.translateFunc = translateTimePseudoColumn,
.getEnvFunc = getTimePseudoFuncEnv,
.initFunc = NULL,
.sprocessFunc = winEndTsFunction,
.finalizeFunc = NULL},
{.name = "_wduration",
.finalizeFunc = NULL
},
{
.name = "_wduration",
.type = FUNCTION_TYPE_WDURATION,
.classification = FUNC_MGT_PSEUDO_COLUMN_FUNC | FUNC_MGT_WINDOW_PC_FUNC,
.translateFunc = translateWduration,
.getEnvFunc = getTimePseudoFuncEnv,
.initFunc = NULL,
.sprocessFunc = winDurFunction,
.finalizeFunc = NULL},
{.name = "to_json",
.finalizeFunc = NULL
},
{
.name = "to_json",
.type = FUNCTION_TYPE_TO_JSON,
.classification = FUNC_MGT_SCALAR_FUNC,
.translateFunc = translateToJson,
.getEnvFunc = NULL,
.initFunc = NULL,
.sprocessFunc = toJsonFunction,
.finalizeFunc = NULL}};
.finalizeFunc = NULL
}
};
const int32_t funcMgtBuiltinsNum = (sizeof(funcMgtBuiltins) / sizeof(SBuiltinFuncDefinition));
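With each entry reflowed onto its own block, the table reads as a plain name-keyed registry of SBuiltinFuncDefinition records. A hedged sketch of how a lookup over it could work (illustrative only -- the real resolver may use a hash index rather than a linear scan):

#include <stdint.h>
#include <string.h>

// mock of the one field the lookup needs; the real SBuiltinFuncDefinition also
// carries the translate/getEnv/init/process/finalize pointers shown above
typedef struct { const char *name; } SBuiltinSketch;

static const SBuiltinSketch *findBuiltin(const SBuiltinSketch *tbl, int32_t n,
                                         const char *name) {
  for (int32_t i = 0; i < n; ++i) {
    if (strcmp(tbl[i].name, name) == 0) return &tbl[i];
  }
  return NULL;  // unknown function name
}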

View File

@@ -80,6 +80,13 @@ typedef struct SDiffInfo {
} prev;
} SDiffInfo;
typedef struct SSpreadInfo {
double result;
bool hasResult;
double min;
double max;
} SSpreadInfo;
#define SET_VAL(_info, numOfElem, res) \
do { \
if ((numOfElem) <= 0) { \
@@ -1639,3 +1646,101 @@ int32_t topBotFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock) {
return pEntryInfo->numOfRes;
}
bool getSpreadFuncEnv(SFunctionNode* UNUSED_PARAM(pFunc), SFuncExecEnv* pEnv) {
pEnv->calcMemSize = sizeof(SSpreadInfo);
return true;
}
bool spreadFunctionSetup(SqlFunctionCtx *pCtx, SResultRowEntryInfo* pResultInfo) {
if (!functionSetup(pCtx, pResultInfo)) {
return false;
}
SSpreadInfo* pInfo = GET_ROWCELL_INTERBUF(pResultInfo);
SET_DOUBLE_VAL(&pInfo->min, DBL_MAX);
SET_DOUBLE_VAL(&pInfo->max, -DBL_MAX);
pInfo->hasResult = false;
return true;
}
int32_t spreadFunction(SqlFunctionCtx *pCtx) {
int32_t numOfElems = 0;
// only the pre-computed statistics are loaded; the actual data is not
SInputColumnInfoData* pInput = &pCtx->input;
SColumnDataAgg *pAgg = pInput->pColumnDataAgg[0];
int32_t type = pInput->pData[0]->info.type;
SSpreadInfo* pInfo = GET_ROWCELL_INTERBUF(GET_RES_INFO(pCtx));
if (pInput->colDataAggIsSet) {
numOfElems = pInput->numOfRows - pAgg->numOfNull;
if (numOfElems == 0) {
goto _spread_over;
}
double tmin = 0.0, tmax = 0.0;
if (IS_SIGNED_NUMERIC_TYPE(type)) {
tmin = (double)GET_INT64_VAL(&pAgg->min);
tmax = (double)GET_INT64_VAL(&pAgg->max);
} else if (IS_FLOAT_TYPE(type)) {
tmin = GET_DOUBLE_VAL(&pAgg->min);
tmax = GET_DOUBLE_VAL(&pAgg->max);
} else if (IS_UNSIGNED_NUMERIC_TYPE(type)) {
tmin = (double)GET_UINT64_VAL(&pAgg->min);
tmax = (double)GET_UINT64_VAL(&pAgg->max);
}
if (GET_DOUBLE_VAL(&pInfo->min) > tmin) {
SET_DOUBLE_VAL(&pInfo->min, tmin);
}
if (GET_DOUBLE_VAL(&pInfo->max) < tmax) {
SET_DOUBLE_VAL(&pInfo->max, tmax);
}
} else { // computing based on the true data block
SColumnInfoData* pCol = pInput->pData[0];
int32_t start = pInput->startRowIndex;
int32_t numOfRows = pInput->numOfRows;
// check the valid data one by one
for (int32_t i = start; i < start + numOfRows; ++i) {
if (colDataIsNull_f(pCol->nullbitmap, i)) {
continue;
}
char *data = colDataGetData(pCol, i);
double v = 0;
GET_TYPED_DATA(v, double, type, data);
if (v < GET_DOUBLE_VAL(&pInfo->min)) {
SET_DOUBLE_VAL(&pInfo->min, v);
}
if (v > GET_DOUBLE_VAL(&pInfo->max)) {
SET_DOUBLE_VAL(&pInfo->max, v);
}
numOfElems += 1;
}
}
_spread_over:
// every value examined was null, so no result is produced
SET_VAL(GET_RES_INFO(pCtx), numOfElems, 1);
if (numOfElems > 0) {
pInfo->hasResult = true;
}
return TSDB_CODE_SUCCESS;
}
int32_t spreadFinalize(SqlFunctionCtx* pCtx, SSDataBlock* pBlock) {
SSpreadInfo* pInfo = GET_ROWCELL_INTERBUF(GET_RES_INFO(pCtx));
if (pInfo->hasResult) {
SET_DOUBLE_VAL(&pInfo->result, pInfo->max - pInfo->min);
}
return functionFinalize(pCtx, pBlock);
}
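Taken together, these functions give SPREAD the usual semantics: track a running min and max (merged from per-block statistics when available, otherwise from the raw values) and report max - min at finalize, so a query like select spread(c5) from t1 over the test schema below yields the value range of the column. A standalone sketch of the raw-data path, stripped of the SqlFunctionCtx machinery (names are illustrative):

#include <float.h>
#include <stdbool.h>
#include <stddef.h>

// spread of the non-null values; *hasResult is false when every cell is null
static double spreadOf(const double *vals, const bool *isNull, size_t n,
                       bool *hasResult) {
  double min = DBL_MAX, max = -DBL_MAX;
  size_t cnt = 0;
  for (size_t i = 0; i < n; ++i) {
    if (isNull != NULL && isNull[i]) continue;  // skip nulls, as spreadFunction does
    if (vals[i] < min) min = vals[i];
    if (vals[i] > max) max = vals[i];
    ++cnt;
  }
  *hasResult = (cnt > 0);
  return (cnt > 0) ? (max - min) : 0.0;
}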

View File

@@ -35,9 +35,7 @@ class TDTestCase:
for i in range(tdSql.queryRows):
if data_tb_col[i] is None:
tdSql.checkData( i, 0 , None )
if (col_name == "c2" or col_name == "double" ) and tbname == "t1" and i == 10:
continue
else:
if col_name not in ["c2", "double"] or tbname != "t1" or i != 10:
utc_zone = datetime.timezone.utc
utc_8 = datetime.timezone(datetime.timedelta(hours=8))
date_init_stamp = datetime.datetime.utcfromtimestamp(data_tb_col[i]/1000)
@@ -597,7 +595,41 @@ class TDTestCase:
time2str = str(int(datetime.datetime.timestamp(data_t1_c10[i])*1000))
tdSql.checkData( i, 0, time2str )
tdLog.printNoPrefix("==========step39: cast constant operation to bigint, expect change to int ")
tdSql.query("select cast(12121.23323131 as bigint) as b from ct4")
( tdSql.checkData(i, 0, 12121) for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 as binary(16)) as b from ct4")
( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows)) )
tdSql.query("select cast(12121.23323131 as binary(2)) as b from ct4")
( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 as nchar(16)) as b from ct4")
( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 as nchar(2)) as b from ct4")
( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 + 321.876897998 as bigint) as b from ct4")
( tdSql.checkData(i, 0, 12443) for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 + 321.876897998 as binary(16)) as b from ct4")
( tdSql.checkData(i, 0, '12443.110129') for i in range(len(tdSql.queryRows)) )
tdSql.query("select cast(12121.23323131 + 321.876897998 as binary(3)) as b from ct4")
( tdSql.checkData(i, 0, '124') for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 + 321.876897998 as nchar(16)) as b from ct4")
( tdSql.checkData(i, 0, '12443.110129') for i in range(len(tdSql.queryRows)) )
tdSql.query("select cast(12121.23323131 + 321.876897998 as nchar(3)) as b from ct4")
( tdSql.checkData(i, 0, '124') for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as bigint) as b from ct4")
( tdSql.checkData(i, 0, 12121) for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as binary(16)) as b from ct4")
( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows)) )
tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as binary(2)) as b from ct4")
( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as nchar(16)) as b from ct4")
( tdSql.checkData(i, 0, '12121.233231') for i in range(len(tdSql.queryRows) ) )
tdSql.query("select cast(12121.23323131 + 'test~!@`#$%^&*()}{][;><.,' as nchar(2)) as b from ct4")
( tdSql.checkData(i, 0, '12') for i in range(len(tdSql.queryRows) ) )
tdLog.printNoPrefix("==========step40: error cast condition, should return error ")
tdSql.error("select cast(c1 as int) as b from ct4")
tdSql.error("select cast(c1 as bool) as b from ct4")
tdSql.error("select cast(c1 as tinyint) as b from ct4")

View File

@@ -0,0 +1,248 @@
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
TS_TYPE_COL = [TS_COL]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __char_length_condition(self):
char_length_condition = []
for char_col in CHAR_COL:
char_length_condition.extend(
(
char_col,
f"upper( {char_col} )",
)
)
char_length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL)
char_length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
char_length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL )
char_length_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
return char_length_condition
def __where_condition(self, col):
# return f" where count({col}) > 0 "
return ""
def __group_condition(self, col, having = ""):
return f" group by {col} having {having}" if having else f" group by {col} "
def __char_length_current_check(self, tbname):
char_length_condition = self.__char_length_condition()
for condition in char_length_condition:
where_condition = self.__where_condition(condition)
group_having = self.__group_condition(condition, having=f"{condition} is not null " )
group_no_having= self.__group_condition(condition )
groups = ["", group_having, group_no_having]
for group_condition in groups:
tdSql.query(f"select {condition}, char_length( {condition} ) from {tbname} {where_condition} {group_condition} ")
for i in range(tdSql.queryRows):
if not tdSql.getData(i,1):
tdSql.checkData(i, 1, None)
# elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition):
# tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) * 4 )
else:
tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) )
def __char_length_err_check(self,tbname):
sqls = []
for un_char_col in UN_CHAR_COL:
sqls.extend(
(
f"select char_length( {un_char_col} ) from {tbname} ",
f"select char_length(ceil( {un_char_col} )) from {tbname} ",
f"select {un_char_col} from {tbname} group by char_length( {un_char_col} ) ",
)
)
sqls.extend( f"select char_length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL )
sqls.extend( f"select char_length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select {char_col} from {tbname} group by char_length( {char_col} ) " for char_col in CHAR_COL)
sqls.extend( f"select char_length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select char_length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL)
sqls.extend( f"select char_length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL )
sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL )
sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL )
sqls.extend(
(
f"select char_length() from {tbname} ",
f"select char_length(*) from {tbname} ",
f"select char_length(ccccccc) from {tbname} ",
f"select char_length(111) from {tbname} ",
f"select char_length(c8, 11) from {tbname} ",
)
)
return sqls
def __test_current(self):
tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
self.__char_length_current_check(tb)
tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========")
def __test_error(self):
tdLog.printNoPrefix("==========err sql condition check , must return error==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
for errsql in self.__char_length_err_check(tb):
tdSql.error(sql=errsql)
tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")
def all_test(self):
self.__test_current()
self.__test_error()
def __create_tb(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.__insert_data(10)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
# tdDnodes.stop(1)
# tdDnodes.start(1)
# tdSql.execute("use db")
# tdLog.printNoPrefix("==========step4:after wal, all check again ")
# self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())

View File

@@ -0,0 +1,249 @@
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
TS_TYPE_COL = [TS_COL]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __length_condition(self):
length_condition = []
for char_col in CHAR_COL:
length_condition.extend(
(
char_col,
f"upper( {char_col} )",
)
)
length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL)
length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL )
length_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
return length_condition
def __where_condition(self, col):
# return f" where count({col}) > 0 "
return ""
def __group_condition(self, col, having = ""):
return f" group by {col} having {having}" if having else f" group by {col} "
def __length_current_check(self, tbname):
length_condition = self.__length_condition()
for condition in length_condition:
where_condition = self.__where_condition(condition)
group_having = self.__group_condition(condition, having=f"{condition} is not null " )
group_no_having= self.__group_condition(condition )
groups = ["", group_having, group_no_having]
for group_condition in groups:
tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ")
datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)]
length_data = [ len(str(data)) if data else None for data in datas ]
tdSql.query(f"select length( {condition} ) from {tbname} {where_condition} {group_condition}")
for i in range(len(length_data)):
if length_data[i]:
    tdSql.checkData(i, 0, length_data[i])
else:
    tdSql.checkData(i, 0, None)
def __length_err_check(self,tbname):
sqls = []
for un_char_col in UN_CHAR_COL:
sqls.extend(
(
f"select length( {un_char_col} ) from {tbname} ",
f"select length(ceil( {un_char_col} )) from {tbname} ",
f"select {un_char_col} from {tbname} group by length( {un_char_col} ) ",
)
)
sqls.extend( f"select length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL )
sqls.extend( f"select length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select {char_col} from {tbname} group by length( {char_col} ) " for char_col in CHAR_COL)
sqls.extend( f"select length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL)
sqls.extend( f"select length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL )
sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL )
sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL )
sqls.extend(
(
f"select length() from {tbname} ",
f"select length(*) from {tbname} ",
f"select length(ccccccc) from {tbname} ",
f"select length(111) from {tbname} ",
f"select length(c8, 11) from {tbname} ",
)
)
return sqls
def __test_current(self):
tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
tbname = ["ct1", "ct2", "ct4", "t1"]
for tb in tbname:
self.__length_current_check(tb)
tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========")
def __test_error(self):
tdLog.printNoPrefix("==========err sql condition check , must return error==========")
tbname = ["ct1", "ct2", "ct4", "t1"]
for tb in tbname:
for errsql in self.__length_err_check(tb):
tdSql.error(sql=errsql)
tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")
def all_test(self):
self.__test_current()
self.__test_error()
def __create_tb(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
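# all timestamps are epoch milliseconds: ct1 steps back 1 s per row while
# ct2/ct4 step back 90 days (7776000000 ms); the statements further down add
# all-NULL rows and boundary rows around the insert window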
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.__insert_data(10)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
# tdDnodes.stop(1)
# tdDnodes.start(1)
# tdSql.execute("use db")
# tdLog.printNoPrefix("==========step4:after wal, all check again ")
# self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())

View File

@ -0,0 +1,249 @@
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
TS_TYPE_COL = [TS_COL]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __length_condition(self):
length_condition = []
for char_col in CHAR_COL:
length_condition.extend(
(
char_col,
f"upper( {char_col} )",
)
)
length_condition.extend( f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL)
length_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
length_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL )
length_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
return length_condition
def __where_condition(self, col):
# return f" where count({col}) > 0 "
return ""
def __group_condition(self, col, having = ""):
return f" group by {col} having {having}" if having else f" group by {col} "
def __length_current_check(self, tbname):
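# query the expression and its length() together; NULL input must yield NULL,
# and the check assumes nchar results occupy 4 bytes per character (the * 4
# below) while other char results occupy 1 byte per character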
length_condition = self.__length_condition()
for condition in length_condition:
where_condition = self.__where_condition(condition)
group_having = self.__group_condition(condition, having=f"{condition} is not null " )
group_no_having = self.__group_condition(condition)
groups = ["", group_having, group_no_having]
for group_condition in groups:
tdSql.query(f"select {condition}, length( {condition} ) from {tbname} {where_condition} {group_condition} ")
for i in range(tdSql.queryRows):
if not tdSql.getData(i,1):
tdSql.checkData(i, 1, None)
elif "as nchar" in condition or (NCHAR_COL in condition and "as binary" not in condition):
tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) * 4 )
else:
tdSql.checkData(i, 1, len(str(tdSql.getData(i,0) ) ) )
def __length_err_check(self, tbname):
sqls = []
for un_char_col in UN_CHAR_COL:
sqls.extend(
(
f"select length( {un_char_col} ) from {tbname} ",
f"select length(ceil( {un_char_col} )) from {tbname} ",
f"select {un_char_col} from {tbname} group by length( {un_char_col} ) ",
)
)
sqls.extend( f"select length( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL )
sqls.extend( f"select length( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select {char_col} from {tbname} group by length( {char_col} ) " for char_col in CHAR_COL)
sqls.extend( f"select length( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select length( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL)
sqls.extend( f"select length( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL )
sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL )
sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL )
sqls.extend(
(
f"select length() from {tbname} ",
f"select length(*) from {tbname} ",
f"select length(ccccccc) from {tbname} ",
f"select length(111) from {tbname} ",
f"select length(c8, 11) from {tbname} ",
)
)
return sqls
def __test_current(self):
tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
self.__length_current_check(tb)
tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========")
def __test_error(self):
tdLog.printNoPrefix("==========err sql condition check , must return error==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
for errsql in self.__length_err_check(tb):
tdSql.error(sql=errsql)
tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")
def all_test(self):
self.__test_current()
self.__test_error()
def __create_tb(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
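# besides the regular rows, insert all-NULL rows and boundary rows close to
# the type limits (2^31/2^63 for int/bigint, ~3.3e38 for float, ~1.3e308 for double)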
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.__insert_data(10)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
# tdDnodes.stop(1)
# tdDnodes.start(1)
# tdSql.execute("use db")
# tdLog.printNoPrefix("==========step4:after wal, all check again ")
# self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())

View File

@ -0,0 +1,246 @@
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
TS_TYPE_COL = [TS_COL]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __lower_condition(self):
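# expressions to feed into lower(): raw char columns, upper()-wrapped columns,
# numeric columns cast to binary, char concatenations, and a constant string literal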
lower_condition = []
for char_col in CHAR_COL:
lower_condition.extend(
(
char_col,
f"upper( {char_col} )",
)
)
lower_condition.extend(f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL)
lower_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
lower_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL )
lower_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
return lower_condition
def __where_condition(self, col):
# return f" where count({col}) > 0 "
return ""
def __group_condition(self, col, having = ""):
return f" group by {col} having {having}" if having else f" group by {col} "
def __lower_current_check(self, tbname):
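# compute the expected result client-side with str.lower() and compare it
# row by row against the server-side lower() output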
lower_condition = self.__lower_condition()
for condition in lower_condition:
where_condition = self.__where_condition(condition)
group_having = self.__group_condition(condition, having=f"{condition} is not null " )
group_no_having = self.__group_condition(condition)
groups = ["", group_having, group_no_having]
for group_condition in groups:
tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ")
datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)]
lower_data = [ str(data).lower() if data else None for data in datas ]
tdSql.query(f"select lower( {condition} ) from {tbname} {where_condition} {group_condition}")
for i in range(len(lower_data)):
if lower_data[i]:
tdSql.checkData(i, 0, lower_data[i])
else:
tdSql.checkData(i, 0, None)
def __lower_err_check(self, tbname):
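# statements that must be rejected: lower() on numeric/bool/timestamp columns,
# wrong argument counts, unknown columns, and interval/sliding windows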
sqls = []
for un_char_col in UN_CHAR_COL:
sqls.extend(
(
f"select lower( {un_char_col} ) from {tbname} ",
f"select lower(ceil( {un_char_col} )) from {tbname} ",
f"select {un_char_col} from {tbname} group by lower( {un_char_col} ) ",
)
)
sqls.extend( f"select lower( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL )
sqls.extend( f"select lower( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select {char_col} from {tbname} group by lower( {char_col} ) " for char_col in CHAR_COL)
sqls.extend( f"select lower( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select lower( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL)
sqls.extend( f"select lower( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL )
sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL )
sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL )
sqls.extend(
(
f"select lower() from {tbname} ",
f"select lower(*) from {tbname} ",
f"select lower(ccccccc) from {tbname} ",
f"select lower(111) from {tbname} ",
f"select lower(c8, 11) from {tbname} ",
)
)
return sqls
def __test_current(self):
tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
self.__lower_current_check(tb)
tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========")
def __test_error(self):
tdLog.printNoPrefix("==========err sql condition check , must return error==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
for errsql in self.__lower_err_check(tb):
tdSql.error(sql=errsql)
tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")
def all_test(self):
self.__test_current()
self.__test_error()
def __create_tb(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.__insert_data(10)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
# tdDnodes.stop(1)
# tdDnodes.start(1)
# tdSql.execute("use db")
# tdLog.printNoPrefix("==========step4:after wal, all check again ")
# self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())

View File

@ -61,6 +61,7 @@ class TDTestCase:
tdSql.checkData(0, 0, sum_data)
tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ")
tdSql.query(f"select sum( {condition} ) from {tbname} {where_condition} {group_condition} ")
def __sum_err_check(self,tbanme):
sqls = []
@ -131,74 +132,75 @@ class TDTestCase:
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
for i in range(9):
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( now()-{i*10}s, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )"
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( now()-{i*90}d, {1*i}, {11111*i}, {111*i}, {11*i}, {1.11*i}, {11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )"
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( now()-{i*90}d, {-1*i}, {-11111*i}, {-111*i}, {-11*i}, {-1.11*i}, {-11.11*i}, {i%2}, 'binary{i}', 'nchar{i}', now()+{1*i}a )"
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
'''insert into ct1 values
( now()-45s, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', now()+8a )
( now()+10s, 9, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', now()+9a )
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
now()+{rows * 9-10}d, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000}
)
(
now()+{rows * 9-20}d, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( now()-810d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( now()-400d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( now()+{rows * 9}d, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
now()+{rows * 9-10}d, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nachar_limit-1", now()-1d
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
now()+{rows * 9-20}d, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nachar_limit-2", now()-2d
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( now()-{i}h, {i}, {i}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_{i}", now()-{i}s )
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( now() + 3h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( now()-{ ( rows // 2 ) * 60 + 30 }m, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( now()-{rows}h, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( now() + 2h, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nachar_limit-1", now()-1d
"binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
now() + 1h , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nachar_limit-2", now()-2d
"binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
@ -211,7 +213,7 @@ class TDTestCase:
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.__insert_data(100)
self.__insert_data(10)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()

View File

@ -0,0 +1,245 @@
import datetime
from util.log import *
from util.sql import *
from util.cases import *
from util.dnodes import *
INT_COL = "c1"
BINT_COL = "c2"
SINT_COL = "c3"
TINT_COL = "c4"
FLOAT_COL = "c5"
DOUBLE_COL = "c6"
BOOL_COL = "c7"
BINARY_COL = "c8"
NCHAR_COL = "c9"
TS_COL = "c10"
UN_CHAR_COL = [INT_COL, BINT_COL, SINT_COL, TINT_COL, FLOAT_COL, DOUBLE_COL, BOOL_COL, ]
CHAR_COL = [ BINARY_COL, NCHAR_COL, ]
TS_TYPE_COL = [TS_COL]
class TDTestCase:
def init(self, conn, logSql):
tdLog.debug(f"start to excute {__file__}")
tdSql.init(conn.cursor())
def __upper_condition(self):
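# expressions to feed into upper(): raw char columns, lower()-wrapped columns,
# numeric columns cast to binary, char concatenations, and a constant string literal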
upper_condition = []
for char_col in CHAR_COL:
upper_condition.extend(
(
char_col,
f"lower( {char_col} )",
)
)
upper_condition.extend(f"cast( {un_char_col} as binary(16) ) " for un_char_col in UN_CHAR_COL)
upper_condition.extend( f"cast( {char_col} + {char_col_2} as binary(32) ) " for char_col_2 in CHAR_COL )
upper_condition.extend( f"cast( {char_col} + {un_char_col} as binary(32) ) " for un_char_col in UN_CHAR_COL )
upper_condition.append('''"test1234!@#$%^&*():'><?/.,][}{"''')
return upper_condition
def __where_condition(self, col):
# return f" where count({col}) > 0 "
return ""
def __group_condition(self, col, having = ""):
return f" group by {col} having {having}" if having else f" group by {col} "
def __upper_current_check(self, tbname):
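# compute the expected result client-side with str.upper() and compare it
# row by row against the server-side upper() output, NULL in -> NULL out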
upper_condition = self.__upper_condition()
for condition in upper_condition:
where_condition = self.__where_condition(condition)
group_having = self.__group_condition(condition, having=f"{condition} is not null " )
group_no_having = self.__group_condition(condition)
groups = ["", group_having, group_no_having]
for group_condition in groups:
tdSql.query(f"select {condition} from {tbname} {where_condition} {group_condition} ")
datas = [tdSql.getData(i,0) for i in range(tdSql.queryRows)]
upper_data = [ str(data).upper() if data else None for data in datas ]
tdSql.query(f"select upper( {condition} ) from {tbname} {where_condition} {group_condition}")
for i in range(len(upper_data)):
if upper_data[i]:
tdSql.checkData(i, 0, upper_data[i])
else:
tdSql.checkData(i, 0, None)
def __upper_err_check(self, tbname):
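# statements that must be rejected: upper() on numeric/bool/timestamp columns,
# wrong argument counts, unknown columns, and interval/sliding windows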
sqls = []
for un_char_col in UN_CHAR_COL:
sqls.extend(
(
f"select upper( {un_char_col} ) from {tbname} ",
f"select upper(ceil( {un_char_col} )) from {tbname} ",
f"select {un_char_col} from {tbname} group by upper( {un_char_col} ) ",
)
)
sqls.extend( f"select upper( {un_char_col} + {un_char_col_2} ) from {tbname} " for un_char_col_2 in UN_CHAR_COL )
sqls.extend( f"select upper( {un_char_col} + {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select {char_col} from {tbname} group by upper( {char_col} ) " for char_col in CHAR_COL)
sqls.extend( f"select upper( {ts_col} ) from {tbname} " for ts_col in TS_TYPE_COL )
sqls.extend( f"select upper( {char_col} + {ts_col} ) from {tbname} " for char_col in UN_CHAR_COL for ts_col in TS_TYPE_COL)
sqls.extend( f"select upper( {char_col} + {char_col_2} ) from {tbname} " for char_col in CHAR_COL for char_col_2 in CHAR_COL )
sqls.extend( f"select upper({char_col}, 11) from {tbname} " for char_col in CHAR_COL )
sqls.extend( f"select upper({char_col}) from {tbname} interval(2d) sliding(1d)" for char_col in CHAR_COL )
sqls.extend(
(
f"select upper() from {tbname} ",
f"select upper(*) from {tbname} ",
f"select upper(ccccccc) from {tbname} ",
f"select upper(111) from {tbname} ",
)
)
return sqls
def __test_current(self):
tdLog.printNoPrefix("==========current sql condition check , must return query ok==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
self.__upper_current_check(tb)
tdLog.printNoPrefix(f"==========current sql condition check in {tb} over==========")
def __test_error(self):
tdLog.printNoPrefix("==========err sql condition check , must return error==========")
tbname = ["ct1", "ct2", "ct4", "t1", "stb1"]
for tb in tbname:
for errsql in self.__upper_err_check(tb):
tdSql.error(sql=errsql)
tdLog.printNoPrefix(f"==========err sql condition check in {tb} over==========")
def all_test(self):
self.__test_current()
self.__test_error()
def __create_tb(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
create_stb_sql = f'''create table stb1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
) tags (t1 int)
'''
create_ntb_sql = f'''create table t1(
ts timestamp, {INT_COL} int, {BINT_COL} bigint, {SINT_COL} smallint, {TINT_COL} tinyint,
{FLOAT_COL} float, {DOUBLE_COL} double, {BOOL_COL} bool,
{BINARY_COL} binary(16), {NCHAR_COL} nchar(32), {TS_COL} timestamp
)
'''
tdSql.execute(create_stb_sql)
tdSql.execute(create_ntb_sql)
for i in range(4):
tdSql.execute(f'create table ct{i+1} using stb1 tags ( {i+1} )')
def __insert_data(self, rows):
now_time = int(datetime.datetime.timestamp(datetime.datetime.now()) * 1000)
for i in range(rows):
tdSql.execute(
f"insert into ct1 values ( { now_time - i * 1000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct4 values ( { now_time - i * 7776000000 }, {i}, {11111 * i}, {111 * i % 32767 }, {11 * i % 127}, {1.11*i}, {1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f"insert into ct2 values ( { now_time - i * 7776000000 }, {-i}, {-11111 * i}, {-111 * i % 32767 }, {-11 * i % 127}, {-1.11*i}, {-1100.0011*i}, {i%2}, 'binary{i}', 'nchar{i}', { now_time + 1 * i } )"
)
tdSql.execute(
f'''insert into ct1 values
( { now_time - rows * 5 }, 0, 0, 0, 0, 0, 0, 0, 'binary0', 'nchar0', { now_time + 8 } )
( { now_time + 10000 }, { rows }, -99999, -999, -99, -9.99, -99.99, 1, 'binary9', 'nchar9', { now_time + 9 } )
'''
)
tdSql.execute(
f'''insert into ct4 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000}, {pow(2,31)-pow(2,15)}, {pow(2,63)-pow(2,30)}, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000}
)
(
{ now_time + 2592000000 }, {pow(2,31)-pow(2,16)}, {pow(2,63)-pow(2,31)}, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000}
)
'''
)
tdSql.execute(
f'''insert into ct2 values
( { now_time - rows * 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3888000000+ 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7776000000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
(
{ now_time + 5184000000 }, { -1 * pow(2,31) + pow(2,15) }, { -1 * pow(2,63) + pow(2,30) }, -32766, -126,
{ -1 * 3.2 * pow(10,38) }, { -1.2 * pow(10,308) }, { rows % 2 }, "binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 2592000000 }, { -1 * pow(2,31) + pow(2,16) }, { -1 * pow(2,63) + pow(2,31) }, -32767, -127,
{ - 3.3 * pow(10,38) }, { -1.3 * pow(10,308) }, { (rows-1) % 2 }, "binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
for i in range(rows):
insert_data = f'''insert into t1 values
( { now_time - i * 3600000 }, {i}, {i * 11111}, { i % 32767 }, { i % 127}, { i * 1.11111 }, { i * 1000.1111 }, { i % 2},
"binary_{i}", "nchar_{i}", { now_time - 1000 * i } )
'''
tdSql.execute(insert_data)
tdSql.execute(
f'''insert into t1 values
( { now_time + 10800000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - (( rows // 2 ) * 60 + 30) * 60000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time - rows * 3600000 }, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL )
( { now_time + 7200000 }, { pow(2,31) - pow(2,15) }, { pow(2,63) - pow(2,30) }, 32767, 127,
{ 3.3 * pow(10,38) }, { 1.3 * pow(10,308) }, { rows % 2 },
"binary_limit-1", "nchar_limit-1", { now_time - 86400000 }
)
(
{ now_time + 3600000 } , { pow(2,31) - pow(2,16) }, { pow(2,63) - pow(2,31) }, 32766, 126,
{ 3.2 * pow(10,38) }, { 1.2 * pow(10,308) }, { (rows-1) % 2 },
"binary_limit-2", "nchar_limit-2", { now_time - 172800000 }
)
'''
)
def run(self):
tdSql.prepare()
tdLog.printNoPrefix("==========step1:create table")
self.__create_tb()
tdLog.printNoPrefix("==========step2:insert data")
self.__insert_data(10)
tdLog.printNoPrefix("==========step3:all check")
self.all_test()
# tdDnodes.stop(1)
# tdDnodes.start(1)
# tdSql.execute("use db")
# tdLog.printNoPrefix("==========step4:after wal, all check again ")
# self.all_test()
def stop(self):
tdSql.close()
tdLog.success(f"{__file__} successfully executed")
tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())

View File

@ -3,10 +3,12 @@ set -e
set -x
python3 ./test.py -f 0-others/taosShell.py
python3 ./test.py -f 0-others/taosShellError.py
python3 ./test.py -f 0-others/taosShellNetChk.py
#python3 ./test.py -f 2-query/between.py
#python3 ./test.py -f 2-query/distinct.py
python3 ./test.py -f 2-query/distinct.py
python3 ./test.py -f 2-query/varchar.py
#python3 ./test.py -f 2-query/timezone.py