Merge branch '3.0' of https://github.com/taosdata/TDengine into enh/row_optimize

This commit is contained in:
Hongze Cheng 2022-11-21 12:13:40 +08:00
commit f46f941c4e
39 changed files with 2063 additions and 466 deletions

View File

@ -2,7 +2,7 @@
# taosws-rs
ExternalProject_Add(taosws-rs
GIT_REPOSITORY https://github.com/taosdata/taos-connector-rust.git
GIT_TAG 9843872
GIT_TAG f406d51
SOURCE_DIR "${TD_SOURCE_DIR}/tools/taosws-rs"
BINARY_DIR ""
#BUILD_IN_SOURCE TRUE

View File

@ -24,7 +24,7 @@ TDengine 知识地图中涵盖了 TDengine 的各种知识点,揭示了各概
<figure>
<center>
<a href="/img/tdengine-map.svg" target="_blank"><img src="/img/tdengine-map.svg" width="80%" /></a>
<a href="pathname:///img/tdengine-map.svg" target="_blank"><img src="/img/tdengine-map.svg" width="80%" /></a>
<figcaption>图 1. TDengine 知识地图</figcaption>
</center>
</figure>

View File

@ -776,7 +776,6 @@ typedef struct STimeSliceOperatorInfo {
SArray* pPrevRow; // SArray<SGroupValue>
SArray* pNextRow; // SArray<SGroupValue>
SArray* pLinearInfo; // SArray<SFillLinearInfo>
bool fillLastPoint;
bool isPrevRowSet;
bool isNextRowSet;
int32_t fillType; // fill type

View File

@ -36,7 +36,8 @@ typedef struct SFillColInfo {
typedef struct SFillLinearInfo {
SPoint start;
SPoint end;
bool hasNull;
bool isStartSet;
bool isEndSet;
int16_t type;
int32_t bytes;
} SFillLinearInfo;

View File

@ -1942,10 +1942,8 @@ static void doKeepPrevRows(STimeSliceOperatorInfo* pSliceInfo, const SSDataBlock
for (int32_t i = 0; i < numOfCols; ++i) {
SColumnInfoData* pColInfoData = taosArrayGet(pBlock->pDataBlock, i);
// null data should not be kept since it can not be used to perform interpolation
if (!colDataIsNull_s(pColInfoData, i)) {
SGroupKeys* pkey = taosArrayGet(pSliceInfo->pPrevRow, i);
SGroupKeys* pkey = taosArrayGet(pSliceInfo->pPrevRow, i);
if (!colDataIsNull_s(pColInfoData, rowIndex)) {
pkey->isNull = false;
char* val = colDataGetData(pColInfoData, rowIndex);
if (!IS_VAR_DATA_TYPE(pkey->type)) {
@ -1953,6 +1951,8 @@ static void doKeepPrevRows(STimeSliceOperatorInfo* pSliceInfo, const SSDataBlock
} else {
memcpy(pkey->pData, val, varDataLen(val));
}
} else {
pkey->isNull = true;
}
}
@ -1964,10 +1964,8 @@ static void doKeepNextRows(STimeSliceOperatorInfo* pSliceInfo, const SSDataBlock
for (int32_t i = 0; i < numOfCols; ++i) {
SColumnInfoData* pColInfoData = taosArrayGet(pBlock->pDataBlock, i);
// null data should not be kept since it can not be used to perform interpolation
if (!colDataIsNull_s(pColInfoData, i)) {
SGroupKeys* pkey = taosArrayGet(pSliceInfo->pNextRow, i);
SGroupKeys* pkey = taosArrayGet(pSliceInfo->pNextRow, i);
if (!colDataIsNull_s(pColInfoData, rowIndex)) {
pkey->isNull = false;
char* val = colDataGetData(pColInfoData, rowIndex);
if (!IS_VAR_DATA_TYPE(pkey->type)) {
@ -1975,50 +1973,51 @@ static void doKeepNextRows(STimeSliceOperatorInfo* pSliceInfo, const SSDataBlock
} else {
memcpy(pkey->pData, val, varDataLen(val));
}
} else {
pkey->isNull = true;
}
}
pSliceInfo->isNextRowSet = true;
}
static void doKeepLinearInfo(STimeSliceOperatorInfo* pSliceInfo, const SSDataBlock* pBlock, int32_t rowIndex,
bool isLastRow) {
static void doKeepLinearInfo(STimeSliceOperatorInfo* pSliceInfo, const SSDataBlock* pBlock, int32_t rowIndex) {
int32_t numOfCols = taosArrayGetSize(pBlock->pDataBlock);
bool fillLastPoint = pSliceInfo->fillLastPoint;
for (int32_t i = 0; i < numOfCols; ++i) {
SColumnInfoData* pColInfoData = taosArrayGet(pBlock->pDataBlock, i);
SColumnInfoData* pTsCol = taosArrayGet(pBlock->pDataBlock, pSliceInfo->tsCol.slotId);
SFillLinearInfo* pLinearInfo = taosArrayGet(pSliceInfo->pLinearInfo, i);
// null data should not be kept since it can not be used to perform interpolation
if (!colDataIsNull_s(pColInfoData, i)) {
if (isLastRow) {
// null value is represented by using key = INT64_MIN for now.
// TODO: optimize to ignore null values for linear interpolation.
if (!pLinearInfo->isStartSet) {
if (!colDataIsNull_s(pColInfoData, rowIndex)) {
pLinearInfo->start.key = *(int64_t*)colDataGetData(pTsCol, rowIndex);
memcpy(pLinearInfo->start.val, colDataGetData(pColInfoData, rowIndex), pLinearInfo->bytes);
} else if (fillLastPoint) {
}
pLinearInfo->isStartSet = true;
} else if (!pLinearInfo->isEndSet) {
if (!colDataIsNull_s(pColInfoData, rowIndex)) {
pLinearInfo->end.key = *(int64_t*)colDataGetData(pTsCol, rowIndex);
memcpy(pLinearInfo->end.val, colDataGetData(pColInfoData, rowIndex), pLinearInfo->bytes);
}
pLinearInfo->isEndSet = true;
} else {
pLinearInfo->start.key = pLinearInfo->end.key;
memcpy(pLinearInfo->start.val, pLinearInfo->end.val, pLinearInfo->bytes);
if (!colDataIsNull_s(pColInfoData, rowIndex)) {
pLinearInfo->end.key = *(int64_t*)colDataGetData(pTsCol, rowIndex);
memcpy(pLinearInfo->end.val, colDataGetData(pColInfoData, rowIndex), pLinearInfo->bytes);
} else {
pLinearInfo->start.key = *(int64_t*)colDataGetData(pTsCol, rowIndex);
pLinearInfo->end.key = *(int64_t*)colDataGetData(pTsCol, rowIndex + 1);
char* val;
val = colDataGetData(pColInfoData, rowIndex);
memcpy(pLinearInfo->start.val, val, pLinearInfo->bytes);
val = colDataGetData(pColInfoData, rowIndex + 1);
memcpy(pLinearInfo->end.val, val, pLinearInfo->bytes);
pLinearInfo->end.key = INT64_MIN;
}
pLinearInfo->hasNull = false;
} else {
pLinearInfo->hasNull = true;
}
}
pSliceInfo->fillLastPoint = isLastRow;
}
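The reworked doKeepLinearInfo above replaces the old hasNull/fillLastPoint flags with per-column isStartSet/isEndSet state: the first kept row becomes the start point, the second becomes the end point, and every later row slides the two-point window forward, with key = INT64_MIN marking a NULL endpoint. A minimal Python sketch of that state machine, using hypothetical names (LinearInfo, keep_linear_info) purely for illustration:

INT64_MIN = -2**63                       # sentinel used above to mark a NULL endpoint

class LinearInfo:
    def __init__(self):
        self.start = (INT64_MIN, None)   # earlier endpoint (key, val)
        self.end = (INT64_MIN, None)     # later endpoint (key, val)
        self.is_start_set = False
        self.is_end_set = False

def keep_linear_info(info, ts, val):
    point = (ts, val) if val is not None else (INT64_MIN, None)
    if not info.is_start_set:
        info.start, info.is_start_set = point, True
    elif not info.is_end_set:
        info.end, info.is_end_set = point, True
    else:                                # slide the two-point window forward
        info.start = info.end
        info.end = point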
static void genInterpolationResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp* pExprSup, SSDataBlock* pResBlock) {
static bool genInterpolationResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp* pExprSup, SSDataBlock* pResBlock, bool beforeTs) {
int32_t rows = pResBlock->info.rows;
blockDataEnsureCapacity(pResBlock, rows + 1);
// todo set the correct primary timestamp column
@ -2037,7 +2036,6 @@ static void genInterpolationResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp
}
int32_t srcSlot = pExprInfo->base.pParam[0].pCol->slotId;
// SColumnInfoData* pSrc = taosArrayGet(pBlock->pDataBlock, srcSlot);
switch (pSliceInfo->fillType) {
case TSDB_FILL_NULL: {
colDataAppendNULL(pDst, rows);
@ -2069,21 +2067,26 @@ static void genInterpolationResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp
SPoint start = pLinearInfo->start;
SPoint end = pLinearInfo->end;
SPoint current = {.key = pSliceInfo->current};
current.val = taosMemoryCalloc(pLinearInfo->bytes, 1);
// before interp range, do not fill
if (start.key == INT64_MIN || end.key == INT64_MAX) {
// do not interpolate before the ts range; only advance pSliceInfo->current
if (beforeTs && !pLinearInfo->isEndSet) {
return true;
}
if (!pLinearInfo->isStartSet || !pLinearInfo->isEndSet) {
hasInterp = false;
break;
}
if (pLinearInfo->hasNull) {
if (start.key == INT64_MIN || end.key == INT64_MIN) {
colDataAppendNULL(pDst, rows);
} else {
taosGetLinearInterpolationVal(&current, pLinearInfo->type, &start, &end, pLinearInfo->type);
colDataAppend(pDst, rows, (char*)current.val, false);
break;
}
current.val = taosMemoryCalloc(pLinearInfo->bytes, 1);
taosGetLinearInterpolationVal(&current, pLinearInfo->type, &start, &end, pLinearInfo->type);
colDataAppend(pDst, rows, (char*)current.val, false);
taosMemoryFree(current.val);
break;
}
@ -2094,7 +2097,11 @@ static void genInterpolationResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp
}
SGroupKeys* pkey = taosArrayGet(pSliceInfo->pPrevRow, srcSlot);
colDataAppend(pDst, rows, pkey->pData, false);
if (pkey->isNull == false) {
colDataAppend(pDst, rows, pkey->pData, false);
} else {
colDataAppendNULL(pDst, rows);
}
break;
}
@ -2105,7 +2112,11 @@ static void genInterpolationResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp
}
SGroupKeys* pkey = taosArrayGet(pSliceInfo->pNextRow, srcSlot);
colDataAppend(pDst, rows, pkey->pData, false);
if (pkey->isNull == false) {
colDataAppend(pDst, rows, pkey->pData, false);
} else {
colDataAppendNULL(pDst, rows);
}
break;
}
@ -2118,8 +2129,40 @@ static void genInterpolationResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp
if (hasInterp) {
pResBlock->info.rows += 1;
}
return hasInterp;
}
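In the TSDB_FILL_LINEAR branch above, taosGetLinearInterpolationVal computes the value at the current timestamp from the two stored endpoints, and a NULL is appended when either endpoint key still holds the INT64_MIN sentinel. As a hedged sketch of the underlying two-point interpolation (numeric columns only, function name illustrative):

def linear_interp(cur_key, start, end):
    # start and end are (key, val) pairs with start key < end key
    (k0, v0), (k1, v1) = start, end
    if k1 == k0:
        return v0                        # degenerate window: fall back to the start value
    return v0 + (v1 - v0) * (cur_key - k0) / (k1 - k0)

# e.g. interpolating at ts 15 between (10, 100.0) and (20, 200.0) yields 150.0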
static void addCurrentRowToResult(STimeSliceOperatorInfo* pSliceInfo, SExprSupp* pExprSup, SSDataBlock* pResBlock,
SSDataBlock* pSrcBlock, int32_t index) {
blockDataEnsureCapacity(pResBlock, pResBlock->info.rows + 1);
for (int32_t j = 0; j < pExprSup->numOfExprs; ++j) {
SExprInfo* pExprInfo = &pExprSup->pExprInfo[j];
int32_t dstSlot = pExprInfo->base.resSchema.slotId;
SColumnInfoData* pDst = taosArrayGet(pResBlock->pDataBlock, dstSlot);
if (IS_TIMESTAMP_TYPE(pExprInfo->base.resSchema.type)) {
colDataAppend(pDst, pResBlock->info.rows, (char*)&pSliceInfo->current, false);
} else {
int32_t srcSlot = pExprInfo->base.pParam[0].pCol->slotId;
SColumnInfoData* pSrc = taosArrayGet(pSrcBlock->pDataBlock, srcSlot);
if (colDataIsNull_s(pSrc, index)) {
colDataAppendNULL(pDst, pResBlock->info.rows);
continue;
}
char* v = colDataGetData(pSrc, index);
colDataAppend(pDst, pResBlock->info.rows, v, false);
}
}
pResBlock->info.rows += 1;
return;
}
static int32_t initPrevRowsKeeper(STimeSliceOperatorInfo* pInfo, SSDataBlock* pBlock) {
if (pInfo->pPrevRow != NULL) {
return TSDB_CODE_SUCCESS;
@ -2190,24 +2233,19 @@ static int32_t initFillLinearInfo(STimeSliceOperatorInfo* pInfo, SSDataBlock* pB
SFillLinearInfo linearInfo = {0};
linearInfo.start.key = INT64_MIN;
linearInfo.end.key = INT64_MAX;
linearInfo.end.key = INT64_MIN;
linearInfo.start.val = taosMemoryCalloc(1, pColInfo->info.bytes);
linearInfo.end.val = taosMemoryCalloc(1, pColInfo->info.bytes);
linearInfo.hasNull = false;
linearInfo.isStartSet = false;
linearInfo.isEndSet = false;
linearInfo.type = pColInfo->info.type;
linearInfo.bytes = pColInfo->info.bytes;
taosArrayPush(pInfo->pLinearInfo, &linearInfo);
}
pInfo->fillLastPoint = false;
return TSDB_CODE_SUCCESS;
}
static bool needToFillLastPoint(STimeSliceOperatorInfo* pSliceInfo) {
return (pSliceInfo->fillLastPoint == true && pSliceInfo->fillType == TSDB_FILL_LINEAR);
}
static int32_t initKeeperInfo(STimeSliceOperatorInfo* pInfo, SSDataBlock* pBlock) {
int32_t code;
code = initPrevRowsKeeper(pInfo, pBlock);
@ -2263,195 +2301,73 @@ static SSDataBlock* doTimeslice(SOperatorInfo* pOperator) {
for (int32_t i = 0; i < pBlock->info.rows; ++i) {
int64_t ts = *(int64_t*)colDataGetData(pTsCol, i);
if (i == 0 && needToFillLastPoint(pSliceInfo)) { // first row in current block
doKeepLinearInfo(pSliceInfo, pBlock, i, false);
while (pSliceInfo->current < ts && pSliceInfo->current <= pSliceInfo->win.ekey) {
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock);
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
}
}
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
break;
}
if (ts == pSliceInfo->current) {
blockDataEnsureCapacity(pResBlock, pResBlock->info.rows + 1);
for (int32_t j = 0; j < pOperator->exprSupp.numOfExprs; ++j) {
SExprInfo* pExprInfo = &pOperator->exprSupp.pExprInfo[j];
addCurrentRowToResult(pSliceInfo, &pOperator->exprSupp, pResBlock, pBlock, i);
int32_t dstSlot = pExprInfo->base.resSchema.slotId;
SColumnInfoData* pDst = taosArrayGet(pResBlock->pDataBlock, dstSlot);
if (IS_TIMESTAMP_TYPE(pExprInfo->base.resSchema.type)) {
colDataAppend(pDst, pResBlock->info.rows, (char*)&pSliceInfo->current, false);
} else {
int32_t srcSlot = pExprInfo->base.pParam[0].pCol->slotId;
SColumnInfoData* pSrc = taosArrayGet(pBlock->pDataBlock, srcSlot);
if (colDataIsNull_s(pSrc, i)) {
colDataAppendNULL(pDst, pResBlock->info.rows);
continue;
}
char* v = colDataGetData(pSrc, i);
colDataAppend(pDst, pResBlock->info.rows, v, false);
}
}
pResBlock->info.rows += 1;
doKeepPrevRows(pSliceInfo, pBlock, i);
doKeepLinearInfo(pSliceInfo, pBlock, i);
// for linear interpolation, always fill values between this point and the next;
// if it is the first point in the data block, also fill values between the previous point (if any) and this one;
// if it is the last point in the data block, there is no need to fill, but keep this point as the start value and do
// the interpolation when processing the next data block.
if (pSliceInfo->fillType == TSDB_FILL_LINEAR) {
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
if (i < pBlock->info.rows - 1) {
doKeepLinearInfo(pSliceInfo, pBlock, i, false);
int64_t nextTs = *(int64_t*)colDataGetData(pTsCol, i + 1);
if (nextTs > pSliceInfo->current) {
while (pSliceInfo->current < nextTs && pSliceInfo->current <= pSliceInfo->win.ekey) {
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock);
pSliceInfo->current = taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit,
pInterval->precision);
}
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
break;
}
}
} else { // it is the last row of current block
// store ts value as start, and calculate interp value when processing next block
doKeepLinearInfo(pSliceInfo, pBlock, i, true);
}
} else { // non-linear interpolation
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
break;
}
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
break;
}
} else if (ts < pSliceInfo->current) {
// in case the interpolation window starts and ends between two data points, fill(prev) needs to interpolate
doKeepPrevRows(pSliceInfo, pBlock, i);
doKeepLinearInfo(pSliceInfo, pBlock, i);
if (pSliceInfo->fillType == TSDB_FILL_LINEAR) {
// no need to increase pSliceInfo->current here
// pSliceInfo->current =
// taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
if (i < pBlock->info.rows - 1) {
doKeepLinearInfo(pSliceInfo, pBlock, i, false);
int64_t nextTs = *(int64_t*)colDataGetData(pTsCol, i + 1);
if (nextTs > pSliceInfo->current) {
while (pSliceInfo->current < nextTs && pSliceInfo->current <= pSliceInfo->win.ekey) {
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock);
pSliceInfo->current = taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit,
pInterval->precision);
}
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
if (i < pBlock->info.rows - 1) {
// in case the interpolation window starts and ends between two data points, fill(next) needs to interpolate
doKeepNextRows(pSliceInfo, pBlock, i + 1);
int64_t nextTs = *(int64_t*)colDataGetData(pTsCol, i + 1);
if (nextTs > pSliceInfo->current) {
while (pSliceInfo->current < nextTs && pSliceInfo->current <= pSliceInfo->win.ekey) {
if (!genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock, false) && pSliceInfo->fillType == TSDB_FILL_LINEAR) {
break;
} else {
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
}
}
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
break;
}
} else {
// store ts value as start, and calculate interp value when processing next block
doKeepLinearInfo(pSliceInfo, pBlock, i, true);
}
} else { // non-linear interpolation
if (i < pBlock->info.rows - 1) {
// in case the interpolation window starts and ends between two data points, fill(next) needs to interpolate
doKeepNextRows(pSliceInfo, pBlock, i + 1);
int64_t nextTs = *(int64_t*)colDataGetData(pTsCol, i + 1);
if (nextTs > pSliceInfo->current) {
while (pSliceInfo->current < nextTs && pSliceInfo->current <= pSliceInfo->win.ekey) {
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock);
pSliceInfo->current = taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit,
pInterval->precision);
}
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
break;
}
} else {
// ignore current row, and do nothing
}
} else { // it is the last row of current block
doKeepPrevRows(pSliceInfo, pBlock, i);
// ignore current row, and do nothing
}
} else { // it is the last row of current block
doKeepPrevRows(pSliceInfo, pBlock, i);
}
} else { // ts > pSliceInfo->current
// in case the interpolation window starts and ends between two data points, fill(next) needs to interpolate
doKeepNextRows(pSliceInfo, pBlock, i);
doKeepLinearInfo(pSliceInfo, pBlock, i);
while (pSliceInfo->current < ts && pSliceInfo->current <= pSliceInfo->win.ekey) {
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock);
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
if (!genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock, true) && pSliceInfo->fillType == TSDB_FILL_LINEAR) {
break;
} else {
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
}
}
// add current row if timestamp match
if (ts == pSliceInfo->current && pSliceInfo->current <= pSliceInfo->win.ekey) {
blockDataEnsureCapacity(pResBlock, pResBlock->info.rows + 1);
for (int32_t j = 0; j < pOperator->exprSupp.numOfExprs; ++j) {
SExprInfo* pExprInfo = &pOperator->exprSupp.pExprInfo[j];
int32_t dstSlot = pExprInfo->base.resSchema.slotId;
SColumnInfoData* pDst = taosArrayGet(pResBlock->pDataBlock, dstSlot);
if (IS_TIMESTAMP_TYPE(pExprInfo->base.resSchema.type)) {
colDataAppend(pDst, pResBlock->info.rows, (char*)&pSliceInfo->current, false);
} else {
int32_t srcSlot = pExprInfo->base.pParam[0].pCol->slotId;
SColumnInfoData* pSrc = taosArrayGet(pBlock->pDataBlock, srcSlot);
if (colDataIsNull_s(pSrc, i)) {
colDataAppendNULL(pDst, pResBlock->info.rows);
continue;
}
char* v = colDataGetData(pSrc, i);
colDataAppend(pDst, pResBlock->info.rows, v, false);
}
}
pResBlock->info.rows += 1;
addCurrentRowToResult(pSliceInfo, &pOperator->exprSupp, pResBlock, pBlock, i);
doKeepPrevRows(pSliceInfo, pBlock, i);
if (pSliceInfo->fillType == TSDB_FILL_LINEAR) {
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
if (i < pBlock->info.rows - 1) {
doKeepLinearInfo(pSliceInfo, pBlock, i, false);
int64_t nextTs = *(int64_t*)colDataGetData(pTsCol, i + 1);
if (nextTs > pSliceInfo->current) {
while (pSliceInfo->current < nextTs && pSliceInfo->current <= pSliceInfo->win.ekey) {
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock);
pSliceInfo->current = taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit,
pInterval->precision);
}
if (pSliceInfo->current > pSliceInfo->win.ekey) {
setOperatorCompleted(pOperator);
break;
}
}
} else { // it is the last row of current block
// store ts value as start, and calculate interp value when processing next block
doKeepLinearInfo(pSliceInfo, pBlock, i, true);
}
} else { // non-linear interpolation
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
}
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
}
if (pSliceInfo->current > pSliceInfo->win.ekey) {
@ -2466,7 +2382,7 @@ static SSDataBlock* doTimeslice(SOperatorInfo* pOperator) {
// except for fill(next), fill(linear)
while (pSliceInfo->current <= pSliceInfo->win.ekey && pSliceInfo->fillType != TSDB_FILL_NEXT &&
pSliceInfo->fillType != TSDB_FILL_LINEAR) {
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock);
genInterpolationResult(pSliceInfo, &pOperator->exprSupp, pResBlock, false);
pSliceInfo->current =
taosTimeAdd(pSliceInfo->current, pInterval->interval, pInterval->intervalUnit, pInterval->precision);
}
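The doTimeslice refactor above is dense; its control flow reduces to one idea: step pSliceInfo->current forward by the interval, emit interpolated rows until it reaches each incoming timestamp, and emit the real row on an exact match. A simplified Python sketch of that loop, under the assumption of a single column in ascending timestamp order and ignoring the linear-fill edge cases handled above (all names here are illustrative):

def timeslice(rows, current, ekey, step, gen_interp, add_row):
    # rows: list of (ts, value); current advances by step, never past ekey
    for ts, value in rows:
        if current > ekey:
            break                        # past the query window: operator completed
        # fill the gap before this data point
        while current < ts and current <= ekey:
            if not gen_interp(current):  # e.g. linear fill without both endpoints yet
                break
            current += step
        # exact timestamp match: emit the real row
        if ts == current and current <= ekey:
            add_row(ts, value)
            current += step
    return current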

View File

@ -438,11 +438,11 @@
,,,system-test,python3 ./test.py -f 1-insert/database_pre_suf.py
,,,system-test,python3 ./test.py -f 1-insert/InsertFuturets.py
,,,system-test,python3 ./test.py -f 0-others/show.py
,,,system-test,python3 ./test.py -f 2-query/abs.py
,,,system-test,./pytest.sh python3 ./test.py -f 2-query/abs.py
,,,system-test,python3 ./test.py -f 2-query/abs.py -R
,,,system-test,python3 ./test.py -f 2-query/and_or_for_byte.py
,,,system-test,python3 ./test.py -f 2-query/and_or_for_byte.py -R
,,,system-test,python3 ./test.py -f 2-query/apercentile.py
,,,system-test,./pytest.sh python3 ./test.py -f 2-query/apercentile.py
,,,system-test,python3 ./test.py -f 2-query/apercentile.py -R
,,,system-test,python3 ./test.py -f 2-query/arccos.py
,,,system-test,python3 ./test.py -f 2-query/arccos.py -R

View File

@ -45,7 +45,7 @@ fi
# Now getting ready to execute Python
# The following is the default of our standard dev env (Ubuntu 20.04), modify/adjust at your own risk
PYTHON_EXEC=python3.8
PYTHON_EXEC=python3
# First we need to set up a path for Python to find our own TAOS modules, so that "import" can work.
# export PYTHONPATH=$(pwd)/../../src/connector/python:$(pwd)

View File

@ -37,6 +37,7 @@ import requests
# from guppy import hpy
import gc
import taos
from taos.tmq import *
from .shared.types import TdColumns, TdTags
@ -419,10 +420,12 @@ class ThreadCoordinator:
except threading.BrokenBarrierError as err:
self._execStats.registerFailure("Aborted due to worker thread timeout")
Logging.error("\n")
Logging.error("Main loop aborted, caused by worker thread(s) time-out of {} seconds".format(
ThreadCoordinator.WORKER_THREAD_TIMEOUT))
Logging.error("TAOS related threads blocked at (stack frames top-to-bottom):")
ts = ThreadStacks()
ts.record_current_time(time.time()) # record thread exit time at current moment
ts.print(filterInternal=True)
workerTimeout = True
@ -546,7 +549,12 @@ class ThreadCoordinator:
# pick a task type for current state
db = self.pickDatabase()
taskType = db.getStateMachine().pickTaskType() # dynamic name of class
if Dice.throw(2)==1:
taskType = db.getStateMachine().pickTaskType() # dynamic name of class
else:
taskType = db.getStateMachine().balance_pickTaskType() # alternative method that balances task type frequencies
pass
return taskType(self._execStats, db) # create a task from it
def resetExecutedTasks(self):
@ -674,9 +682,15 @@ class AnyState:
# only "under normal circumstances", as we may override it with the -b option
CAN_DROP_DB = 2
CAN_CREATE_FIXED_SUPER_TABLE = 3
CAN_CREATE_STREAM = 3 # super table must exists
CAN_CREATE_TOPIC = 3 # super table must exists
CAN_CREATE_CONSUMERS = 3
CAN_DROP_FIXED_SUPER_TABLE = 4
CAN_DROP_TOPIC = 4
CAN_DROP_STREAM = 4
CAN_ADD_DATA = 5
CAN_READ_DATA = 6
CAN_DELETE_DATA = 6
def __init__(self):
self._info = self.getInfo()
@ -727,12 +741,30 @@ class AnyState:
return False
return self._info[self.CAN_DROP_FIXED_SUPER_TABLE]
def canCreateTopic(self):
return self._info[self.CAN_CREATE_TOPIC]
def canDropTopic(self):
return self._info[self.CAN_DROP_TOPIC]
def canCreateConsumers(self):
return self._info[self.CAN_CREATE_CONSUMERS]
def canCreateStreams(self):
return self._info[self.CAN_CREATE_STREAM]
def canDropStream(self):
return self._info[self.CAN_DROP_STREAM]
def canAddData(self):
return self._info[self.CAN_ADD_DATA]
def canReadData(self):
return self._info[self.CAN_READ_DATA]
def canDeleteData(self):
return self._info[self.CAN_DELETE_DATA]
def assertAtMostOneSuccess(self, tasks, cls):
sCnt = 0
for task in tasks:
@ -902,7 +934,7 @@ class StateHasData(AnyState):
): # only if we didn't create one
# we shouldn't have dropped it
self.assertNoTask(tasks, TaskDropDb)
if (not self.hasTask(tasks, TaskCreateSuperTable)
if not( self.hasTask(tasks, TaskCreateSuperTable)
): # if we didn't create the table
# we should not have a task that drops it
self.assertNoTask(tasks, TaskDropSuperTable)
@ -974,14 +1006,21 @@ class StateMechine:
# did not do this when opening the connection, and this is NOT the worker
# thread, which does this on their own
dbc.use(dbName)
if not dbc.hasTables(): # no tables
Logging.debug("[STT] DB_ONLY found, between {} and {}".format(ts, time.time()))
return StateDbOnly()
# For sure we have tables, which means we must have the super table. # TODO: are we sure?
sTable = self._db.getFixedSuperTable()
if sTable.hasRegTables(dbc): # no regular tables
if sTable.hasRegTables(dbc): # no regular tables
# print("debug=====*\n"*100)
Logging.debug("[STT] SUPER_TABLE_ONLY found, between {} and {}".format(ts, time.time()))
return StateSuperTableOnly()
else: # has actual tables
Logging.debug("[STT] HAS_DATA found, between {} and {}".format(ts, time.time()))
@ -1051,6 +1090,28 @@ class StateMechine:
# Logging.debug(" (weighted random:{}/{}) ".format(i, len(taskTypes)))
return taskTypes[i]
def balance_pickTaskType(self):
# all the task types we can choose from at the current state
BasicTypes = self.getTaskTypes()
weightsTypes = BasicTypes.copy()
# this weight table balances the frequency of the task types
balance_TaskType_matrixs = {'TaskDropDb': 5 , 'TaskDropTopics': 20 , 'TaskDropStreams':10 , 'TaskDropStreamTables':10 ,
'TaskReadData':50 , 'TaskDropSuperTable':5 , 'TaskAlterTags':3 , 'TaskAddData':10,
'TaskDeleteData':10 , 'TaskCreateDb':10 , 'TaskCreateStream': 3, 'TaskCreateTopic' :3,
'TaskCreateConsumers':10, 'TaskCreateSuperTable': 10 } # TaskType : extra weight assigned to the task
for task , weights in balance_TaskType_matrixs.items():
for basicType in BasicTypes:
if basicType.__name__ == task:
for _ in range(weights):
weightsTypes.append(basicType)
task = random.sample(weightsTypes,1)
return task[0]
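balance_pickTaskType biases the uniform choice by appending extra copies of each task class according to the weight table above. A quick, illustrative way to sanity-check the resulting distribution (subset of the weights, hypothetical standalone snippet):

import random
from collections import Counter

weights = {'TaskReadData': 50, 'TaskAddData': 10, 'TaskDropDb': 5}   # subset of the table above
pool = [name for name, w in weights.items() for _ in range(1 + w)]  # one base copy plus w extras
picks = Counter(random.choice(pool) for _ in range(10_000))
print(picks)  # TaskReadData should dominate, roughly in a 51:11:6 ratio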
# ref:
# https://eli.thegreenplace.net/2010/01/22/weighted-random-generation-in-python/
def _weighted_choice_sub(self, weights) -> int:
@ -1109,6 +1170,7 @@ class Database:
return "fs_table"
def getFixedSuperTable(self) -> TdSuperTable:
return TdSuperTable(self.getFixedSuperTableName(), self.getName())
# We aim to create a starting time tick, such that, whenever we run our test here once
@ -1342,6 +1404,19 @@ class Task():
0x2603, # Table does not exist, replaced by 2662 below
0x260d, # Tags number not matched
0x2662, # Table does not exist #TODO: what about 2603 above?
0x2600, # database not specified, SQL: show stables ; database dropped, then show tables
0x032C, # Object is creating
0x032D, # Object is dropping
0x03D3, # Conflict transaction not completed
0x0707, # Query not ready, always occurs with replica 3
0x707, # Query not ready
0x396, # Database in creating status
0x386, # Database in dropping status
0x03E1, # failed on tmq_subscribe, topic does not exist
0x03ed , # Topic must be dropped first, SQL: drop database db_0
0x0203 , # Invalid value
0x03f0 , # Stream already exists, topic already exists
@ -1638,9 +1713,12 @@ class TaskCreateDb(StateTransitionTask):
# numReplica = Dice.throw(Settings.getConfig().max_replicas) + 1 # 1,2 ... N
numReplica = Config.getConfig().num_replicas # fixed, always
repStr = "replica {}".format(numReplica)
updatePostfix = "update 1" if Config.getConfig().verify_data else "" # allow update only when "verify data" is active
updatePostfix = "" if Config.getConfig().verify_data else "" # allow update only when "verify data" is active , 3.0 version default is update 1
vg_nums = random.randint(1,8)
cache_model = Dice.choice(['none' , 'last_row' , 'last_value' , 'both'])
buffer = random.randint(3,128)
dbName = self._db.getName()
self.execWtSql(wt, "create database {} {} {} ".format(dbName, repStr, updatePostfix ) )
self.execWtSql(wt, "create database {} {} {} vgroups {} cachemodel '{}' buffer {} ".format(dbName, repStr, updatePostfix, vg_nums, cache_model,buffer ) )
if dbName == "db_0" and Config.getConfig().use_shadow_db:
self.execWtSql(wt, "create database {} {} {} ".format("db_s", repStr, updatePostfix ) )
@ -1654,9 +1732,211 @@ class TaskDropDb(StateTransitionTask):
return state.canDropDb()
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
self.execWtSql(wt, "drop database {}".format(self._db.getName()))
try:
self.queryWtSql(wt, "drop database {}".format(self._db.getName())) # drop database maybe failed ,because topic exists
except taos.error.ProgrammingError as err:
errno = Helper.convertErrno(err.errno)
if errno in [0x0203]: # drop may fail
pass
Logging.debug("[OPS] database dropped at {}".format(time.time()))
class TaskCreateStream(StateTransitionTask):
@classmethod
def getEndState(cls):
return StateHasData()
@classmethod
def canBeginFrom(cls, state: AnyState):
return state.canCreateStreams()
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
dbname = self._db.getName()
sub_stream_name = dbname+ '_sub_stream'
sub_stream_tb_name = 'stream_tb_sub'
super_stream_name = dbname+ '_super_stream'
super_stream_tb_name = 'stream_tb_super'
if not self._db.exists(wt.getDbConn()):
Logging.debug("Skipping task, no DB yet")
return
sTable = self._db.getFixedSuperTable() # type: TdSuperTable
# wt.execSql("use db") # should always be in place
stbname =sTable.getName()
sub_tables = sTable.getRegTables(wt.getDbConn())
aggExpr = Dice.choice([
'count(*)', 'avg(speed)', 'sum(speed)', 'stddev(speed)','min(speed)', 'max(speed)', 'first(speed)', 'last(speed)',
'apercentile(speed, 10)', 'last_row(*)', 'twa(speed)'])
stream_sql = '' # set default value
if sub_tables:
sub_tbname = sub_tables[0]
# create stream with query above sub_table
stream_sql = 'create stream {} into {}.{} as select {}, avg(speed) FROM {}.{} PARTITION BY tbname INTERVAL(5s) SLIDING(3s) '.\
format(sub_stream_name,dbname,sub_stream_tb_name ,aggExpr,dbname,sub_tbname)
else:
stream_sql = 'create stream {} into {}.{} as select {}, avg(speed) FROM {}.{} PARTITION BY tbname INTERVAL(5s) SLIDING(3s) '.\
format(super_stream_name,dbname,super_stream_tb_name,aggExpr, dbname,stbname)
self.execWtSql(wt, stream_sql)
Logging.debug("[OPS] stream is creating at {}".format(time.time()))
class TaskCreateTopic(StateTransitionTask):
@classmethod
def getEndState(cls):
return StateHasData()
@classmethod
def canBeginFrom(cls, state: AnyState):
return state.canCreateTopic()
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
dbname = self._db.getName()
sub_topic_name = dbname+ '_sub_topic'
super_topic_name = dbname+ '_super_topic'
stable_topic = dbname+ '_stable_topic'
db_topic = 'database_' + dbname+ '_topics'
if not self._db.exists(wt.getDbConn()):
Logging.debug("Skipping task, no DB yet")
return
sTable = self._db.getFixedSuperTable() # type: TdSuperTable
# wt.execSql("use db") # should always be in place
# create topic if not exists topic_ctb_column as select ts, c1, c2, c3 from stb1;
stbname =sTable.getName()
sub_tables = sTable.getRegTables(wt.getDbConn())
scalarExpr = Dice.choice([ '*','speed','color','abs(speed)','acos(speed)','asin(speed)','atan(speed)','ceil(speed)','cos(speed)','cos(speed)',
'floor(speed)','log(speed,2)','pow(speed,2)','round(speed)','sin(speed)','sqrt(speed)','char_length(color)','concat(color,color)',
'concat_ws(" ", color,color," ")','length(color)', 'lower(color)', 'ltrim(color)','substr(color , 2)','upper(color)','cast(speed as double)',
'cast(ts as bigint)'])
topic_sql = '' # set default value
if Dice.throw(3)==0: # create topic : source data from sub query
if sub_tables: # if not empty
sub_tbname = sub_tables[0]
# create topic : source data from sub query of sub stable
topic_sql = 'create topic {} as select {} FROM {}.{} ; '.format(sub_topic_name,scalarExpr,dbname,sub_tbname)
else: # create topic : source data from sub query of stable
topic_sql = 'create topic {} as select {} FROM {}.{} '.format(super_topic_name,scalarExpr, dbname,stbname)
elif Dice.throw(3)==1: # create topic : source data from super table
topic_sql = 'create topic {} AS STABLE {}.{} '.format(stable_topic,dbname,stbname)
elif Dice.throw(3)==2: # create topic : source data from whole database
topic_sql = 'create topic {} AS DATABASE {} '.format(db_topic,dbname)
else:
pass
# exec create topics
self.execWtSql(wt, "use {}".format(dbname))
self.execWtSql(wt, topic_sql)
Logging.debug("[OPS] db topic is creating at {}".format(time.time()))
class TaskDropTopics(StateTransitionTask):
@classmethod
def getEndState(cls):
return StateHasData()
@classmethod
def canBeginFrom(cls, state: AnyState):
return state.canDropTopic()
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
dbname = self._db.getName()
if not self._db.exists(wt.getDbConn()):
Logging.debug("Skipping task, no DB yet")
return
sTable = self._db.getFixedSuperTable() # type: TdSuperTable
# wt.execSql("use db") # should always be in place
tblName = sTable.getName()
if sTable.hasTopics(wt.getDbConn()):
sTable.dropTopics(wt.getDbConn(),dbname,None) # drop topics of database
sTable.dropTopics(wt.getDbConn(),dbname,tblName) # drop topics of stable
class TaskDropStreams(StateTransitionTask):
@classmethod
def getEndState(cls):
return StateHasData()
@classmethod
def canBeginFrom(cls, state: AnyState):
return state.canDropStream()
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
# dbname = self._db.getName()
if not self._db.exists(wt.getDbConn()):
Logging.debug("Skipping task, no DB yet")
return
sTable = self._db.getFixedSuperTable() # type: TdSuperTable
# wt.execSql("use db") # should always be in place
# tblName = sTable.getName()
if sTable.hasStreams(wt.getDbConn()):
sTable.dropStreams(wt.getDbConn()) # drop stream of database
class TaskDropStreamTables(StateTransitionTask):
@classmethod
def getEndState(cls):
return StateHasData()
@classmethod
def canBeginFrom(cls, state: AnyState):
return state.canDropStream()
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
# dbname = self._db.getName()
if not self._db.exists(wt.getDbConn()):
Logging.debug("Skipping task, no DB yet")
return
sTable = self._db.getFixedSuperTable() # type: TdSuperTable
wt.execSql("use db") # should always be in place
# tblName = sTable.getName()
if sTable.hasStreamTables(wt.getDbConn()):
sTable.dropStreamTables(wt.getDbConn()) # drop stream tables
class TaskCreateConsumers(StateTransitionTask):
@classmethod
def getEndState(cls):
return StateHasData()
@classmethod
def canBeginFrom(cls, state: AnyState):
return state.canCreateConsumers()
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
if Config.getConfig().connector_type == 'native':
sTable = self._db.getFixedSuperTable() # type: TdSuperTable
# wt.execSql("use db") # should always be in place
if sTable.hasTopics(wt.getDbConn()):
sTable.createConsumer(wt.getDbConn(),random.randint(1,10))
pass
else:
print(" restful not support tmq consumers")
return
class TaskCreateSuperTable(StateTransitionTask):
@classmethod
def getEndState(cls):
@ -1673,7 +1953,7 @@ class TaskCreateSuperTable(StateTransitionTask):
sTable = self._db.getFixedSuperTable() # type: TdSuperTable
# wt.execSql("use db") # should always be in place
sTable.create(wt.getDbConn(),
{'ts': TdDataType.TIMESTAMP, 'speed': TdDataType.INT, 'color': TdDataType.BINARY16}, {
'b': TdDataType.BINARY200, 'f': TdDataType.FLOAT},
@ -1688,14 +1968,17 @@ class TdSuperTable:
def __init__(self, stName, dbName):
self._stName = stName
self._dbName = dbName
self._consumerLists = {}
self._ConsumerInsts = []
def getName(self):
return self._stName
def drop(self, dbc, skipCheck = False):
dbName = self._dbName
if self.exists(dbc) : # if myself exists
fullTableName = dbName + '.' + self._stName
fullTableName = dbName + '.' + self._stName
dbc.execute("DROP TABLE {}".format(fullTableName))
else:
if not skipCheck:
@ -1711,10 +1994,12 @@ class TdSuperTable:
dbName = self._dbName
dbc.execute("USE " + dbName)
fullTableName = dbName + '.' + self._stName
fullTableName = dbName + '.' + self._stName
if dbc.existsSuperTable(self._stName):
if dropIfExists:
dbc.execute("DROP TABLE {}".format(fullTableName))
if dropIfExists:
dbc.execute("DROP TABLE {}".format(fullTableName))
else: # error
raise CrashGenError("Cannot create super table, already exists: {}".format(self._stName))
@ -1728,12 +2013,61 @@ class TdSuperTable:
)
else:
sql += " TAGS (dummy int) "
dbc.execute(sql)
dbc.execute(sql)
def createConsumer(self, dbc,Consumer_nums):
def generateConsumer(current_topic_list):
conf = TaosTmqConf()
conf.set("group.id", "tg2")
conf.set("td.connect.user", "root")
conf.set("td.connect.pass", "taosdata")
conf.set("enable.auto.commit", "true")
def tmq_commit_cb_print(tmq, resp, offset, param=None):
print(f"commit: {resp}, tmq: {tmq}, offset: {offset}, param: {param}")
conf.set_auto_commit_cb(tmq_commit_cb_print, None)
consumer = conf.new_consumer()
topic_list = TaosTmqList()
for topic in current_topic_list:
topic_list.append(topic)
try:
consumer.subscribe(topic_list)
except TmqError as e :
pass
# consumer with a random lifetime
time_start = time.time()
while 1:
res = consumer.poll(1000)
if time.time() - time_start >random.randint(5,50) :
break
try:
consumer.unsubscribe()
except TmqError as e :
pass
return
# multiple consumers
current_topic_list = self.getTopicLists(dbc)
for i in range(Consumer_nums):
consumer_inst = threading.Thread(target=generateConsumer, args=(current_topic_list,))
self._ConsumerInsts.append(consumer_inst)
for ConsumerInst in self._ConsumerInsts:
ConsumerInst.start()
for ConsumerInst in self._ConsumerInsts:
ConsumerInst.join()
def getTopicLists(self, dbc: DbConn):
dbc.query("show topics ")
topics = dbc.getQueryResult()
topicLists = [v[0] for v in topics]
return topicLists
def getRegTables(self, dbc: DbConn):
dbName = self._dbName
try:
dbc.query("select TBNAME from {}.{}".format(dbName, self._stName)) # TODO: analyze result set later
dbc.query("select distinct TBNAME from {}.{}".format(dbName, self._stName)) # TODO: analyze result set later
except taos.error.ProgrammingError as err:
errno2 = Helper.convertErrno(err.errno)
Logging.debug("[=] Failed to get tables from super table: errno=0x{:X}, msg: {}".format(errno2, err))
@ -1743,7 +2077,75 @@ class TdSuperTable:
return [v[0] for v in qr] # list transformation, ref: https://stackoverflow.com/questions/643823/python-list-transformation
def hasRegTables(self, dbc: DbConn):
return dbc.query("SELECT * FROM {}.{}".format(self._dbName, self._stName)) > 0
if dbc.existsSuperTable(self._stName):
return dbc.query("SELECT * FROM {}.{}".format(self._dbName, self._stName)) > 0
else:
return False
def hasStreamTables(self,dbc: DbConn):
return dbc.query("show {}.stables like 'stream_tb%'".format(self._dbName)) > 0
def hasStreams(self,dbc: DbConn):
return dbc.query("show streams") > 0
def hasTopics(self,dbc: DbConn):
return dbc.query("show topics") > 0
def dropTopics(self,dbc: DbConn , dbname=None,stb_name=None):
dbc.query("show topics ")
topics = dbc.getQueryResult()
if dbname !=None and stb_name == None :
for topic in topics:
if dbname in topic[0] and topic[0].startswith("database"):
try:
dbc.execute('drop topic {}'.format(topic[0]))
Logging.debug("[OPS] topic {} is droping at {}".format(topic,time.time()))
except taos.error.ProgrammingError as err:
errno = Helper.convertErrno(err.errno)
if errno in [0x03EB]: # Topic subscribed cannot be dropped
pass
# for subsript in subscriptions:
else:
pass
pass
return True
elif dbname !=None and stb_name!= None:
for topic in topics:
if topic[0].startswith(self._dbName) and topic[0].endswith('topic'):
dbc.execute('drop topic {}'.format(topic[0]))
Logging.debug("[OPS] topic {} is droping at {}".format(topic,time.time()))
return True
else:
return True
pass
def dropStreams(self,dbc:DbConn):
dbc.query("show streams ")
Streams = dbc.getQueryResult()
for Stream in Streams:
if Stream[0].startswith(self._dbName):
dbc.execute('drop stream {}'.format(Stream[0]))
return not dbc.query("show streams ") > 0
def dropStreamTables(self, dbc: DbConn):
dbc.query("show {}.stables like 'stream_tb%'".format(self._dbName))
StreamTables = dbc.getQueryResult()
for StreamTable in StreamTables:
if self.dropStreams(dbc):
dbc.execute('drop table {}.{}'.format(self._dbName,StreamTable[0]))
return not dbc.query("show {}.stables like 'stream_tb%'".format(self._dbName))
def ensureRegTable(self, task: Optional[Task], dbc: DbConn, regTableName: str):
'''
@ -1838,10 +2240,46 @@ class TdSuperTable:
# Run the query against the regular table first
doAggr = (Dice.throw(2) == 0) # 1 in 2 chance
if not doAggr: # don't do aggregate query, just simple one
commonExpr = Dice.choice([
'*',
'abs(speed)',
'acos(speed)',
'asin(speed)',
'atan(speed)',
'ceil(speed)',
'cos(speed)',
'cos(speed)',
'floor(speed)',
'log(speed,2)',
'pow(speed,2)',
'round(speed)',
'sin(speed)',
'sqrt(speed)',
'char_length(color)',
'concat(color,color)',
'concat_ws(" ", color,color," ")',
'length(color)',
'lower(color)',
'ltrim(color)',
'substr(color , 2)',
'upper(color)',
'cast(speed as double)',
'cast(ts as bigint)',
# 'TO_ISO8601(color)',
# 'TO_UNIXTIMESTAMP(ts)',
'now()',
'timediff(ts,now)',
'timezone()',
'TIMETRUNCATE(ts,1s)',
'TIMEZONE()',
'TODAY()',
'distinct(color)'
]
)
ret.append(SqlQuery( # reg table
"select {} from {}.{}".format('*', self._dbName, rTbName)))
"select {} from {}.{}".format(commonExpr, self._dbName, rTbName)))
ret.append(SqlQuery( # super table
"select {} from {}.{}".format('*', self._dbName, self.getName())))
"select {} from {}.{}".format(commonExpr, self._dbName, self.getName())))
else: # Aggregate query
aggExpr = Dice.choice([
'count(*)',
@ -1857,17 +2295,34 @@ class TdSuperTable:
'top(speed, 50)', # TODO: not supported?
'bottom(speed, 50)', # TODO: not supported?
'apercentile(speed, 10)', # TODO: TD-1316
# 'last_row(speed)', # TODO: commented out per TD-3231, we should re-create
'last_row(*)', # TODO: commented out per TD-3231, we should re-create
# Transformation Functions
# 'diff(speed)', # TODO: no supported?!
'spread(speed)'
'spread(speed)',
'elapsed(ts)',
'mode(speed)',
'bottom(speed,1)',
'top(speed,1)',
'tail(speed,1)',
'unique(color)',
'csum(speed)',
'DERIVATIVE(speed,1s,1)',
'diff(speed,1)',
'irate(speed)',
'mavg(speed,3)',
'sample(speed,5)',
'STATECOUNT(speed,"LT",1)',
'STATEDURATION(speed,"LT",1)',
'twa(speed)'
]) # TODO: add more from 'top'
# if aggExpr not in ['stddev(speed)']: # STDDEV not valid for super tables?! (Done in TD-1049)
sql = "select {} from {}.{}".format(aggExpr, self._dbName, self.getName())
if Dice.throw(3) == 0: # 1 in X chance
sql = sql + ' GROUP BY color'
partion_expr = Dice.choice(['color','tbname'])
sql = sql + ' partition BY ' + partion_expr + ' order by ' + partion_expr
Progress.emit(Progress.QUERY_GROUP_BY)
# Logging.info("Executing GROUP-BY query: " + sql)
ret.append(SqlQuery(sql))
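With the expanded scalar and aggregate expression lists and the new partition clause, typical generated queries might look like the following (hypothetical renderings, assuming database db_0, super table fs_table and sub table reg_table_0 as elsewhere in this file):

# hypothetical outputs of the query generator above
scalar_sql = "select char_length(color) from db_0.reg_table_0"
agg_sql    = "select avg(speed) from db_0.fs_table partition BY tbname order by tbname"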
@ -1974,6 +2429,7 @@ class TaskDropSuperTable(StateTransitionTask):
isSuccess = False
Logging.debug("[DB] Acceptable error when dropping a table")
continue # try to delete next regular table
if (not tickOutput):
tickOutput = True # Print only one time
@ -1985,6 +2441,8 @@ class TaskDropSuperTable(StateTransitionTask):
# Drop the super table itself
tblName = self._db.getFixedSuperTableName()
self.execWtSql(wt, "drop table {}.{}".format(self._db.getName(), tblName))
class TaskAlterTags(StateTransitionTask):
@ -2234,6 +2692,220 @@ class TaskAddData(StateTransitionTask):
self.activeTable.discard(i) # not raising an error, unlike remove
class TaskDeleteData(StateTransitionTask):
# Track which table is being actively worked on
activeTable: Set[int] = set()
# We use these two files to record operations to DB, useful for power-off tests
fAddLogReady = None # type: Optional[io.TextIOWrapper]
fAddLogDone = None # type: Optional[io.TextIOWrapper]
@classmethod
def prepToRecordOps(cls):
if Config.getConfig().record_ops:
if (cls.fAddLogReady is None):
Logging.info(
"Recording in a file operations to be performed...")
cls.fAddLogReady = open("add_log_ready.txt", "w")
if (cls.fAddLogDone is None):
Logging.info("Recording in a file operations completed...")
cls.fAddLogDone = open("add_log_done.txt", "w")
@classmethod
def getEndState(cls):
return StateHasData()
@classmethod
def canBeginFrom(cls, state: AnyState):
return state.canDeleteData()
def _lockTableIfNeeded(self, fullTableName, extraMsg = ''):
if Config.getConfig().verify_data:
# Logging.info("Locking table: {}".format(fullTableName))
self.lockTable(fullTableName)
# Logging.info("Table locked {}: {}".format(extraMsg, fullTableName))
# print("_w" + str(nextInt % 100), end="", flush=True) # Trace what was written
else:
# Logging.info("Skipping locking table")
pass
def _unlockTableIfNeeded(self, fullTableName):
if Config.getConfig().verify_data:
# Logging.info("Unlocking table: {}".format(fullTableName))
self.unlockTable(fullTableName)
# Logging.info("Table unlocked: {}".format(fullTableName))
else:
pass
# Logging.info("Skipping unlocking table")
def _deleteData(self, db: Database, dbc, regTableName, te: TaskExecutor): # implied: NOT in batches
numRecords = self.LARGE_NUMBER_OF_RECORDS if Config.getConfig().larger_data else self.SMALL_NUMBER_OF_RECORDS
del_Records = int(numRecords/5)
if Dice.throw(2) == 0:
for j in range(del_Records): # number of records per table
intToWrite = db.getNextInt()
nextTick = db.getNextTick()
# nextColor = db.getNextColor()
if Config.getConfig().record_ops:
self.prepToRecordOps()
if self.fAddLogReady is None:
raise CrashGenError("Unexpected empty fAddLogReady")
self.fAddLogReady.write("Ready to delete {} to {}\n".format(intToWrite, regTableName))
self.fAddLogReady.flush()
os.fsync(self.fAddLogReady.fileno())
# TODO: too ugly trying to lock the table reliably, refactor...
fullTableName = db.getName() + '.' + regTableName
self._lockTableIfNeeded(fullTableName) # so that we can verify the read-back. TODO: deal with exceptions before unlock
try:
sql = "delete from {} where ts = '{}' ;".format( # removed: tags ('{}', {})
fullTableName,
# ds.getFixedSuperTableName(),
# ds.getNextBinary(), ds.getNextFloat(),
nextTick)
# print(sql)
# Logging.info("Adding data: {}".format(sql))
dbc.execute(sql)
# Logging.info("Data added: {}".format(sql))
intWrote = intToWrite
# Quick hack, attach an update statement here. TODO: create an "update" task
if (not Config.getConfig().use_shadow_db) and Dice.throw(5) == 0: # 1 in N chance, plus not using shadow DB
intToUpdate = db.getNextInt() # Updated but should not succeed
# nextColor = db.getNextColor()
sql = "delete from {} where ts = '{}' ;".format( # "INSERt" means "update" here
fullTableName,
nextTick)
# sql = "UPDATE {} set speed={}, color='{}' WHERE ts='{}'".format(
# fullTableName, db.getNextInt(), db.getNextColor(), nextTick)
dbc.execute(sql)
intWrote = intToUpdate # We updated, seems TDengine non-cluster accepts this.
except: # Any exception at all
self._unlockTableIfNeeded(fullTableName)
raise
# Now read it back and verify, we might encounter an error if table is dropped
if Config.getConfig().verify_data: # only if command line asks for it
try:
dbc.query("SELECT * from {}.{} WHERE ts='{}'".
format(db.getName(), regTableName, nextTick))
result = dbc.getQueryResult()
if len(result)==0:
# an empty result means the data has been deleted
print("D1",end="") # D1 means the delete succeeded (single record)
else:
print("DF",end="") # DF means delete failed
except taos.error.ProgrammingError as err:
errno = Helper.convertErrno(err.errno)
# if errno == CrashGenError.INVALID_EMPTY_RESULT: # empty result
# print("D1",end="") # D1 means delete data success and only 1 record
if errno in [0x218, 0x362,0x2662]: # table doesn't exist
# do nothing
pass
else:
# Re-throw otherwise
raise
finally:
self._unlockTableIfNeeded(fullTableName) # Quite ugly, refactor lock/unlock
# Done with read-back verification, unlock the table now
# Successfully wrote the data into the DB, let's record it somehow
te.recordDataMark(intWrote)
else:
# delete all data and verify; the table is expected to be empty
if Config.getConfig().record_ops:
self.prepToRecordOps()
if self.fAddLogReady is None:
raise CrashGenError("Unexpected empty fAddLogReady")
self.fAddLogReady.write("Ready to delete {} to {}\n".format(intToWrite, regTableName))
self.fAddLogReady.flush()
os.fsync(self.fAddLogReady.fileno())
# TODO: too ugly trying to lock the table reliably, refactor...
fullTableName = db.getName() + '.' + regTableName
self._lockTableIfNeeded(fullTableName) # so that we can verify the read-back. TODO: deal with exceptions before unlock
try:
sql = "delete from {} ;".format( # removed: tags ('{}', {})
fullTableName)
# Logging.info("Adding data: {}".format(sql))
dbc.execute(sql)
# Logging.info("Data added: {}".format(sql))
# Quick hack, attach an update statement here. TODO: create an "update" task
if (not Config.getConfig().use_shadow_db) and Dice.throw(5) == 0: # 1 in N chance, plus not using shadow DB
sql = "delete from {} ;".format( # "INSERt" means "update" here
fullTableName)
dbc.execute(sql)
except: # Any exception at all
self._unlockTableIfNeeded(fullTableName)
raise
# Now read it back and verify, we might encounter an error if table is dropped
if Config.getConfig().verify_data: # only if command line asks for it
try:
dbc.query("SELECT * from {}.{} WHERE ts='{}'".
format(db.getName(), regTableName, nextTick))
result = dbc.getQueryResult()
if len(result)==0:
# an empty result means all data has been deleted
print("DA",end="") # DA means the full-table delete succeeded
else:
print("DF",end="") # DF means delete failed
except taos.error.ProgrammingError as err:
errno = Helper.convertErrno(err.errno)
# if errno == CrashGenError.INVALID_EMPTY_RESULT: # empty result
# print("Da",end="") # Da means delete data success and for all datas
if errno in [0x218, 0x362,0x2662]: # table doesn't exist
# do nothing
pass
else:
# Re-throw otherwise
raise
finally:
self._unlockTableIfNeeded(fullTableName) # Quite ugly, refactor lock/unlock
# Done with read-back verification, unlock the table now
if Config.getConfig().record_ops:
if self.fAddLogDone is None:
raise CrashGenError("Unexpected empty fAddLogDone")
self.fAddLogDone.write("Wrote {} to {}\n".format(intWrote, regTableName))
self.fAddLogDone.flush()
os.fsync(self.fAddLogDone.fileno())
def _executeInternal(self, te: TaskExecutor, wt: WorkerThread):
# ds = self._dbManager # Quite DANGEROUS here, may result in multi-thread client access
db = self._db
dbc = wt.getDbConn()
numTables = self.LARGE_NUMBER_OF_TABLES if Config.getConfig().larger_data else self.SMALL_NUMBER_OF_TABLES
numRecords = self.LARGE_NUMBER_OF_RECORDS if Config.getConfig().larger_data else self.SMALL_NUMBER_OF_RECORDS
tblSeq = list(range(numTables ))
random.shuffle(tblSeq) # now we have random sequence
for i in tblSeq:
if (i in self.activeTable): # wow already active
# print("x", end="", flush=True) # concurrent insertion
Progress.emit(Progress.CONCURRENT_INSERTION)
else:
self.activeTable.add(i) # marking it active
dbName = db.getName()
sTable = db.getFixedSuperTable()
regTableName = self.getRegTableName(i) # "db.reg_table_{}".format(i)
fullTableName = dbName + '.' + regTableName
# self._lockTable(fullTableName) # "create table" below. Stop it if the table is "locked"
sTable.ensureRegTable(self, wt.getDbConn(), regTableName) # Ensure the table exists
# self._unlockTable(fullTableName)
self._deleteData(db, dbc, regTableName, te)
self.activeTable.discard(i) # not raising an error, unlike remove
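TaskDeleteData above interleaves locking, deletion and read-back checks; the core verification pattern it follows can be summarized in a few lines. This is a sketch only, assuming the same dbc.execute/dbc.query/getQueryResult API used throughout this file:

def delete_and_verify(dbc, full_table, ts):
    dbc.execute("delete from {} where ts = '{}' ;".format(full_table, ts))
    # read back: an empty result set means the delete took effect
    dbc.query("SELECT * from {} WHERE ts='{}'".format(full_table, ts))
    if len(dbc.getQueryResult()) == 0:
        print("D1", end="")   # delete verified
    else:
        print("DF", end="")   # delete failed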
class ThreadStacks: # stack info for all threads
def __init__(self):
@ -2244,6 +2916,9 @@ class ThreadStacks: # stack info for all threads
shortTid = th.native_id % 10000 #type: ignore
self._allStacks[shortTid] = stack # Was using th.native_id
def record_current_time(self,current_time):
self.current_time = current_time
def print(self, filteredEndName = None, filterInternal = False):
for shortTid, stack in self._allStacks.items(): # for each thread, stack frames top to bottom
lastFrame = stack[-1]
@ -2258,8 +2933,11 @@ class ThreadStacks: # stack info for all threads
continue # ignore
# Now print
print("\n<----- Thread Info for LWP/ID: {} (most recent call last) <-----".format(shortTid))
lastSqlForThread = DbConn.fetchSqlForThread(shortTid)
print("Last SQL statement attempted from thread {} is: {}".format(shortTid, lastSqlForThread))
last_sql_commit_time = DbConn.get_save_sql_time(shortTid)
# time_cost = DbConn.get_time_cost()
print("Last SQL statement attempted from thread {} ({:.4f} sec ago) is: {}".format(shortTid, self.current_time-last_sql_commit_time ,lastSqlForThread))
stackFrame = 0
for frame in stack: # was using: reversed(stack)
# print(frame)
@ -2268,6 +2946,8 @@ class ThreadStacks: # stack info for all threads
print(" {}".format(frame.line))
stackFrame += 1
print("-----> End of Thread Info ----->\n")
if self.current_time-last_sql_commit_time >100: # possible deadlock
print("thread {} may be deadlocked ".format(shortTid))
class ClientManager:
def __init__(self):
@ -2631,4 +3311,3 @@ class Container():
return
self._verifyValidProperty(name)
self._cargo[name] = value

View File

@ -26,10 +26,13 @@ class DbConn:
TYPE_NATIVE = "native-c"
TYPE_REST = "rest-api"
TYPE_INVALID = "invalid"
# class variables
lastSqlFromThreads : dict[int, str] = {} # stored by thread id, obtained from threading.current_thread().ident%10000
spendThreads : dict[int, float] = {} # stored by thread id, obtained from threading.current_thread().ident%10000
current_time : dict[int, float] = {} # save current time
@classmethod
def saveSqlForCurrentThread(cls, sql: str):
'''
@ -37,15 +40,56 @@ class DbConn:
run into a dead-lock situation, we can pick out the deadlocked thread, and use
that information to find what SQL statement is stuck.
'''
th = threading.current_thread()
shortTid = th.native_id % 10000 #type: ignore
cls.lastSqlFromThreads[shortTid] = sql # Save this for later
cls.record_save_sql_time()
@classmethod
def fetchSqlForThread(cls, shortTid : int) -> str :
def fetchSqlForThread(cls, shortTid : int) -> str :
print("=======================")
if shortTid not in cls.lastSqlFromThreads:
raise CrashGenError("No last-attempted-SQL found for thread id: {}".format(shortTid))
return cls.lastSqlFromThreads[shortTid]
return cls.lastSqlFromThreads[shortTid]
@classmethod
def get_save_sql_time(cls, shortTid : int):
'''
Return the time at which the given thread last recorded an SQL statement,
so that a stalled thread can be reported together with how long ago it
issued its last statement.
'''
return cls.current_time[shortTid]
@classmethod
def record_save_sql_time(cls):
'''
Record, for the current thread, the time at which the last SQL statement
was saved; used later to detect threads that appear to be stuck.
'''
th = threading.current_thread()
shortTid = th.native_id % 10000 #type: ignore
cls.current_time[shortTid] = float(time.time()) # Save this for later
@classmethod
def sql_exec_spend(cls, cost: float):
'''
Record, for the current thread, how long the last SQL statement took to
execute (a negative value marks a failed call).
'''
th = threading.current_thread()
shortTid = th.native_id % 10000 #type: ignore
cls.spendThreads[shortTid] = cost # Save this for later
@classmethod
def get_time_cost(cls) ->float:
th = threading.current_thread()
shortTid = th.native_id % 10000 #type: ignore
return cls.spendThreads.get(shortTid)
@classmethod
def create(cls, connType, dbTarget):
@ -61,6 +105,7 @@ class DbConn:
def createNative(cls, dbTarget) -> DbConn:
return cls.create(cls.TYPE_NATIVE, dbTarget)
@classmethod
def createRest(cls, dbTarget) -> DbConn:
return cls.create(cls.TYPE_REST, dbTarget)
@ -75,6 +120,7 @@ class DbConn:
return "[DbConn: type={}, target={}]".format(self._type, self._dbTarget)
def getLastSql(self):
return self._lastSql
def open(self):
@ -184,13 +230,19 @@ class DbConnRest(DbConn):
def _doSql(self, sql):
self._lastSql = sql # remember this, last SQL attempted
self.saveSqlForCurrentThread(sql) # Save in global structure too. #TODO: combine with above
try:
time_cost = -1
time_start = time.time()
try:
r = requests.post(self._url,
data = sql,
auth = HTTPBasicAuth('root', 'taosdata'))
auth = HTTPBasicAuth('root', 'taosdata'))
except:
print("REST API Failure (TODO: more info here)")
self.sql_exec_spend(-2)
raise
finally:
time_cost = time.time()- time_start
self.sql_exec_spend(time_cost)
rj = r.json()
# Sanity check for the "Json Result"
if ('status' not in rj):
@ -223,6 +275,8 @@ class DbConnRest(DbConn):
"[SQL-REST] Execution Result, nRows = {}, SQL = {}".format(nRows, sql))
return nRows
def query(self, sql): # return rows affected
return self.execute(sql)
@ -336,6 +390,7 @@ class MyTDSql:
raise
return self.affectedRows
class DbTarget:
def __init__(self, cfgPath, hostAddr, port):
self.cfgPath = cfgPath
@ -355,6 +410,7 @@ class DbConnNative(DbConn):
# _connInfoDisplayed = False # TODO: find another way to display this
totalConnections = 0 # Not private
totalRequests = 0
time_cost = -1
def __init__(self, dbTarget):
super().__init__(dbTarget)
@ -413,8 +469,18 @@ class DbConnNative(DbConn):
"Cannot exec SQL unless db connection is open", CrashGenError.DB_CONNECTION_NOT_OPEN)
Logging.debug("[SQL] Executing SQL: {}".format(sql))
self._lastSql = sql
time_cost = -1
nRows = 0
time_start = time.time()
self.saveSqlForCurrentThread(sql) # Save in global structure too. #TODO: combine with above
nRows = self._tdSql.execute(sql)
try:
nRows= self._tdSql.execute(sql)
except Exception as e:
self.sql_exec_spend(-2)
finally:
time_cost = time.time() - time_start
self.sql_exec_spend(time_cost)
cls = self.__class__
cls.totalRequests += 1
Logging.debug(
@ -494,4 +560,3 @@ class DbManager():
self._dbConn.close()
self._dbConn = None
Logging.debug("DbManager closed DB connection...")
View File
@ -162,7 +162,11 @@ class TDDnode:
def setAsan(self, value):
self.asan = value
if value:
self.execPath = os.path.abspath(self.path + "/tests/script/sh/exec.sh")
selfPath = os.path.dirname(os.path.realpath(__file__))
if ("community" in selfPath):
self.execPath = os.path.abspath(self.path + "/community/tests/script/sh/exec.sh")
else:
self.execPath = os.path.abspath(self.path + "/tests/script/sh/exec.sh")
def getDataSize(self):
totalSize = 0
@ -670,8 +674,13 @@ class TDDnodes:
def setAsan(self, value):
self.asan = value
if value:
self.stopDnodesPath = os.path.abspath(self.path + "/tests/script/sh/stop_dnodes.sh")
self.stopDnodesSigintPath = os.path.abspath(self.path + "/tests/script/sh/sigint_stop_dnodes.sh")
selfPath = os.path.dirname(os.path.realpath(__file__))
if ("community" in selfPath):
self.stopDnodesPath = os.path.abspath(self.path + "/community/tests/script/sh/stop_dnodes.sh")
self.stopDnodesSigintPath = os.path.abspath(self.path + "/community/tests/script/sh/sigint_stop_dnodes.sh")
else:
self.stopDnodesPath = os.path.abspath(self.path + "/tests/script/sh/stop_dnodes.sh")
self.stopDnodesSigintPath = os.path.abspath(self.path + "/tests/script/sh/sigint_stop_dnodes.sh")
tdLog.info("run in address sanitizer mode")
def setKillValgrind(self, value):
View File
@ -33,7 +33,7 @@ class TDLog:
print("\033[1;36m%s %s\033[0m" % (datetime.datetime.now(), err))
def success(self, info):
print("\033[1;32m%s %s\033[0m" % (datetime.datetime.now(), info))
printf("\033[1;32m%s %s\033[0m" % (datetime.datetime.now(), info))
def notice(self, err):
print("\033[1;33m%s %s\033[0m" % (datetime.datetime.now(), err))
View File
@ -3,6 +3,7 @@
set +e
#set -x
unset LD_PRELOAD
SCRIPT_DIR=`dirname $0`
cd $SCRIPT_DIR/../
SCRIPT_DIR=`pwd`
@ -21,19 +22,24 @@ error_num=`cat ${LOG_DIR}/*.asan | grep "ERROR" | wc -l`
memory_leak=`cat ${LOG_DIR}/*.asan | grep "Direct leak" | wc -l`
indirect_leak=`cat ${LOG_DIR}/*.asan | grep "Indirect leak" | wc -l`
runtime_error=`cat ${LOG_DIR}/*.asan | grep "runtime error" | grep -v "trees.c:873" | wc -l`
python_error=`cat ${LOG_DIR}/*.info | grep -w "stack" | wc -l`
echo -e "\033[44;32;1m"asan error_num: $error_num"\033[0m"
echo -e "\033[44;32;1m"asan memory_leak: $memory_leak"\033[0m"
echo -e "\033[44;32;1m"asan indirect_leak: $indirect_leak"\033[0m"
echo -e "\033[44;32;1m"asan runtime error: $runtime_error"\033[0m"
echo -e "\033[44;32;1m"asan python error: $python_error"\033[0m"
let "errors=$error_num+$memory_leak+$indirect_leak+$runtime_error"
let "errors=$error_num+$memory_leak+$indirect_leak+$runtime_error+$python_error"
if [ $errors -eq 0 ]; then
echo -e "\033[44;32;1m"no asan errors"\033[0m"
exit 0
else
echo -e "\033[44;31;1m"asan total errors: $errors"\033[0m"
if [ $python_error -ne 0 ]; then
cat ${LOG_DIR}/*.info
fi
cat ${LOG_DIR}/*.asan
exit 1
fi
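For reference, a rough Python equivalent of the tallying above; this is a simplified sketch (it ignores the grep -w word matching and the trees.c:873 exclusion), and log_dir is a placeholder for $LOG_DIR:

import glob

def count_lines_with(pattern, keyword):
    # Count lines containing the keyword across all files matching the glob pattern.
    total = 0
    for path in glob.glob(pattern):
        with open(path, errors="ignore") as f:
            total += sum(1 for line in f if keyword in line)
    return total

log_dir = "/path/to/sim/asan"   # placeholder for $LOG_DIR
errors = (count_lines_with(log_dir + "/*.asan", "ERROR")
          + count_lines_with(log_dir + "/*.asan", "Direct leak")
          + count_lines_with(log_dir + "/*.asan", "Indirect leak")
          + count_lines_with(log_dir + "/*.asan", "runtime error")
          + count_lines_with(log_dir + "/*.info", "stack"))
print("asan total errors:", errors)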
View File
@ -11,6 +11,7 @@
set +e
#set -x
unset LD_PRELOAD
UNAME_BIN=`which uname`
OS_TYPE=`$UNAME_BIN`
View File
@ -3,7 +3,7 @@
set +e
#set -x
export LD_PRELOAD=
unset LD_PRELOAD
UNAME_BIN=`which uname`
OS_TYPE=`$UNAME_BIN`
View File
@ -3,10 +3,10 @@
set +e
#set -x
unset LD_PRELOAD
UNAME_BIN=`which uname`
OS_TYPE=`$UNAME_BIN`
export LD_PRELOAD=
PID=`ps -ef|grep /usr/bin/taosd | grep -v grep | awk '{print $2}'`
if [ -n "$PID" ]; then
echo systemctl stop taosd
View File
@ -204,18 +204,12 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
tdSql.query(abs_query)
for row_index, row in enumerate(abs_result):
for col_index, elem in enumerate(row):
if auto_result[row_index][col_index] != elem:
check_status = False
if not check_status:
tdLog.notice(
"abs function value has not as expected , sql is \"%s\" " % abs_query)
sys.exit(1)
else:
tdLog.info(
"abs value check pass , it work as expected ,sql is \"%s\" " % abs_query)
tdSql.checkData(row_index,col_index,auto_result[row_index][col_index])
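The same refactor recurs in the math-function tests that follow: compute the expected values in Python, re-issue the query, and let tdSql.checkData assert each cell, so the old check_status/sys.exit bookkeeping is no longer needed. A standalone sketch of that cell-by-cell check using plain asserts (verify_cells and the sample rows are illustrative, not part of the test framework):

def verify_cells(actual_rows, expected_rows):
    # Assert every cell matches, the way tdSql.checkData(row, col, expected) is
    # called for each (row_index, col_index) pair in the loop above.
    for row_index, row in enumerate(expected_rows):
        for col_index, expected in enumerate(row):
            actual = actual_rows[row_index][col_index]
            assert actual == expected, "row {} col {}: got {}, expected {}".format(
                row_index, col_index, actual, expected)

verify_cells(actual_rows=[[1, 2], [3, 4]], expected_rows=[[1, 2], [3, 4]])  # illustrative data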
def test_errors(self):
dbname = "testdb"
@ -466,19 +460,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto(f"select c1, c2, c3 , c4, c5 ,c6 from {dbname}.sub1_bound ",
f"select abs(c1), abs(c2) ,abs(c3), abs(c4), abs(c5) ,abs(c6) from {dbname}.sub1_bound")
View File
@ -426,19 +426,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_function("&", False , f"{dbname}.sub1_bound" ,"c1","c2","c3","c4","c5","c6" )
self.check_function("&", False , f"{dbname}.sub1_bound","abs(c1)","abs(c2)","abs(c3)","abs(c4)","abs(c5)","abs(c6)" )
View File
@ -86,21 +86,12 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
tdSql.query(pow_query)
for row_index , row in enumerate(pow_result):
for col_index , elem in enumerate(row):
if auto_result[row_index][col_index] == None and not (auto_result[row_index][col_index] == None and elem == None):
check_status = False
elif auto_result[row_index][col_index] != None and (auto_result[row_index][col_index] - elem > 0.00000001):
check_status = False
else:
pass
if not check_status:
tdLog.notice("acos function value has not as expected , sql is \"%s\" "%pow_query )
sys.exit(1)
else:
tdLog.info("acos value check pass , it work as expected ,sql is \"%s\" "%pow_query )
tdSql.checkData(row_index,col_index,auto_result[row_index][col_index])
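One subtlety worth noting: the removed loop here compared floating-point results with a one-sided test (a - b > 0.00000001). A symmetric tolerance check that also handles NULL results would look like the sketch below; almost_equal is an illustrative helper, not part of the framework:

def almost_equal(expected, actual, eps=1e-8):
    # NULL (None) only matches NULL; otherwise compare with a symmetric tolerance.
    if expected is None or actual is None:
        return expected is None and actual is None
    return abs(expected - actual) <= eps

assert almost_equal(0.5235987755982988, 0.5235987755982989)  # illustrative values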
def test_errors(self, dbname="db"):
error_sql_lists = [
@ -414,19 +405,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_acos( f"select abs(c1), abs(c2), abs(c3) , abs(c4), abs(c5) from {dbname}.sub1_bound ", f"select acos(abs(c1)), acos(abs(c2)) ,acos(abs(c3)), acos(abs(c4)), acos(abs(c5)) from {dbname}.sub1_bound")
View File
@ -86,21 +86,13 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
tdSql.query(pow_query)
for row_index , row in enumerate(pow_result):
for col_index , elem in enumerate(row):
if auto_result[row_index][col_index] == None and not (auto_result[row_index][col_index] == None and elem == None):
check_status = False
elif auto_result[row_index][col_index] != None and (auto_result[row_index][col_index] - elem > 0.00000001):
check_status = False
else:
pass
if not check_status:
tdLog.notice("asin function value has not as expected , sql is \"%s\" "%pow_query )
sys.exit(1)
else:
tdLog.info("asin value check pass , it work as expected ,sql is \"%s\" "%pow_query )
tdSql.checkData(row_index,col_index,auto_result[row_index][col_index])
def test_errors(self, dbname="db"):
error_sql_lists = [
@ -414,19 +406,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_asin( f"select abs(c1), abs(c2), abs(c3) , abs(c4), abs(c5) from {dbname}.sub1_bound ", f"select asin(abs(c1)), asin(abs(c2)) ,asin(abs(c3)), asin(abs(c4)), asin(abs(c5)) from {dbname}.sub1_bound")
View File
@ -84,22 +84,12 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
tdSql.query(pow_query)
for row_index , row in enumerate(pow_result):
for col_index , elem in enumerate(row):
if auto_result[row_index][col_index] == None and elem:
check_status = False
elif auto_result[row_index][col_index] != None and (auto_result[row_index][col_index] - elem > 0.00000001):
check_status = False
else:
pass
if not check_status:
tdLog.notice("atan function value has not as expected , sql is \"%s\" "%pow_query )
sys.exit(1)
else:
tdLog.info("atan value check pass , it work as expected ,sql is \"%s\" "%pow_query )
tdSql.checkData(row_index,col_index,auto_result[row_index][col_index])
def test_errors(self, dbname="db"):
error_sql_lists = [
f"select atan from {dbname}.t1",
@ -412,19 +402,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_atan( f"select abs(c1), abs(c2), abs(c3) , abs(c4), abs(c5) from {dbname}.sub1_bound ", f"select atan(abs(c1)), atan(abs(c2)) ,atan(abs(c3)), atan(abs(c4)), atan(abs(c5)) from {dbname}.sub1_bound")
View File
@ -114,16 +114,10 @@ class TDTestCase:
avg_result = tdSql.getResult(origin_query)
origin_result = tdSql.getResult(check_query)
check_status = True
tdSql.query(origin_query)
for row_index , row in enumerate(avg_result):
for col_index , elem in enumerate(row):
if avg_result[row_index][col_index] != origin_result[row_index][col_index]:
check_status = False
if not check_status:
tdLog.notice("avg function value has not as expected , sql is \"%s\" "%origin_query )
sys.exit(1)
else:
tdLog.info("avg value check pass , it work as expected ,sql is \"%s\" "%check_query )
tdSql.checkData(row_index,col_index,origin_result[row_index][col_index])
def test_errors(self, dbname="db"):
error_sql_lists = [
@ -378,33 +372,33 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483645, 9223372036854775805, 32765, 125, 3.40E+37, 1.7e+307, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483645, 9223372036854775805, 32765, 125, 3.40E+37, 1.7e+307, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483644, 9223372036854775804, 32764, 124, 3.40E+37, 1.7e+307, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483644, 9223372036854775804, 32764, 124, 3.40E+37, 1.7e+307, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+15s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+20s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
#self.check_avg(f"select avg(c1), avg(c2), avg(c3) , avg(c4), avg(c5) ,avg(c6) from {dbname}.sub1_bound " , f" select sum(c1)/count(c1), sum(c2)/count(c2) ,sum(c3)/count(c3), sum(c4)/count(c4), sum(c5)/count(c5) ,sum(c6)/count(c6) from {dbname}.sub1_bound ")
View File
@ -85,16 +85,11 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
tdSql.query(ceil_query)
for row_index , row in enumerate(ceil_result):
for col_index , elem in enumerate(row):
if auto_result[row_index][col_index] != elem:
check_status = False
if not check_status:
tdLog.notice("ceil function value has not as expected , sql is \"%s\" "%ceil_query )
sys.exit(1)
else:
tdLog.info("ceil value check pass , it work as expected ,sql is \"%s\" "%ceil_query )
tdSql.checkData(row_index,col_index,auto_result[row_index][col_index])
def test_errors(self, dbname="db"):
error_sql_lists = [
@ -377,10 +372,10 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
@ -388,15 +383,15 @@ class TDTestCase:
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+15s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto( f"select c1, c2, c3 , c4, c5 ,c6 from {dbname}.sub1_bound ", f"select ceil(c1), ceil(c2) ,ceil(c3), ceil(c4), ceil(c5) ,ceil(c6) from {dbname}.sub1_bound")
self.check_result_auto( f"select c1, c2, c3 , c3, c2 ,c1 from {dbname}.sub1_bound ", f"select ceil(c1), ceil(c2) ,ceil(c3), ceil(c3), ceil(c2) ,ceil(c1) from {dbname}.sub1_bound")
View File
@ -84,26 +84,10 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
print("========",pow_query, origin_query )
tdSql.query(pow_query)
for row_index , row in enumerate(pow_result):
for col_index , elem in enumerate(row):
if auto_result[row_index][col_index] == None and elem:
check_status = False
elif auto_result[row_index][col_index] != None and ((auto_result[row_index][col_index] != elem) and (str(auto_result[row_index][col_index])[:6] != str(elem)[:6] )):
# elif auto_result[row_index][col_index] != None and (abs(auto_result[row_index][col_index] - elem) > 0.000001):
print("=====")
print(row_index, col_index)
print(auto_result[row_index][col_index], elem, origin_result[row_index][col_index])
check_status = False
else:
pass
if not check_status:
tdLog.notice("cos function value has not as expected , sql is \"%s\" "%pow_query )
sys.exit(1)
else:
tdLog.info("cos value check pass , it work as expected ,sql is \"%s\" "%pow_query )
tdSql.checkData(row_index,col_index,auto_result[row_index][col_index])
def test_errors(self, dbname="db"):
error_sql_lists = [
@ -413,16 +397,16 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
# self.check_result_auto_cos( f"select abs(c1), abs(c2), abs(c3) , abs(c4), abs(c5) from {dbname}.sub1_bound ", f"select cos(abs(c1)), cos(abs(c2)) ,cos(abs(c3)), cos(abs(c4)), cos(abs(c5)) from {dbname}.sub1_bound")
View File
@ -85,16 +85,11 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
tdSql.query(floor_query)
for row_index , row in enumerate(floor_result):
for col_index , elem in enumerate(row):
if auto_result[row_index][col_index] != elem:
check_status = False
if not check_status:
tdLog.notice("floor function value has not as expected , sql is \"%s\" "%floor_query )
sys.exit(1)
else:
tdLog.info("floor value check pass , it work as expected ,sql is \"%s\" "%floor_query )
tdSql.checkData(row_index,col_index,auto_result[row_index][col_index])
def test_errors(self, dbname=DBNAME):
error_sql_lists = [
@ -388,10 +383,10 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
@ -399,15 +394,15 @@ class TDTestCase:
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+15s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto( f"select c1, c2, c3 , c4, c5 ,c6 from {dbname}.sub1_bound ", f"select floor(c1), floor(c2) ,floor(c3), floor(c4), floor(c5) ,floor(c6) from {dbname}.sub1_bound")
self.check_result_auto( f"select c1, c2, c3 , c3, c2 ,c1 from {dbname}.sub1_bound ", f"select floor(c1), floor(c2) ,floor(c3), floor(c3), floor(c2) ,floor(c1) from {dbname}.sub1_bound")
View File
@ -364,10 +364,10 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
@ -375,15 +375,15 @@ class TDTestCase:
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+15s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.query(f"select stateduration(c1,'GT',1,1s) from {dbname}.sub1_bound")
File diff suppressed because it is too large

View File
@ -572,19 +572,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
# check basic elem for table per row
View File
@ -91,7 +91,6 @@ class TDTestCase:
elem = math.log(elem , base)
elif elem <=0:
elem = None
row_check.append(elem)
auto_result.append(row_check)
@ -519,19 +518,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_log(None , f"select c1, c2, c3 , c4, c5 ,c6 from {dbname}.sub1_bound ", f"select log(c1), log(c2) ,log(c3), log(c4), log(c5) ,log(c6) from {dbname}.sub1_bound")
self.check_result_auto_log( 2 , f"select c1, c2, c3 , c4, c5 ,c6 from {dbname}.sub1_bound ", f"select log(c1,2), log(c2,2) ,log(c3,2), log(c4,2), log(c5,2) ,log(c6,2) from {dbname}.sub1_bound")
View File
@ -507,19 +507,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_pow(2, f"select c1, c3 , c4, c5 from {dbname}.sub1_bound ", f"select pow(c1,2), pow(c3,2), pow(c4,2), pow(c5,2) from {dbname}.sub1_bound")
self.check_result_auto_pow(3, f"select c1, c3 , c4, c5 from {dbname}.sub1_bound ", f"select pow(c1,3), pow(c3,3), pow(c4,3), pow(c5,3) from {dbname}.sub1_bound")
View File
@ -81,16 +81,12 @@ class TDTestCase:
row_check.append(elem)
auto_result.append(row_check)
check_status = True
tdSql.query(round_query)
for row_index , row in enumerate(round_result):
for col_index , elem in enumerate(row):
if auto_result[row_index][col_index] != elem:
check_status = False
if not check_status:
tdLog.notice("round function value has not as expected , sql is \"%s\" "%round_query )
sys.exit(1)
else:
tdLog.info("round value check pass , it work as expected ,sql is \"%s\" "%round_query )
tdSql.checkData(row_index , col_index ,auto_result[row_index][col_index])
def test_errors(self, dbname="db"):
error_sql_lists = [
@ -388,10 +384,10 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
@ -399,15 +395,15 @@ class TDTestCase:
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+15s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto( f"select c1, c2, c3 , c4, c5 ,c6 from {dbname}.sub1_bound ", f"select round(c1), round(c2) ,round(c3), round(c4), round(c5) ,round(c6) from {dbname}.sub1_bound")
self.check_result_auto( f"select c1, c2, c3 , c3, c2 ,c1 from {dbname}.sub1_bound ", f"select round(c1), round(c2) ,round(c3), round(c3), round(c2) ,round(c1) from {dbname}.sub1_bound")
View File
@ -394,19 +394,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_sin( f"select abs(c1), abs(c2), abs(c3) , abs(c4) from {dbname}.sub1_bound ", f"select sin(abs(c1)), sin(abs(c2)) ,sin(abs(c3)), sin(abs(c4)) from {dbname}.sub1_bound")
View File
@ -443,19 +443,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_sqrt( f"select abs(c1), abs(c2), abs(c3) , abs(c4), abs(c5) from {dbname}.sub1_bound ", f"select sqrt(abs(c1)), sqrt(abs(c2)) ,sqrt(abs(c3)), sqrt(abs(c4)), sqrt(abs(c5)) from {dbname}.sub1_bound")
View File
@ -451,10 +451,10 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
@ -462,15 +462,15 @@ class TDTestCase:
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+15s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.query(f"select statecount(c1,'GT',1) from {dbname}.sub1_bound")
View File
@ -421,10 +421,10 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
@ -432,20 +432,20 @@ class TDTestCase:
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+15s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.query(f"select tail(c2,2) from {dbname}.sub1_bound order by 1 desc")
tdSql.checkRows(2)
tdSql.checkData(0,0,9223372036854775803)
tdSql.query(f"select tail(c2,1) from {dbname}.sub1_bound order by 1 desc")
tdSql.checkRows(1)
tdSql.checkData(0,0,-9223372036854775803)
def run(self): # sourcery skip: extract-duplicate-method, remove-redundant-fstring
tdSql.prepare()
View File
@ -391,19 +391,19 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, -2147483647, -9223372036854775807, -32767, -127, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
self.check_result_auto_tan( f"select abs(c1), abs(c2), abs(c3) , abs(c4) from {dbname}.sub1_bound ", f"select tan(abs(c1)), tan(abs(c2)) ,tan(abs(c3)), tan(abs(c4)) from {dbname}.sub1_bound")
View File
@ -0,0 +1,40 @@
abs.py: def check_boundary_values(self):
abs.py: self.check_boundary_values()
and_or_for_byte.py: def check_boundary_values(self, dbname="bound_test"):
and_or_for_byte.py: self.check_boundary_values()
arccos.py: def check_boundary_values(self, dbname="bound_test"):
arccos.py: self.check_boundary_values()
arcsin.py: def check_boundary_values(self, dbname="bound_test"):
arcsin.py: self.check_boundary_values()
arctan.py: def check_boundary_values(self, dbname="bound_test"):
arctan.py: self.check_boundary_values()
avg.py: def check_boundary_values(self, dbname="bound_test"):
avg.py: self.check_boundary_values()
ceil.py: def check_boundary_values(self, dbname="bound_test"):
ceil.py: self.check_boundary_values()
cos.py: def check_boundary_values(self, dbname="bound_test"):
cos.py: self.check_boundary_values()
floor.py: def check_boundary_values(self, dbname="bound_test"):
floor.py: self.check_boundary_values()
function_stateduration.py: def check_boundary_values(self, dbname="bound_test"):
function_stateduration.py: self.check_boundary_values()
last_row.py: def check_boundary_values(self, dbname="bound_test"):
last_row.py: self.check_boundary_values()
log.py: def check_boundary_values(self, dbname="bound_test"):
log.py: self.check_boundary_values()
pow.py: def check_boundary_values(self, dbname="bound_test"):
pow.py: self.check_boundary_values()
round.py: def check_boundary_values(self, dbname="bound_test"):
round.py: self.check_boundary_values()
sin.py: def check_boundary_values(self, dbname="testdb"):
sin.py: self.check_boundary_values()
sqrt.py: def check_boundary_values(self, dbname="bound_test"):
sqrt.py: self.check_boundary_values()
statecount.py: def check_boundary_values(self, dbname="bound_test"):
statecount.py: self.check_boundary_values()
tail.py: def check_boundary_values(self, dbname="bound_test"):
tail.py: self.check_boundary_values()
tan.py: def check_boundary_values(self, dbname="bound_test"):
tan.py: self.check_boundary_values()
unique.py: def check_boundary_values(self, dbname="bound_test"):
unique.py: self.check_boundary_values()
View File
@ -451,26 +451,26 @@ class TDTestCase:
)
tdSql.execute(f'create table {dbname}.sub1_bound using {dbname}.stb_bound tags ( 1 )')
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()-1s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-10s, 2147483647, 9223372036854775807, 32767, 127, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now(), 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()-5s, 2147483646, 9223372036854775806, 32766, 126, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()+1s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+5s, -2147483646, -9223372036854775806, -32766, -126, -3.40E+38, -1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()+2s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+10s, 2147483643, 9223372036854775803, 32763, 123, 3.39E+38, 1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.execute(
f"insert into {dbname}.sub1_bound values ( now()+3s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+20s, -2147483643, -9223372036854775803, -32763, -123, -3.39E+38, -1.69e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.error(
f"insert into {dbname}.sub1_bound values ( now()+1s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
f"insert into {dbname}.sub1_bound values ( now()+30s, 2147483648, 9223372036854775808, 32768, 128, 3.40E+38, 1.7e+308, True, 'binary_tb1', 'nchar_tb1', now() )"
)
tdSql.query(f"select unique(c2) from {dbname}.sub1_bound order by 1 desc")
View File
@ -45,13 +45,11 @@ declare -x SIM_DIR=$TOP_DIR/sim
PROGRAM=$BUILD_DIR/build/bin/tsim
PRG_DIR=$SIM_DIR/tsim
ASAN_DIR=$SIM_DIR/asan
SYSTEM_TEST_DIR=$TOP_DIR/tests/system-test
chmod -R 777 $PRG_DIR
echo "------------------------------------------------------------------------"
echo "Start TDengine Testing Case ..."
echo "BUILD_DIR: $BUILD_DIR"
echo "SYSTEM_TEST_DIR : $SYSTEM_TEST_DIR"
echo "SIM_DIR : $SIM_DIR"
echo "CODE_DIR : $CODE_DIR"
echo "ASAN_DIR : $ASAN_DIR"
@ -61,23 +59,40 @@ rm -rf $SIM_DIR/*
mkdir -p $PRG_DIR
mkdir -p $ASAN_DIR
cd $SYSTEM_TEST_DIR
cd $CODE_DIR
ulimit -n 600000
ulimit -c unlimited
#sudo sysctl -w kernel.core_pattern=$TOP_DIR/core.%p.%e
echo "ExcuteCmd:" $*
echo "AsanDir:" $ASAN_DIR/psim.asan
AsanFile=$ASAN_DIR/psim.info
echo "AsanFile:" $AsanFile
export LD_PRELOAD=libasan.so.5
$* -a 2> $ASAN_DIR/psim.asan
result=$?
echo "Execute result:" $result
unset LD_PRELOAD
#export LD_PRELOAD=libasan.so.5
export LD_PRELOAD=`gcc -print-file-name=libasan.so`
echo "Preload AsanSo:" $?
if [ $result -eq 0 ]; then
$CODE_DIR/sh/checkAsan.sh
$* -a 2> $AsanFile
unset LD_PRELOAD
AsanFileLen=`cat $AsanFile | wc -l`
while [ $AsanFileLen -lt 10 ]
do
sleep 1
  AsanFileLen=`cat $AsanFile | wc -l`
done
echo "AsanFileLen:" $AsanFileLen
AsanFileSuccessLen=`grep -w successfully $AsanFile | wc -l`
echo "AsanFileSuccessLen:" $AsanFileSuccessLen
if [ $AsanFileSuccessLen -gt 0 ]; then
echo "Execute script successfully and check asan"
$CODE_DIR/../script/sh/checkAsan.sh
else
echo "Execute script failure"
exit 1
fi
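A sketch of the wait-then-verify step in Python, with an explicit timeout added so the wait cannot spin forever; wait_and_check, min_lines and timeout_sec are illustrative names (the script above polls the .info file and then greps it for "successfully"):

import time

def wait_and_check(info_path, min_lines=10, timeout_sec=600):
    # Poll the .info file until it has accumulated some output (or we time out),
    # then report success only if the word "successfully" appears in it.
    deadline = time.time() + timeout_sec
    while time.time() < deadline:
        try:
            with open(info_path, errors="ignore") as f:
                lines = f.readlines()
        except FileNotFoundError:
            lines = []
        if len(lines) >= min_lines:
            return any("successfully" in line for line in lines)
        time.sleep(1)
    return False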
View File
@ -555,5 +555,5 @@ if __name__ == "__main__":
conn.close()
if asan:
tdDnodes.StopAllSigint()
tdLog.info("address sanitizer mode finished")
tdLog.info("Address sanitizer mode finished")
sys.exit(0)