Merge pull request #29123 from taosdata/fix/main/TS-5763
fix: [TS-5763] Fix error when using selection functions (first/last/mode) with a JSON parameter.
This commit is contained in:
commit
3c423142a9
|
@ -2449,13 +2449,21 @@ static int32_t doSaveCurrentVal(SqlFunctionCtx* pCtx, int32_t rowIndex, int64_t
|
|||
SFirstLastRes* pInfo = GET_ROWCELL_INTERBUF(pResInfo);
|
||||
|
||||
if (IS_VAR_DATA_TYPE(type)) {
|
||||
pInfo->bytes = varDataTLen(pData);
|
||||
if (type == TSDB_DATA_TYPE_JSON) {
|
||||
pInfo->bytes = getJsonValueLen(pData);
|
||||
} else {
|
||||
pInfo->bytes = varDataTLen(pData);
|
||||
}
|
||||
}
|
||||
|
||||
(void)memcpy(pInfo->buf, pData, pInfo->bytes);
|
||||
if (pkData != NULL) {
|
||||
if (IS_VAR_DATA_TYPE(pInfo->pkType)) {
|
||||
pInfo->pkBytes = varDataTLen(pkData);
|
||||
if (pInfo->pkType == TSDB_DATA_TYPE_JSON) {
|
||||
pInfo->pkBytes = getJsonValueLen(pkData);
|
||||
} else {
|
||||
pInfo->pkBytes = varDataTLen(pkData);
|
||||
}
|
||||
}
|
||||
(void)memcpy(pInfo->buf + pInfo->bytes, pkData, pInfo->pkBytes);
|
||||
pInfo->pkData = pInfo->buf + pInfo->bytes;
|
||||
|
@ -2985,7 +2993,11 @@ static int32_t doSaveLastrow(SqlFunctionCtx* pCtx, char* pData, int32_t rowIndex
|
|||
pInfo->isNull = false;
|
||||
|
||||
if (IS_VAR_DATA_TYPE(pInputCol->info.type)) {
|
||||
pInfo->bytes = varDataTLen(pData);
|
||||
if (pInputCol->info.type == TSDB_DATA_TYPE_JSON) {
|
||||
pInfo->bytes = getJsonValueLen(pData);
|
||||
} else {
|
||||
pInfo->bytes = varDataTLen(pData);
|
||||
}
|
||||
}
|
||||
|
||||
(void)memcpy(pInfo->buf, pData, pInfo->bytes);
|
||||
|
@ -2994,7 +3006,11 @@ static int32_t doSaveLastrow(SqlFunctionCtx* pCtx, char* pData, int32_t rowIndex
|
|||
if (pCtx->hasPrimaryKey && !colDataIsNull_s(pkCol, rowIndex)) {
|
||||
char* pkData = colDataGetData(pkCol, rowIndex);
|
||||
if (IS_VAR_DATA_TYPE(pInfo->pkType)) {
|
||||
pInfo->pkBytes = varDataTLen(pkData);
|
||||
if (pInfo->pkType == TSDB_DATA_TYPE_JSON) {
|
||||
pInfo->pkBytes = getJsonValueLen(pkData);
|
||||
} else {
|
||||
pInfo->pkBytes = varDataTLen(pkData);
|
||||
}
|
||||
}
|
||||
(void)memcpy(pInfo->buf + pInfo->bytes, pkData, pInfo->pkBytes);
|
||||
pInfo->pkData = pInfo->buf + pInfo->bytes;
|
||||
|
@ -5872,7 +5888,11 @@ void modeFunctionCleanupExt(SqlFunctionCtx* pCtx) {
|
|||
|
||||
static int32_t saveModeTupleData(SqlFunctionCtx* pCtx, char* data, SModeInfo *pInfo, STuplePos* pPos) {
|
||||
if (IS_VAR_DATA_TYPE(pInfo->colType)) {
|
||||
(void)memcpy(pInfo->buf, data, varDataTLen(data));
|
||||
if (pInfo->colType == TSDB_DATA_TYPE_JSON) {
|
||||
(void)memcpy(pInfo->buf, data, getJsonValueLen(data));
|
||||
} else {
|
||||
(void)memcpy(pInfo->buf, data, varDataTLen(data));
|
||||
}
|
||||
} else {
|
||||
(void)memcpy(pInfo->buf, data, pInfo->colBytes);
|
||||
}
|
||||
|
@ -5882,7 +5902,16 @@ static int32_t saveModeTupleData(SqlFunctionCtx* pCtx, char* data, SModeInfo *pI
|
|||
|
||||
static int32_t doModeAdd(SModeInfo* pInfo, int32_t rowIndex, SqlFunctionCtx* pCtx, char* data) {
|
||||
int32_t code = TSDB_CODE_SUCCESS;
|
||||
int32_t hashKeyBytes = IS_STR_DATA_TYPE(pInfo->colType) ? varDataTLen(data) : pInfo->colBytes;
|
||||
int32_t hashKeyBytes;
|
||||
if (IS_VAR_DATA_TYPE(pInfo->colType)) {
|
||||
if (pInfo->colType == TSDB_DATA_TYPE_JSON) {
|
||||
hashKeyBytes = getJsonValueLen(data);
|
||||
} else {
|
||||
hashKeyBytes = varDataTLen(data);
|
||||
}
|
||||
} else {
|
||||
hashKeyBytes = pInfo->colBytes;
|
||||
}
|
||||
|
||||
SModeItem* pHashItem = (SModeItem *)taosHashGet(pInfo->pHash, data, hashKeyBytes);
|
||||
if (pHashItem == NULL) {
|
||||
|
@ -6654,14 +6683,32 @@ static void doSaveRateInfo(SRateInfo* pRateInfo, bool isFirst, int64_t ts, char*
|
|||
pRateInfo->firstValue = v;
|
||||
pRateInfo->firstKey = ts;
|
||||
if (pRateInfo->firstPk) {
|
||||
int32_t pkBytes = IS_VAR_DATA_TYPE(pRateInfo->pkType) ? varDataTLen(pk) : pRateInfo->pkBytes;
|
||||
int32_t pkBytes;
|
||||
if (IS_VAR_DATA_TYPE(pRateInfo->pkType)) {
|
||||
if (pRateInfo->pkType == TSDB_DATA_TYPE_JSON) {
|
||||
pkBytes = getJsonValueLen(pk);
|
||||
} else {
|
||||
pkBytes = varDataTLen(pk);
|
||||
}
|
||||
} else {
|
||||
pkBytes = pRateInfo->pkBytes;
|
||||
}
|
||||
(void)memcpy(pRateInfo->firstPk, pk, pkBytes);
|
||||
}
|
||||
} else {
|
||||
pRateInfo->lastValue = v;
|
||||
pRateInfo->lastKey = ts;
|
||||
if (pRateInfo->lastPk) {
|
||||
int32_t pkBytes = IS_VAR_DATA_TYPE(pRateInfo->pkType) ? varDataTLen(pk) : pRateInfo->pkBytes;
|
||||
int32_t pkBytes;
|
||||
if (IS_VAR_DATA_TYPE(pRateInfo->pkType)) {
|
||||
if (pRateInfo->pkType == TSDB_DATA_TYPE_JSON) {
|
||||
pkBytes = getJsonValueLen(pk);
|
||||
} else {
|
||||
pkBytes = varDataTLen(pk);
|
||||
}
|
||||
} else {
|
||||
pkBytes = pRateInfo->pkBytes;
|
||||
}
|
||||
(void)memcpy(pRateInfo->lastPk, pk, pkBytes);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,106 @@
|
|||
###################################################################
|
||||
# Copyright (c) 2016 by TAOS Technologies, Inc.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This file is proprietary and confidential to TAOS Technologies.
|
||||
# No part of this file may be reproduced, stored, transmitted,
|
||||
# disclosed or used in any form or by any means other than as
|
||||
# expressly provided by the written permission from Jianhui Tao
|
||||
#
|
||||
###################################################################
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from frame import etool
|
||||
from frame.etool import *
|
||||
from frame.log import *
|
||||
from frame.cases import *
|
||||
from frame.sql import *
|
||||
from frame.caseBase import *
|
||||
from frame.common import *
|
||||
|
||||
class TDTestCase(TBase):
    """Regression test for TS-5763.

    Verifies that selection functions (first/last/mode) work on JSON tag
    expressions (labels->'key') without returning an error: the super table
    uses a JSON tag column, and the query below applies selection functions
    directly to extracted JSON values.
    """

    # dnode config overrides applied before the test case starts.
    updatecfgDict = {
        "keepColumnName": "1",
        "ttlChangeOnWrite": "1",
        "querySmaOptimize": "1",
        "slowLogScope": "none",
        "queryBufferSize": 10240
    }

    def insert_data(self):
        """Create database/super table with a JSON tag, populate child
        tables, then run the selection-function query repeatedly."""
        tdLog.info("insert data.")
        tdSql.execute("drop database if exists ts_5763;")
        tdSql.execute("create database ts_5763;")
        tdSql.execute("use ts_5763;")
        tdSql.execute("select database();")
        tdSql.execute("CREATE STABLE metrics (ts TIMESTAMP, v DOUBLE) TAGS (labels JSON)")

        # Child tables keyed by name -> JSON tag value. Tags deliberately mix
        # ASCII and multi-byte (Chinese) strings of varying lengths so that
        # JSON value length handling is exercised. Dict order is preserved
        # (Python 3.7+), so tables are created in the original order.
        long_name = "no1241-上的六块腹肌阿斯利康的肌肤轮廓设计大方"
        table_tags = {
            "metrics_0": '{"ident":"192.168.56.167"}',
            "metrics_1": '{"ident":"192.168.56.168"}',
            "metrics_2": '{"ident":"192.168.56.169"}',
            "metrics_3": '{"ident":"192.168.56.170"}',
            "metrics_5": '{"asset_name":"中国政务网"}',
            "metrics_6": '{"asset_name":"地大物博阿拉丁快解放啦上课交电费"}',
            "metrics_7": '{"asset_name":"' + long_name + '"}',
            "metrics_8": '{"asset_name":"' + long_name + '","ident":"192.168.0.1"}',
            "metrics_9": '{"asset_name":"' + long_name + '","ident":"192.168.0.1"}',
            "metrics_10": '{"asset_name":"上的咖啡机' + long_name + '","ident":"192.168.0.1"}',
        }
        for tbl, tags in table_tags.items():
            tdSql.execute(f"CREATE TABLE `{tbl}` USING `metrics` (`labels`) TAGS ('{tags}');")

        # metrics_0 gets distinct values 1..6 at distinct timestamps.
        for ts, v in [
            ("2024-12-12 16:34:39.326", 1),
            ("2024-12-12 16:34:40.891", 2),
            ("2024-12-12 16:34:41.986", 3),
            ("2024-12-12 16:34:42.992", 4),
            ("2024-12-12 16:34:46.927", 5),
            ("2024-12-12 16:34:48.473", 6),
        ]:
            tdSql.execute(f"insert into metrics_0 values ('{ts}',{v})")

        # These tables mirror metrics_0's rows via insert-select.
        for tbl in ("metrics_1", "metrics_2", "metrics_3",
                    "metrics_5", "metrics_6", "metrics_7"):
            tdSql.execute(f"insert into {tbl} select * from metrics_0")

        # metrics_8/9/10 each get 10 rows with value 1 at their own timestamps.
        const_rows = {
            "metrics_8": [
                "2024-12-12 19:05:36.459", "2024-12-12 19:05:37.388",
                "2024-12-12 19:05:37.622", "2024-12-12 19:05:37.852",
                "2024-12-12 19:05:38.081", "2024-12-12 19:05:38.307",
                "2024-12-12 19:05:38.535", "2024-12-12 19:05:38.792",
                "2024-12-12 19:05:39.035", "2024-12-12 19:05:39.240",
            ],
            "metrics_9": [
                "2024-12-12 19:05:29.270", "2024-12-12 19:05:30.508",
                "2024-12-12 19:05:31.035", "2024-12-12 19:05:31.523",
                "2024-12-12 19:05:31.760", "2024-12-12 19:05:32.001",
                "2024-12-12 19:05:32.228", "2024-12-12 19:05:32.453",
                "2024-12-12 19:05:32.690", "2024-12-12 19:05:32.906",
            ],
            "metrics_10": [
                "2024-12-12 19:06:14.538", "2024-12-12 19:06:15.114",
                "2024-12-12 19:06:15.613", "2024-12-12 19:06:15.853",
                "2024-12-12 19:06:16.054", "2024-12-12 19:06:16.295",
                "2024-12-12 19:06:16.514", "2024-12-12 19:06:16.731",
                "2024-12-12 19:06:16.958", "2024-12-12 19:06:17.176",
            ],
        }
        for tbl, ts_list in const_rows.items():
            for ts in ts_list:
                tdSql.execute(f"insert into {tbl} values ('{ts}',1)")

        # Run the selection-function-on-JSON query repeatedly (9 times) to
        # catch instability; before the TS-5763 fix this query errored out.
        for _ in range(1, 10):
            tdSql.query("select _wstart,first(v)-last(v), first(labels->'asset_name'),first(labels->'ident'),mode(labels->'asset_name'),mode(labels->'ident'),last(labels->'asset_name'),last(labels->'ident') from ts_5763.metrics interval(1s)")
            tdSql.checkRows(18)

    def run(self):
        """Test entry point invoked by the framework."""
        tdLog.debug(f"start to execute {__file__}")

        self.insert_data()

        tdLog.success(f"{__file__} successfully executed")


tdCases.addLinux(__file__, TDTestCase())
tdCases.addWindows(__file__, TDTestCase())
|
|
@ -15,6 +15,7 @@
|
|||
,,y,army,./pytest.sh python3 ./test.py -f cluster/snapshot.py -N 3 -L 3 -D 2
|
||||
,,y,army,./pytest.sh python3 ./test.py -f query/function/test_func_elapsed.py
|
||||
,,y,army,./pytest.sh python3 ./test.py -f query/function/test_function.py
|
||||
,,y,army,./pytest.sh python3 ./test.py -f query/function/test_selection_function_with_json.py
|
||||
,,y,army,./pytest.sh python3 ./test.py -f query/function/test_percentile.py
|
||||
,,y,army,./pytest.sh python3 ./test.py -f query/function/test_resinfo.py
|
||||
,,y,army,./pytest.sh python3 ./test.py -f query/function/test_interp.py
|
||||
|
|
Loading…
Reference in New Issue