fix: use max column length

kailixu 2023-06-25 16:50:34 +08:00
parent afa186108e
commit a868ed4895
1 changed file with 10 additions and 5 deletions

@@ -1699,6 +1699,7 @@ static int32_t estimateJsonLen(SReqResultInfo* pResultInfo, int32_t numOfCols, i
       len += lenTmp;
       pStart += lenTmp;
+      int32_t estimateColLen = 0;
       for (int32_t j = 0; j < numOfRows; ++j) {
         if (offset[j] == -1) {
           continue;
@@ -1708,20 +1709,21 @@ static int32_t estimateJsonLen(SReqResultInfo* pResultInfo, int32_t numOfCols, i
         int32_t jsonInnerType = *data;
         char* jsonInnerData = data + CHAR_BYTES;
         if (jsonInnerType == TSDB_DATA_TYPE_NULL) {
-          len += (VARSTR_HEADER_SIZE + strlen(TSDB_DATA_NULL_STR_L));
+          estimateColLen += (VARSTR_HEADER_SIZE + strlen(TSDB_DATA_NULL_STR_L));
         } else if (tTagIsJson(data)) {
-          len += (VARSTR_HEADER_SIZE + ((const STag*)(data))->len);
+          estimateColLen += (VARSTR_HEADER_SIZE + ((const STag*)(data))->len);
         } else if (jsonInnerType == TSDB_DATA_TYPE_NCHAR) {  // value -> "value"
-          len += varDataTLen(jsonInnerData) + CHAR_BYTES * 2;
+          estimateColLen += varDataTLen(jsonInnerData) + CHAR_BYTES * 2;
         } else if (jsonInnerType == TSDB_DATA_TYPE_DOUBLE) {
-          len += (VARSTR_HEADER_SIZE + 32);
+          estimateColLen += (VARSTR_HEADER_SIZE + 32);
         } else if (jsonInnerType == TSDB_DATA_TYPE_BOOL) {
-          len += (VARSTR_HEADER_SIZE + 5);
+          estimateColLen += (VARSTR_HEADER_SIZE + 5);
         } else {
           tscError("estimateJsonLen error: invalid type:%d", jsonInnerType);
           return -1;
         }
       }
+      len += TMAX(colLen, estimateColLen);
     } else if (IS_VAR_DATA_TYPE(pResultInfo->fields[i].type)) {
       int32_t lenTmp = numOfRows * sizeof(int32_t);
       len += (lenTmp + colLen);
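
For context on the hunk above: estimateJsonLen previously added only the per-row estimated printable sizes to len for a JSON column, whereas the commit accumulates them into estimateColLen and then reserves the larger of the reported raw column length and that estimate. Below is a minimal standalone sketch of that accounting, not the real TDengine code; row_value_len, the constants, and the TMAX macro definition are illustrative placeholders.

#include <stdint.h>
#include <stdio.h>

#define TMAX(a, b) ((a) > (b) ? (a) : (b))

/* Illustrative stand-in for a row's expanded size (header + printable value). */
static int32_t row_value_len(int32_t row) { return 8 + row; }

int main(void) {
  const int32_t numOfRows = 4;
  const int32_t colLen = 64; /* length reported for the raw column data */
  int32_t       len = 0;     /* running total for the whole result buffer */

  /* Accumulate the estimated printable size of every row in the column. */
  int32_t estimateColLen = 0;
  for (int32_t j = 0; j < numOfRows; ++j) {
    estimateColLen += row_value_len(j);
  }

  /* Core of the fix: reserve the larger of the raw column length and the
     per-row estimate, so the buffer is never sized below either bound. */
  len += TMAX(colLen, estimateColLen);

  printf("estimate=%d colLen=%d reserved=%d\n", estimateColLen, colLen, len);
  return 0;
}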
@@ -1793,6 +1795,9 @@ static int32_t doConvertJson(SReqResultInfo* pResultInfo, int32_t numOfCols, int
       tscError("doConvertJson error: colLen:%d >= dataLen:%d", colLen, dataLen);
       return TSDB_CODE_TSC_INTERNAL_ERROR;
     }
+    if(colLen < dataLen)
     if (pResultInfo->fields[i].type == TSDB_DATA_TYPE_JSON) {
       int32_t* offset = (int32_t*)pStart;
       int32_t* offset1 = (int32_t*)pStart1;