Merge pull request #29286 from taosdata/fix/liaohj

fix(analytics): fix error when receiving large POST results.
This commit is contained in:
Shengliang Guan 2024-12-24 13:39:12 +08:00 committed by GitHub
commit 713a590f24
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 33 additions and 10 deletions

View File

@ -7,6 +7,8 @@ file_zh_changed = ''
file_en_changed = ''
file_no_doc_changed = '1'
file_only_tdgpt_change_except = '1'
tdgpt_file = "forecastoperator.c\\|anomalywindowoperator.c\\|tanalytics.h\\|tanalytics.c\\|tdgpt_cases.task\\|analytics"
def abortPreviousBuilds() {
def currentJobName = env.JOB_NAME
def currentBuildNumber = env.BUILD_NUMBER.toInteger()
@ -78,7 +80,7 @@ def check_docs(){
file_only_tdgpt_change_except = sh (
script: '''
cd ${WKC}
git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD ${CHANGE_TARGET}`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | grep -v "forecastoperator.c\\|anomalywindowoperator.c\\|tanalytics.h\\|tanalytics.c" |grep -v "tsim/analytics" |grep -v "tdgpt_cases.task" || :
git --no-pager diff --name-only FETCH_HEAD `git merge-base FETCH_HEAD ${CHANGE_TARGET}`|grep -v "^docs/en/"|grep -v "^docs/zh/"|grep -v ".md$" | grep -v ${tdgpt_file} || :
''',
returnStdout: true
).trim()
@ -570,7 +572,7 @@ pipeline {
cd ${WKC}/tests/parallel_test
./run_scan_container.sh -d ${WKDIR} -b ${BRANCH_NAME}_${BUILD_ID} -f ${WKDIR}/tmp/${BRANCH_NAME}_${BUILD_ID}/docs_changed.txt ''' + extra_param + '''
'''
if ( file_no_doc_changed =~ /forecastoperator.c|anomalywindowoperator.c|tsim\/analytics|tdgpt_cases.task/ ) {
if ( file_no_doc_changed =~ /orecastoperator.c|anomalywindowoperator.c|tanalytics.h|tanalytics.c|tdgpt_cases.task|analytics/ ) {
sh '''
cd ${WKC}/tests/parallel_test
export DEFAULT_RETRY_TIME=2

View File

@ -216,14 +216,33 @@ static size_t taosCurlWriteData(char *pCont, size_t contLen, size_t nmemb, void
return 0;
}
pRsp->dataLen = (int64_t)contLen * (int64_t)nmemb;
pRsp->data = taosMemoryMalloc(pRsp->dataLen + 1);
int64_t newDataSize = (int64_t) contLen * nmemb;
int64_t size = pRsp->dataLen + newDataSize;
if (pRsp->data == NULL) {
pRsp->data = taosMemoryMalloc(size + 1);
if (pRsp->data == NULL) {
uError("failed to prepare recv buffer for post rsp, len:%d, code:%s", (int32_t) size + 1, tstrerror(terrno));
return 0; // return the recv length, if failed, return 0
}
} else {
char* p = taosMemoryRealloc(pRsp->data, size + 1);
if (p == NULL) {
uError("failed to prepare recv buffer for post rsp, len:%d, code:%s", (int32_t) size + 1, tstrerror(terrno));
return 0; // return the recv length, if failed, return 0
}
pRsp->data = p;
}
if (pRsp->data != NULL) {
(void)memcpy(pRsp->data, pCont, pRsp->dataLen);
pRsp->data[pRsp->dataLen] = 0;
uDebugL("curl response is received, len:%" PRId64 ", content:%s", pRsp->dataLen, pRsp->data);
return pRsp->dataLen;
(void)memcpy(pRsp->data + pRsp->dataLen, pCont, newDataSize);
pRsp->dataLen = size;
pRsp->data[size] = 0;
uDebugL("curl response is received, len:%" PRId64 ", content:%s", size, pRsp->data);
return newDataSize;
} else {
pRsp->dataLen = 0;
uError("failed to malloc curl response");
@ -478,11 +497,13 @@ static int32_t taosAnalJsonBufWriteStrUseCol(SAnalyticBuf *pBuf, const char *buf
}
if (pBuf->bufType == ANALYTICS_BUF_TYPE_JSON) {
if (taosWriteFile(pBuf->filePtr, buf, bufLen) != bufLen) {
int32_t ret = taosWriteFile(pBuf->filePtr, buf, bufLen);
if (ret != bufLen) {
return terrno;
}
} else {
if (taosWriteFile(pBuf->pCols[colIndex].filePtr, buf, bufLen) != bufLen) {
int32_t ret = taosWriteFile(pBuf->pCols[colIndex].filePtr, buf, bufLen);
if (ret != bufLen) {
return terrno;
}
}