From d6d839a4307665ea3e8834800da94323e29f20dc Mon Sep 17 00:00:00 2001
From: Feng Chao
Date: Thu, 13 Mar 2025 17:12:15 +0800
Subject: [PATCH 1/4] update doc-build and ci workflow

---
 .github/workflows/taosd-ci.yml        | 79 +++------------------------
 .github/workflows/taosd-doc-build.yml |  1 -
 2 files changed, 9 insertions(+), 71 deletions(-)

diff --git a/.github/workflows/taosd-ci.yml b/.github/workflows/taosd-ci.yml
index 23c0e47ebb..7f6dc5d3d7 100644
--- a/.github/workflows/taosd-ci.yml
+++ b/.github/workflows/taosd-ci.yml
@@ -9,6 +9,13 @@ on:
     paths-ignore:
       - 'packaging/**'
       - 'docs/**'
+      - 'tools/tdgpt/**'
+      - 'source/libs/executor/src/forecastoperator.c'
+      - 'source/libs/executor/src/anomalywindowoperator.c'
+      - 'include/common/tanalytics.h'
+      - 'source/common/src/tanalytics.c'
+      - 'tests/parallel/tdgpt_cases.task'
+      - 'tests/script/tsim/analytics'

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}-TDengine
@@ -18,87 +25,19 @@ env:
   WKC: '/var/lib/jenkins/workspace/TDinternal/community'

 jobs:
-  fetch-parameters:
-    runs-on:
-      group: CI
-      labels: [self-hosted, Linux, X64, testing]
-    outputs:
-      run_function_test: ${{ steps.parameters.outputs.run_function_test }}
-      run_tdgpt_test: ${{ steps.parameters.outputs.run_tdgpt_test }}
-    steps:
-      - name: Determine trigger source and fetch parameters
-        id: parameters
-        run: |
-          set -euo pipefail
-          # target_branch=${{ github.event.pull_request.base.ref }}
-
-          # # Fetch the latest code from the target branch
-          # cd ${{ env.WKC }}
-          # git reset --hard
-          # git clean -f
-          # git remote prune origin
-          # git fetch
-          # git checkout "$target_branch"
-          # git remote prune origin
-          # git pull >/dev/null
-
-          # # Check whether to run tdgpt test cases
-          # changed_files_non_doc=$(git --no-pager diff --name-only FETCH_HEAD $(git merge-base FETCH_HEAD $target_branch) | grep -v "^docs/en/" | grep -v "^docs/zh/" | grep -v ".md$" | tr '\n' ' ' || :)
-          # echo "changed files exclude doc: ${changed_files_non_doc}"
-
-          # if [[ -n "$changed_files_non_doc" && "$changed_files_non_doc" =~ (forecastoperator\.c|anomalywindowoperator\.c|tanalytics\.h|tanalytics\.c|tdgpt_cases\.task|analytics|tdgpt) ]]; then
-          #   run_tdgpt_test="true"
-          # else
-          #   run_tdgpt_test="false"
-          # fi
-          # echo "run tdgpt test: ${run_tdgpt_test}"
-
-          # # Check whether to run function test cases
-          # changed_files_non_tdgpt=$(git --no-pager diff --name-only FETCH_HEAD $(git merge-base FETCH_HEAD $target_branch) | \
-          #   grep -v "^docs/en/" | \
-          #   grep -v "^docs/zh/" | \
-          #   grep -v ".md$" | \
-          #   grep -Ev "forecastoperator\.c|anomalywindowoperator\.c|tanalytics\.h|tanalytics\.c|tdgpt_cases\.task|analytics|tdgpt" | \
-          #   tr '\n' ' ' || :)
-          # echo "changed files exclude tdgpt: ${changed_files_non_tdgpt}"
-
-          # if [ -n "$changed_files_non_tdgpt" ]; then
-          #   run_function_test="true"
-          # else
-          #   run_function_test="false"
-          # fi
-
-          # echo "run function test: ${run_function_test}"
-
-          run_tdgpt_test="true"
-          run_function_test="true"
-          # Output the results for GitHub Actions
-          echo "run_function_test=$run_function_test" >> $GITHUB_OUTPUT
-          echo "run_tdgpt_test=$run_tdgpt_test" >> $GITHUB_OUTPUT
-
-          echo ${{ github.event.pull_request.head.ref }}
-          echo ${{ github.event.pull_request.base.ref }}
-          echo ${{ github.event.pull_request.number }}
-
   run-tests-on-linux:
     uses: taosdata/.github/.github/workflows/run-tests-on-linux.yml@main
-    needs: fetch-parameters
-    if: ${{ needs.fetch-parameters.outputs.run_tdgpt_test == 'true' || needs.fetch-parameters.outputs.run_function_test == 'true' }}
     with:
       tdinternal: false
-      run_function_test: ${{ needs.fetch-parameters.outputs.run_function_test == 'true' }}
-      run_tdgpt_test: ${{ needs.fetch-parameters.outputs.run_tdgpt_test == 'true' }}
+      run_function_test: true
+      run_tdgpt_test: false

   run-tests-on-mac:
     uses: taosdata/.github/.github/workflows/run-tests-on-macos.yml@main
-    needs: fetch-parameters
-    if: ${{ needs.fetch-parameters.outputs.run_function_test == 'true' }}
     with:
       tdinternal: false

   run-tests-on-windows:
     uses: taosdata/.github/.github/workflows/run-tests-on-windows.yml@main
-    needs: fetch-parameters
-    if: ${{ needs.fetch-parameters.outputs.run_function_test == 'true' }}
     with:
       tdinternal: false

diff --git a/.github/workflows/taosd-doc-build.yml b/.github/workflows/taosd-doc-build.yml
index 28f1f10835..3ac922820a 100644
--- a/.github/workflows/taosd-doc-build.yml
+++ b/.github/workflows/taosd-doc-build.yml
@@ -7,7 +7,6 @@ on:
       - '3.0'
     paths:
       - 'docs/**'
-      - '*.md'

 env:
   DOC_WKC: "/root/doc_ci_work"

From b69faedd7b4a5db356d10324a49e0fbeb64742f5 Mon Sep 17 00:00:00 2001
From: Feng Chao
Date: Fri, 14 Mar 2025 07:48:15 +0800
Subject: [PATCH 2/4] Add TDgpt CI workflow to support unit tests and code checks
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .github/workflows/tdgpt-ci.yml | 51 ++++++++++++++++++++++++++++++++++
 1 file changed, 51 insertions(+)
 create mode 100644 .github/workflows/tdgpt-ci.yml

diff --git a/.github/workflows/tdgpt-ci.yml b/.github/workflows/tdgpt-ci.yml
new file mode 100644
index 0000000000..7c579e1692
--- /dev/null
+++ b/.github/workflows/tdgpt-ci.yml
@@ -0,0 +1,51 @@
+name: TDgpt CI
+
+on:
+  pull_request:
+    branches:
+      - '3.0'
+    paths:
+      - 'tools/tdgpt/**'
+
+jobs:
+  unit-test:
+    runs-on: ubuntu-latest
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.10"]
+
+    defaults:
+      run:
+        working-directory: ${{ github.workspace }}/tools/tdgpt
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v3
+        with:
+          python-version: ${{ matrix.python-version }}
+          cache: 'pip'
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install flake8 pytest pylint
+          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+
+      - name: Checking the code with pylint
+        run: |
+          pylint $(git ls-files '*.py') --exit-zero
+
+      - name: Checking the code with flake8
+        run: |
+          # stop the build if there are Python syntax errors or undefined names
+          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+
+      - name: Run test cases with pytest
+        run: |
+          pytest

From 44c1643ce2ccd69cb5a469864a34bc2a3feb1a94 Mon Sep 17 00:00:00 2001
From: Feng Chao
Date: Fri, 14 Mar 2025 08:05:35 +0800
Subject: [PATCH 3/4] ci: Delete .github/workflows/tdgpt-ci.yml

---
 .github/workflows/tdgpt-ci.yml | 51 ----------------------------------
 1 file changed, 51 deletions(-)
 delete mode 100644 .github/workflows/tdgpt-ci.yml

diff --git a/.github/workflows/tdgpt-ci.yml b/.github/workflows/tdgpt-ci.yml
deleted file mode 100644
index 7c579e1692..0000000000
--- a/.github/workflows/tdgpt-ci.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-name: TDgpt CI
-
-on:
-  pull_request:
-    branches:
-      - '3.0'
-    paths:
-      - 'tools/tdgpt/**'
-
-jobs:
-  unit-test:
-    runs-on: ubuntu-latest
-
-    strategy:
-      fail-fast: false
-      matrix:
-        python-version: ["3.10"]
-
-    defaults:
-      run:
-        working-directory: ${{ github.workspace }}/tools/tdgpt
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v3
-        with:
-          python-version: ${{ matrix.python-version }}
-          cache: 'pip'
-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install flake8 pytest pylint
-          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-
-      - name: Checking the code with pylint
-        run: |
-          pylint $(git ls-files '*.py') --exit-zero
-
-      - name: Checking the code with flake8
-        run: |
-          # stop the build if there are Python syntax errors or undefined names
-          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
-          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
-
-      - name: Run test cases with pytest
-        run: |
-          pytest

From d3fca89dc975010074987971e3842f01eb4a73a1 Mon Sep 17 00:00:00 2001
From: danielclow <106956386+danielclow@users.noreply.github.com>
Date: Fri, 14 Mar 2025 12:32:18 +0800
Subject: [PATCH 4/4] docs: fix two links in function doc

---
 docs/en/14-reference/03-taos-sql/10-function.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/en/14-reference/03-taos-sql/10-function.md b/docs/en/14-reference/03-taos-sql/10-function.md
index 4e60f3284f..b711fe095c 100644
--- a/docs/en/14-reference/03-taos-sql/10-function.md
+++ b/docs/en/14-reference/03-taos-sql/10-function.md
@@ -2196,8 +2196,8 @@ ignore_null_values: {

 **Usage Instructions**

-- INTERP is used to obtain the record value of a specified column at the specified time slice. It has a dedicated syntax (interp_clause) when used. For syntax introduction, see [reference link](../select/#interp).
-- When there is no row data that meets the conditions at the specified time slice, the INTERP function will interpolate according to the settings of the [FILL](../distinguished/#FILL-Clause) parameter.
+- INTERP is used to obtain the record value of a specified column at the specified time slice. It has a dedicated syntax (interp_clause) when used. For syntax introduction, see [reference link](../query-data/#interp).
+- When there is no row data that meets the conditions at the specified time slice, the INTERP function will interpolate according to the settings of the [FILL](../time-series-extensions/#fill-clause) parameter.
 - When INTERP is applied to a supertable, it will sort all the subtable data under that supertable by primary key column and perform interpolation calculations, and can also be used with PARTITION BY tbname to force the results to a single timeline.
 - INTERP can be used with the pseudocolumn _irowts to return the timestamp corresponding to the interpolation point (supported from version 3.0.2.0).
 - INTERP can be used with the pseudocolumn _isfilled to display whether the return result is from the original record or generated by the interpolation algorithm (supported from version 3.0.3.0).
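Note: the INTERP usage instructions patched above describe the dedicated interp_clause syntax only in prose. A minimal sketch of how such a query might look is shown below; the table `meters` and column `current` are placeholder names rather than objects from this patch, and the exact clause order may vary between TDengine versions.

```sql
-- Hypothetical example: interpolate `current` once per minute over a ten-minute
-- window, filling gaps linearly. PARTITION BY tbname keeps each subtable on its
-- own timeline; _irowts returns the timestamp of each interpolated row, and
-- _isfilled marks rows generated by the FILL algorithm instead of read from data.
SELECT _irowts, _isfilled, INTERP(current)
FROM meters
PARTITION BY tbname
RANGE('2025-03-14 00:00:00', '2025-03-14 00:10:00')
EVERY(1m)
FILL(LINEAR);
```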