diff --git a/CMakeLists.txt b/CMakeLists.txt
index 315036d115..a55b5fbed9 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -15,6 +15,7 @@ SET(TD_ADMIN FALSE)
 SET(TD_GRANT FALSE)
 SET(TD_MQTT FALSE)
 SET(TD_TSDB_PLUGINS FALSE)
+SET(TD_STORAGE FALSE)
 SET(TD_COVER FALSE)
 SET(TD_MEM_CHECK FALSE)
diff --git a/Jenkinsfile b/Jenkinsfile
index 516b179dce..6b3d9e5151 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -5,7 +5,7 @@ node {
     git url: 'https://github.com/taosdata/TDengine.git'
 }
 
-
+def kipstage=0
 def abortPreviousBuilds() {
   def currentJobName = env.JOB_NAME
   def currentBuildNumber = env.BUILD_NUMBER.toInteger()
@@ -45,6 +45,7 @@ def pre_test(){
     git pull
     git fetch origin +refs/pull/${CHANGE_ID}/merge
     git checkout -qf FETCH_HEAD
+    git --no-pager diff --name-only FETCH_HEAD $(git merge-base FETCH_HEAD develop)|grep -v -E '.*md|//src//connector|Jenkinsfile' || exit 0
     cd ${WK}
     git reset --hard HEAD~10
     git checkout develop
@@ -62,6 +63,7 @@ def pre_test(){
     '''
     return 1
 }
+
 pipeline {
   agent none
 
@@ -71,21 +73,43 @@ pipeline {
   }
 
   stages {
-
+    stage('pre_build'){
+      agent{label 'master'}
+      when {
+        changeRequest()
+      }
+      steps {
+        sh'''
+        cd ${WORKSPACE}
+        git checkout develop
+        git pull
+        git fetch origin +refs/pull/${CHANGE_ID}/merge
+        git checkout -qf FETCH_HEAD
+        '''
+        script{
+          skipstage=sh(script:"git --no-pager diff --name-only FETCH_HEAD develop|grep -v -E '.*md|//src//connector|Jenkinsfile|test-all.sh' || echo 1 ",returnStdout:true)
+        }
+      }
+    }
     stage('Parallel test stage') {
+      //only build pr
       when {
         changeRequest()
+        expression {
+          skipstage != 1
+        }
       }
       parallel {
-        stage('python_1') {
+        stage('python_1_s1') {
          agent{label 'p1'}
          steps {
            pre_test()
-            timeout(time: 90, unit: 'MINUTES'){
+            timeout(time: 45, unit: 'MINUTES'){
              sh '''
+              date
              cd ${WKC}/tests
              find pytest -name '*'sql|xargs rm -rf
              ./test-all.sh p1
@@ -94,26 +118,38 @@ pipeline {
          }
        }
 
-        stage('python_2') {
+        stage('python_2_s5') {
          agent{label 'p2'}
          steps {
            pre_test()
+            timeout(time: 45, unit: 'MINUTES'){
              sh '''
+              date
              cd ${WKC}/tests
              find pytest -name '*'sql|xargs rm -rf
              ./test-all.sh p2
              date'''
-            sh '''
-            cd ${WKC}/tests
-            ./test-all.sh b4fq
-            '''
+            }
          }
        }
 
-        stage('test_b1') {
+        stage('python_3_s6') {
+          agent{label 'p3'}
+          steps {
+            timeout(time: 45, unit: 'MINUTES'){
+              pre_test()
+              sh '''
+              date
+              cd ${WKC}/tests
+              ./test-all.sh p3
+              date'''
+            }
+          }
+        }
+        stage('test_b1_s2') {
          agent{label 'b1'}
          steps {
-            timeout(time: 90, unit: 'MINUTES'){
+            timeout(time: 45, unit: 'MINUTES'){
              pre_test()
              sh '''
              cd ${WKC}/tests
@@ -123,7 +159,7 @@ pipeline {
          }
        }
 
-        stage('test_crash_gen') {
+        stage('test_crash_gen_s3') {
          agent{label "b2"}
          steps {
            pre_test()
@@ -139,7 +175,7 @@ pipeline {
              ./handle_crash_gen_val_log.sh
              '''
            }
-            timeout(time: 90, unit: 'MINUTES'){
+            timeout(time: 45, unit: 'MINUTES'){
              sh '''
              date
              cd ${WKC}/tests
@@ -150,7 +186,7 @@ pipeline {
          }
        }
 
-        stage('test_valgrind') {
+        stage('test_valgrind_s4') {
          agent{label "b3"}
          steps {
 
@@ -162,7 +198,7 @@ pipeline {
              ./handle_val_log.sh
              '''
            }
-            timeout(time: 90, unit: 'MINUTES'){
+            timeout(time: 45, unit: 'MINUTES'){
              sh '''
              date
              cd ${WKC}/tests
@@ -171,17 +207,66 @@ pipeline {
            }
          }
        }
-
-
+        stage('test_b4_s7') {
+          agent{label 'b4'}
+          steps {
+            timeout(time: 45, unit: 'MINUTES'){
+              pre_test()
+              sh '''
+              date
+              cd ${WKC}/tests
+              ./test-all.sh b4fq
+              date'''
+            }
+          }
+        }
+        stage('test_b5_s8') {
+          agent{label 'b5'}
+          steps {
+            timeout(time: 45, unit: 'MINUTES'){
+              pre_test()
+              sh '''
+              date
+              cd ${WKC}/tests
+              ./test-all.sh b5fq
+              date'''
+            }
+          }
+        }
+        stage('test_b6_s9') {
+          agent{label 'b6'}
+          steps {
+            timeout(time: 45, unit: 'MINUTES'){
+              pre_test()
+              sh '''
+              date
+              cd ${WKC}/tests
+              ./test-all.sh b6fq
+              date'''
+            }
+          }
+        }
+        stage('test_b7_s10') {
+          agent{label 'b7'}
+          steps {
+            timeout(time: 45, unit: 'MINUTES'){
+              pre_test()
+              sh '''
+              date
+              cd ${WKC}/tests
+              ./test-all.sh b7fq
+              date'''
+            }
+          }
+        }
      }
    }
  }
-  post {
-
+  post {
    success {
      emailext (
-        subject: "PR-result: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'",
-        body: '''
+        subject: "PR-result: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]' SUCCESS",
+        body: """
@@ -197,29 +282,29 @@ pipeline {